diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 79260da6cd..e82c39bb26 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 60.9.3 +current_version = 67.7.2 commit = True tag = True diff --git a/.coveragerc b/.coveragerc index 6a34e662d3..0402a8c61f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,6 +2,11 @@ omit = # leading `*/` for pytest-dev/pytest-cov#456 */.tox/* +disable_warnings = + couldnt-parse + + # local + */_validate_pyproject/* # generated code, tested in `validate-pyproject` [report] show_missing = True diff --git a/.editorconfig b/.editorconfig index b8aeea177b..304196f81e 100644 --- a/.editorconfig +++ b/.editorconfig @@ -14,3 +14,6 @@ max_line_length = 88 [*.{yml,yaml}] indent_style = space indent_size = 2 + +[*.rst] +indent_style = space diff --git a/.flake8 b/.flake8 deleted file mode 100644 index dd3cc20661..0000000000 --- a/.flake8 +++ /dev/null @@ -1,15 +0,0 @@ -[flake8] -max-line-length = 88 - -# jaraco/skeleton#34 -max-complexity = 10 - -extend-exclude = - build - setuptools/_vendor - setuptools/_distutils - pkg_resources/_vendor - -extend-ignore = - # Black creates whitespace before colon - E203 diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 672acd1885..1c42bf5a94 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -95,8 +95,13 @@ body: label: How to Reproduce description: >- Describe the steps to reproduce this bug. + + Please try to create a [minimal reproducer](https://stackoverflow.com/help/minimal-reproducible-example), + and avoid things like "see the steps in the CI logs". placeholder: | - 1. Integrate setuptools via '...' + 1. Clone a simplified example: `git clone ...` + 2. Create a virtual environment for isolation with `...` + 3. Build the project with setuptools via '...' 4. Then run '...' 5. An error occurs. validations: diff --git a/.github/workflows/ci-sage.yml b/.github/workflows/ci-sage.yml index 425681d7c3..f3fe8513b9 100644 --- a/.github/workflows/ci-sage.yml +++ b/.github/workflows/ci-sage.yml @@ -37,6 +37,9 @@ on: push: tags: - '*' + pull_request: + paths: + - .github/workflows/ci-sage.yml workflow_dispatch: # Allow to run manually @@ -45,19 +48,6 @@ env: DIST_PREREQ: python3 # Name of this project in the Sage distribution SPKG: setuptools - # Sage distribution packages to build - TARGETS_PRE: build/make/Makefile - TARGETS: setuptools pyzmq - TARGETS_OPTIONAL: build/make/Makefile - # Standard setting: Test the current beta release of Sage: - SAGE_REPO: sagemath/sage - SAGE_REF: develop - # Test with the branch from https://trac.sagemath.org/ticket/33288 - # This may provide hotfixes for the CI that have not been merged into - # the sage develop branch yet. 
- SAGE_TRAC_GIT: https://github.com/sagemath/sagetrac-mirror.git - SAGE_TICKET: 33288 - REMOVE_PATCHES: "*" jobs: @@ -85,68 +75,23 @@ jobs: path: upstream name: upstream - docker: - runs-on: ubuntu-latest + linux: + # https://github.com/sagemath/sage/blob/develop/.github/workflows/docker.yml + # Use branch of ticket https://trac.sagemath.org/ticket/33288 + uses: sagemath/sagetrac-mirror/.github/workflows/docker.yml@u/mkoeppe/setuptools_ci_target + with: + # Sage distribution packages to build + targets: setuptools pyzmq + # Standard setting: Test the current beta release of Sage: + sage_repo: sagemath/sage + sage_ref: develop + upstream_artifact: upstream + sage_trac_git: https://github.com/sagemath/sagetrac-mirror.git + # Test with the branch from https://trac.sagemath.org/ticket/33288 + # This may provide hotfixes for the CI that have not been merged into + # the sage develop branch yet. + sage_trac_ticket: 33288 + # We prefix the image name with the SPKG name ("setuptools-") to avoid the error + # 'Package "sage-docker-..." is already associated with another repository.' + docker_push_repository: ghcr.io/${{ github.repository }}/setuptools- needs: [dist] - strategy: - fail-fast: false - max-parallel: 32 - matrix: - tox_system_factor: [ubuntu-trusty, ubuntu-xenial, ubuntu-bionic, ubuntu-focal, ubuntu-hirsute, ubuntu-impish, ubuntu-jammy, debian-stretch, debian-buster, debian-bullseye, debian-bookworm, debian-sid, linuxmint-17, linuxmint-18, linuxmint-19, linuxmint-19.3, linuxmint-20.1, linuxmint-20.2, linuxmint-20.3, fedora-26, fedora-27, fedora-28, fedora-29, fedora-30, fedora-31, fedora-32, fedora-33, fedora-34, fedora-35, centos-7, centos-stream-8, centos-stream-9, gentoo-python3.9, archlinux-latest, opensuse-15, opensuse-15.3, opensuse-tumbleweed, slackware-14.2, ubuntu-bionic-i386, manylinux-2_24-i686, debian-buster-i386, centos-7-i386] - tox_packages_factor: [minimal, standard] - env: - TOX_ENV: docker-${{ matrix.tox_system_factor }}-${{ matrix.tox_packages_factor }} - LOGS_ARTIFACT_NAME: logs-commit-${{ github.sha }}-tox-docker-${{ matrix.tox_system_factor }}-${{ matrix.tox_packages_factor }} - DOCKER_TARGETS: configured with-targets with-targets-optional - steps: - - name: Check out SageMath - uses: actions/checkout@v2 - with: - repository: ${{ env.SAGE_REPO }} - ref: ${{ env.SAGE_REF }} - fetch-depth: 2000 - if: env.SAGE_REPO != '' - - name: Check out git-trac-command - uses: actions/checkout@v2 - with: - repository: sagemath/git-trac-command - path: git-trac-command - if: env.SAGE_TRAC_GIT != '' - - name: Check out SageMath from trac.sagemath.org - shell: bash {0} - run: | - git config --global user.email "ci-sage@example.com" - git config --global user.name "ci-sage workflow" - if [ ! 
-d .git ]; then git init; fi; git remote add trac ${{ env.SAGE_TRAC_GIT }} && x=1 && while [ $x -le 5 ]; do x=$(( $x + 1 )); sleep $(( $RANDOM % 60 + 1 )); if git-trac-command/git-trac fetch $SAGE_TICKET; then git merge FETCH_HEAD || echo "(ignored)"; exit 0; fi; sleep 40; done; exit 1 - if: env.SAGE_TRAC_GIT != '' - - uses: actions/download-artifact@v2 - with: - path: upstream - name: upstream - - name: Install test prerequisites - run: | - sudo DEBIAN_FRONTEND=noninteractive apt-get update - sudo DEBIAN_FRONTEND=noninteractive apt-get install tox python3-setuptools - - name: Update Sage packages from upstream artifact - run: | - (export PATH=$(pwd)/build/bin:$PATH; (cd upstream && bash -x update-pkgs.sh) && sed -i.bak '/upstream/d' .dockerignore && echo "/:toolchain:/i ADD upstream upstream" | sed -i.bak -f - build/bin/write-dockerfile.sh && git diff) - - name: Configure and build Sage distribution within a Docker container - run: | - set -o pipefail; EXTRA_DOCKER_BUILD_ARGS="--build-arg USE_MAKEFLAGS=\"-k V=0 SAGE_NUM_THREADS=3\"" tox -e $TOX_ENV -- $TARGETS 2>&1 | sed "/^configure: notice:/s|^|::warning file=artifacts/$LOGS_ARTIFACT_NAME/config.log::|;/^configure: warning:/s|^|::warning file=artifacts/$LOGS_ARTIFACT_NAME/config.log::|;/^configure: error:/s|^|::error file=artifacts/$LOGS_ARTIFACT_NAME/config.log::|;" - - name: Copy logs from the Docker image or build container - run: | - mkdir -p "artifacts/$LOGS_ARTIFACT_NAME" - cp -r .tox/$TOX_ENV/Dockerfile .tox/$TOX_ENV/log "artifacts/$LOGS_ARTIFACT_NAME" - if [ -f .tox/$TOX_ENV/Dockertags ]; then CONTAINERS=$(docker create $(tail -1 .tox/$TOX_ENV/Dockertags) /bin/bash || true); fi - if [ -n "$CONTAINERS" ]; then for CONTAINER in $CONTAINERS; do for ARTIFACT in /sage/logs; do docker cp $CONTAINER:$ARTIFACT artifacts/$LOGS_ARTIFACT_NAME && HAVE_LOG=1; done; if [ -n "$HAVE_LOG" ]; then break; fi; done; fi - if: always() - - uses: actions/upload-artifact@v2 - with: - path: artifacts - name: ${{ env.LOGS_ARTIFACT_NAME }} - if: always() - - name: Print out logs for immediate inspection - # and markup the output with GitHub Actions logging commands - run: | - .github/workflows/scan-logs.sh "artifacts/$LOGS_ARTIFACT_NAME" - if: always() diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c680fb3636..d962bdba9e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -9,36 +9,92 @@ concurrency: ${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true +permissions: + contents: read + +env: + # Environment variables to support color support (jaraco/skeleton#66): + # Request colored output from CLI tools supporting it. Different tools + # interpret the value differently. For some, just being set is sufficient. + # For others, it must be a non-zero integer. For yet others, being set + # to a non-empty value is sufficient. For tox, it must be one of + # <blank>, 0, 1, false, no, off, on, true, yes. The only enabling value + # in common is "1". 
+ FORCE_COLOR: 1 + # MyPy's color enforcement (must be a non-zero number) + MYPY_FORCE_COLOR: -42 + # Recognized by the `py` package, dependency of `pytest` (must be "1") + PY_COLORS: 1 + # Make tox-wrapped tools see color requests + TOX_TESTENV_PASSENV: >- + FORCE_COLOR + MYPY_FORCE_COLOR + NO_COLOR + PY_COLORS + PYTEST_THEME + PYTEST_THEME_MODE + + # Suppress noisy pip warnings + PIP_DISABLE_PIP_VERSION_CHECK: 'true' + PIP_NO_PYTHON_VERSION_WARNING: 'true' + PIP_NO_WARN_SCRIPT_LOCATION: 'true' + + # Disable the spinner, noise in GHA; TODO(webknjaz): Fix this upstream + # Must be "1". + TOX_PARALLEL_NO_SPINNER: 1 + + jobs: test: strategy: matrix: - distutils: - - local python: - - pypy-3.7 - - 3.7 - - 3.8 - - 3.9 - - "3.10" + - "3.7" + - "3.11" + - "3.12" + # Workaround for actions/setup-python#508 + dev: + - -dev platform: - ubuntu-latest - macos-latest - windows-latest include: + - python: "3.8" + platform: ubuntu-latest + - python: "3.9" + platform: ubuntu-latest + - python: "3.10" + platform: ubuntu-latest + - python: pypy3.9 + platform: ubuntu-latest + distutils: stdlib - platform: ubuntu-latest python: "3.10" distutils: stdlib runs-on: ${{ matrix.platform }} + continue-on-error: ${{ matrix.python == '3.12' }} env: - SETUPTOOLS_USE_DISTUTILS: ${{ matrix.distutils }} + SETUPTOOLS_USE_DISTUTILS: ${{ matrix.distutils || 'local' }} timeout-minutes: 75 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + id: python-install + uses: actions/setup-python@v4 with: - python-version: ${{ matrix.python }} + python-version: ${{ matrix.python }}${{ matrix.dev }} + - uses: actions/cache@v3 + id: cache + with: + path: setuptools/tests/config/downloads/*.cfg + key: >- + ${{ hashFiles('setuptools/tests/config/setupcfg_examples.txt') }}- + ${{ hashFiles('setuptools/tests/config/downloads/*.py') }} + - name: Populate download cache + if: steps.cache.outputs.cache-hit != 'true' + working-directory: setuptools/tests/config + run: python -m downloads.preload setupcfg_examples.txt - name: Install tox run: | python -m pip install tox @@ -49,35 +105,101 @@ jobs: run: pipx run coverage xml --ignore-errors - name: Publish coverage if: hashFiles('coverage.xml') != '' # Rudimentary `file.exists()` - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v3 with: flags: >- # Mark which lines are covered by which envs - ${{ runner.os }}, - ${{ matrix.python }} + CI-GHA, + ${{ github.job }}, + OS-${{ runner.os }}, + VM-${{ matrix.platform }}, + Py-${{ steps.python-install.outputs.python-version }} + + docs: + runs-on: ubuntu-latest + env: + TOXENV: docs + steps: + - uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v4 + - name: Install tox + run: | + python -m pip install tox + - name: Run tests + run: tox + + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - integration-test + - test + - docs + - test_cygwin + + runs-on: ubuntu-latest + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + allowed-skips: integration-test + jobs: ${{ toJSON(needs) }} test_cygwin: - runs-on: windows-latest + strategy: + matrix: + python: + - 39 + platform: + - windows-latest + runs-on: ${{ matrix.platform }} timeout-minutes: 75 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Install Cygwin with Python - uses: 
cygwin/cygwin-install-action@v1 + uses: cygwin/cygwin-install-action@v2 with: platform: x86_64 packages: >- - git, + python${{ matrix.python }}, + python${{ matrix.python }}-devel, + python${{ matrix.python }}-tox, gcc-core, - python38, - python38-devel, - python38-pip - - name: Install tox - shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} + git, + - name: Record the currently selected Python version + id: python-install + # NOTE: This roughly emulates what `actions/setup-python@v4` provides + # NOTE: except the action gets the version from the installation path + # NOTE: on disk and we get it from runtime. run: | - python3.8 -m pip install tox + python -c 'import platform; print("python-version=" + platform.python_version())' >> ${GITHUB_OUTPUT} + shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} - name: Run tests shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} run: | - tox -- --cov-report xml + git config --global --add safe.directory "$(cygpath -u "$GITHUB_WORKSPACE")" # workaround for #3408 + tox + - name: Create coverage report + if: hashFiles('.coverage') != '' # Rudimentary `file.exists()` + run: | + python -m pip install coverage + python -m coverage xml --ignore-errors + shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} + - name: Publish coverage + if: hashFiles('coverage.xml') != '' # Rudimentary `file.exists()` + uses: codecov/codecov-action@v3 + with: + files: >- + ${{ github.workspace }}\coverage.xml + flags: >- # Mark which lines are covered by which envs + CI-GHA, + ${{ github.job }}, + OS-${{ runner.os }}, + VM-${{ matrix.platform }}, + Py-${{ steps.python-install.outputs.python-version }} integration-test: needs: test @@ -92,13 +214,13 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 75 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Install OS-level dependencies run: | sudo apt-get update sudo apt-get install build-essential gfortran libopenblas-dev - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: # Use a release that is not very new but still has a long life: python-version: "3.8" @@ -109,16 +231,19 @@ jobs: run: tox -e integration release: - needs: [test, test_cygwin, integration-test] + permissions: + contents: write + needs: + - check if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') runs-on: ubuntu-latest timeout-minutes: 75 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: 3.11-dev - name: Install tox run: | python -m pip install tox diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..6bef34935b --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,13 @@ +version: 2 +python: + install: + - path: . + extra_requirements: + - docs + +# workaround for readthedocs/readthedocs.org#9623 +build: + # workaround for readthedocs/readthedocs.org#9635 + os: ubuntu-22.04 + tools: + python: "3" diff --git a/.readthedocs.yml b/.readthedocs.yml deleted file mode 100644 index cc698548db..0000000000 --- a/.readthedocs.yml +++ /dev/null @@ -1,6 +0,0 @@ -version: 2 -python: - install: - - path: . 
- extra_requirements: - docs diff --git a/CHANGES.rst b/CHANGES.rst index a24cd2ad9b..242d4d06c4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,1156 @@ +v67.7.2 +------- + + +Misc +^^^^ +* #3902: Fixed wrong URLs used in warnings and logs. + + +v67.7.1 +------- + + +Misc +^^^^ +* #3898: Fixed ``setuptools.dist:invalid_unless_false`` so that it no longer raises an error when the value is false -- by :user:`jammarher` + + +v67.7.0 +------- + + +Changes +^^^^^^^ +* #3849: Overhaul warning system for better visibility. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3859: Added a note about historical presence of ``wheel`` + in ``build-system.requires``, in ``pyproject.toml``. +* #3893: Improved the documentation example regarding making a thin :pep:`517` in-tree + backend wrapper of ``setuptools.build_meta`` that is future-proof and supports + the :pep:`660` hooks too -- by :user:`webknjaz`. + +Misc +^^^^ +* #3884: Add a ``stacklevel`` parameter to ``warnings.warn()`` to provide more information to the user. + -- by :user:`cclauss` + + +v67.6.1 +------- + + +Misc +^^^^ +* #3865: Fixed ``_WouldIgnoreField`` warnings for ``scripts`` and ``gui_scripts``, + when ``entry-points`` is not listed in dynamic. +* #3875: Update code generated by ``validate-pyproject`` to use v0.12.2. + This should fix default license patterns when ``pyproject.toml`` is used. + + +v67.6.0 +------- + + +Changes +^^^^^^^ +* #3804: Added caching for supported wheel tags. +* #3846: Added pruning heuristics to ``PackageFinder`` based on ``exclude``. + + +v67.5.1 +------- + + +Misc +^^^^ +* #3836: Fixed interaction between ``setuptools``' package auto-discovery and + auto-generated ``htmlcov`` files. + + Previously, the ``htmlcov`` name was ignored when searching for single-file + modules, however the correct behaviour is to ignore it when searching for + packages (since it is supposed to be a directory, see `coverage config`_) + -- by :user:`yukihiko-shinoda`. + + .. _coverage config: https://coverage.readthedocs.io/en/stable/config.html#html-directory +* #3838: Improved error messages for ``pyproject.toml`` validations. +* #3839: Fixed ``pkg_resources`` errors caused when parsing metadata of packages that + are already installed but do not conform with PEP 440. + + +v67.5.0 +------- + + +Changes +^^^^^^^ +* #3843: Although pkg_resources has been discouraged for use, some projects still consider pkg_resources viable for usage. This change makes it clear that pkg_resources should not be used, emitting a DeprecationWarning when imported. + + +v67.4.0 +------- + + +Changes +^^^^^^^ +* #3832: Update vendored ``importlib-metadata`` (to 6.0.0) and + ``importlib-resources`` (to 5.10.2) + + +v67.3.3 +------- + + +Misc +^^^^ +* #3820: Restore quoted ``#include`` argument to ``has_function``. + + +v67.3.2 +------- + + +Misc +^^^^ +* #3827: Improve deprecation warning message on ``pkg_resources.declare_namespace`` + to display the package name. + + +v67.3.1 +------- + + +Misc +^^^^ +* #3823: Fixes ``egg_info`` code path triggered during integration with ``pip``. + + +v67.3.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3434: Added deprecation warning for ``pkg_resources.declare_namespace``. + Users that wish to implement namespace packages are recommended to follow the + practice described in PEP 420 and omit the ``__init__.py`` file entirely. + +Changes +^^^^^^^ +* #3792: Reduced usage of ``pkg_resources`` in ``setuptools`` via internal + restructuring and refactoring. + +Misc +^^^^ +* #3822: Added debugging tips for "editable mode" and updated related docs. 
+ Instead of using a custom exception to display the help message to the user, + ``setuptools`` will now use a warning and re-raise the original exception. +* #3822: Added clarification about ``editable_wheel`` and ``dist_info`` CLI commands: + they should not be called directly with ``python setup.py ...``. + Instead they are reserved for internal use of ``setuptools`` (effectively as "private" commands). + Users are recommended to rely on build backend APIs (:pep:`517` and :pep:`660`) + exposed by ``setuptools.build_meta``. + + +v67.2.0 +------- + + +Changes +^^^^^^^ +* #3809: Merge with distutils@8c3c3d29, including fix for ``sysconfig.get_python_inc()`` (pypa/distutils#178), fix for segfault on MinGW (pypa/distutils#196), and better ``has_function`` support (pypa/distutils#195, #3648). + + +v67.1.0 +------- + + +Changes +^^^^^^^ +* #3795: Ensured that ``__file__`` is an absolute path when executing ``setup.py`` as + part of ``setuptools.build_meta``. + +Misc +^^^^ +* #3798: Updated validations for ``pyproject.toml`` using ``validate-pyproject==0.12.1`` + to allow stub packages (:pep:`561`) to be listed in ``tool.setuptools.packages`` + and ``tool.setuptools.package-dir``. + + +v67.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #3741: Removed patching of ``distutils._msvccompiler.gen_lib_options`` + for compatibility with Numpy < 1.11.2 -- by :user:`mgorny` +* #3790: Bump vendored version of :pypi:`packaging` to 23.0 + (:pypi:`pyparsing` is no longer required and was removed). + As a consequence, users will experience a more strict parsing of requirements. + Specifications that don't comply with :pep:`440` and :pep:`508` will result + in build errors. + + +v66.1.1 +------- + + +Misc +^^^^ +* #3782: Fixed a problem with the ``file`` directive in ``tool.setuptools.dynamic`` + (``pyproject.toml``) when the value is a simple string instead of a list. + + +v66.1.0 +------- + + +Changes +^^^^^^^ +* #3685: Fix improper usage of deprecated/removed ``pkgutil`` APIs in Python 3.12+. +* #3779: Files referenced by ``file:`` in ``setup.cfg`` and by ``project.readme.file``, + ``project.license.file`` or ``tool.setuptools.dynamic.*.file`` in + ``pyproject.toml`` are now automatically included in the generated sdists. + +Misc +^^^^ +* #3776: Added note about using the ``--use-pep517`` flag with ``pip`` to work around + ``InvalidVersion`` errors for packages that are already installed in the system. + + +v66.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #2497: Support for PEP 440 non-conforming versions has been removed. Environments containing packages with non-conforming versions may fail or the packages may not be recognized. + +Changes +^^^^^^^ +* #3769: Replace 'appdirs' with 'platformdirs'. + + +v65.7.0 +------- + + +Changes +^^^^^^^ +* #3594: Added ``htmlcov`` to FlatLayoutModuleFinder.DEFAULT_EXCLUDE -- by :user:`demianbrecht` +* #3667: Added a human-readable error description when ``.egg-info`` directory is not writeable -- by :user:`droodev` + +Misc +^^^^ +* #3713: Fixed incomplete ``getattr`` statement that caused problems when accessing an + undefined attribute. + + +v65.6.3 +------- + + +Misc +^^^^ +* #3709: Fix condition to patch ``distutils.dist.log`` to only apply when using + ``distutils`` from the stdlib. + + +v65.6.2 +------- + +No significant changes. + + +v65.6.1 +------- + + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3689: Documented that ``distutils.cfg`` might be ignored unless + ``SETUPTOOLS_USE_DISTUTILS=stdlib``. 
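A minimal sketch, assuming ``setuptools`` is installed, for checking at runtime which ``distutils`` implementation the ``SETUPTOOLS_USE_DISTUTILS`` setting mentioned above has selected:

.. code-block:: python

    # Import setuptools first so its _distutils_hack meta path finder is in
    # place; a later import of distutils then resolves through the shim.
    import setuptools  # noqa: F401
    import distutils

    # With SETUPTOOLS_USE_DISTUTILS=local (the default) this path ends in
    # setuptools/_distutils/__init__.py; with =stdlib it points into the
    # interpreter's own standard library.
    print(distutils.__file__)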
+ +Misc +^^^^ +* #3678: Improved clib build reproducibility by sorting sources -- by :user:`danigm` +* #3684: Improved exception/traceback when invalid entry-points are specified. +* #3690: Fixed logging errors: 'underlying buffer has been detached' (issue #1631). +* #3693: Merge pypa/distutils@3e9d47e with compatibility fix for distutils.log.Log. +* #3695, #3697, #3698, #3699: Changed minor text details (spelling, spaces ...) +* #3696: Removed unnecessary ``coding: utf-8`` annotations +* #3704: Fixed interference of temporary build directories with auto-discovery. + + +v65.6.0 +------- + + +Changes +^^^^^^^ +* #3674: Sync with pypa/distutils@e0787fa, including pypa/distutils#183 updating distutils to use the Python logging framework. + + +v65.5.1 +------- + + +Misc +^^^^ +* #3638: Drop a test dependency on the ``mock`` package, always use :external+python:py:mod:`unittest.mock` -- by :user:`hroncok` +* #3659: Fixed a ReDoS vector in ``package_index``. + + +v65.5.0 +------- + + +Changes +^^^^^^^ +* #3624: Fixed editable install for multi-module/no-package ``src``-layout projects. +* #3626: Minor refactorings to support distutils using the stdlib logging module. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3419: Updated the example version numbers to be compliant with PEP-440 on the "Specifying Your Project’s Version" page of the user guide. + +Misc +^^^^ +* #3569: Improved information about conflicting entries in the current working directory + and editable install (in documentation and as an informational warning). +* #3576: Updated version of ``validate_pyproject``. + + +v65.4.1 +------- + + +Misc +^^^^ +* #3613: Fixed encoding errors in ``expand.StaticModule`` when system default encoding doesn't match expectations for source files. +* #3617: Merge with pypa/distutils@6852b20 including fix for pypa/distutils#181. + + +v65.4.0 +------- + + +Changes +^^^^^^^ +* #3609: Merge with pypa/distutils@d82d926 including support for DIST_EXTRA_CONFIG in pypa/distutils#177. + + +v65.3.0 +------- + + +Changes +^^^^^^^ +* #3547: Stop ``ConfigDiscovery.analyse_name`` from splatting the ``Distribution.name`` attribute -- by :user:`jeamland` + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3554: Changed requires to requests in the pyproject.toml example in the :doc:`Dependency management section of the Quickstart guide ` -- by :user:`mfbutner` + +Misc +^^^^ +* #3561: Fixed accidental name matching in editable hooks. + + +v65.2.0 +------- + + +Changes +^^^^^^^ +* #3553: Sync with pypa/distutils@22b9bcf, including fixed cross-compiling support and removing deprecation warning per pypa/distutils#169. + + +v65.1.1 +------- + + +Misc +^^^^ +* #3551: Avoided circular imports in meta path finder for editable installs when a + missing module has the same name as its parent. + + +v65.1.0 +------- + + +Changes +^^^^^^^ +* #3536: Remove monkeypatching of msvc9compiler. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3538: Corrected documentation on how to use the ``legacy-editable`` mode. + + +v65.0.2 +------- + + +Misc +^^^^ +* #3505: Restored distutils msvccompiler and msvc9compiler modules and marked as deprecated (pypa/distutils@c802880). + + +v65.0.1 +------- + + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3529: Added clarification to :doc:`/userguide/quickstart` about support + for ``setup.py``. + +Misc +^^^^ +* #3526: Fixed backward compatibility of editable installs and custom ``build_ext`` + commands inheriting directly from ``distutils``. 
+* #3528: Fixed ``build_meta.prepare_metadata_for_build_wheel`` when + the given ``metadata_directory`` is ``"."``. + + +v65.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #3505: Removed 'msvccompiler' and 'msvc9compiler' modules from distutils. +* #3521: Remove bdist_msi and bdist_wininst commands, which have been deprecated since Python 3.9. Use older Setuptools for these behaviors if needed. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3519: Changed the note in ``keywords`` documentation regarding editable installations + to specify which ``setuptools`` versions require a minimal ``setup.py`` file or not. + + +v64.0.3 +------- + + +Misc +^^^^ +* #3515: Fixed "inline" file copying for editable installations and + optional extensions. +* #3517: Fixed ``editable_wheel`` to ensure other commands are finalized before using + them. This should prevent errors with plugins trying to use different commands + or reinitializing them. +* #3517: Augmented filter to prevent transient/temporary source files from being + considered ``package_data`` or ``data_files``. + + +v64.0.2 +------- + + +Misc +^^^^ +* #3506: Suppress errors in custom ``build_py`` implementations when running editable + installs in favor of a warning indicating what is the most appropriate + migration path. + This is a *transitional* measure. Errors might be raised in future versions of + ``setuptools``. +* #3512: Added capability of handling namespace packages created + accidentally/purposefully via discovery configuration during editable installs. + This should emulate the behaviour of a non-editable installation. + + +v64.0.1 +------- + + +Misc +^^^^ +* #3497: Fixed ``editable_wheel`` for legacy namespaces. +* #3502: Fixed issue with editable install and single module distributions. +* #3503: Added filter to ignore external ``.egg-info`` files in manifest. + + Some plugins might rely on the fact that the ``.egg-info`` directory is + produced inside the project dir, which may not be the case in editable installs + (the ``.egg-info`` directory is produced inside the metadata directory given by + the build frontend via PEP 660 hooks). + + +v64.0.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3380: Passing some types of parameters via ``--global-option`` to setuptools PEP 517/PEP 660 backend + is now considered deprecated. The user can pass the same arbitrary parameter + via ``--build-option`` (``--global-option`` is now reserved for flags like + ``--verbose`` or ``--quiet``). + + Both ``--build-option`` and ``--global-option`` are supported as a **transitional** effort (a.k.a. "escape hatch"). + In the future a proper list of allowed ``config_settings`` may be created. + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #3265: Added implementation for *editable install* hooks (PEP 660). + + By default users will experience a *lenient* behavior which prioritises + their ability to change the distributed packages (e.g. adding new + files or removing old ones). + But they can also opt into a *strict* mode, which will try to replicate as much + as possible the behavior of the package as if it would be normally installed by + end users. The *strict* editable installation is not able to detect if files + are added or removed from the project (a new installation is required). + + This implementation might also affect plugins and customizations that assume + certain ``build`` subcommands don't run during editable installs or that they + always copy files to the temporary build directory. + + .. 
important:: + The *editable* aspect of the *editable install* supported by this implementation + is restricted to the Python modules contained in the distributed package. + Changes in binary extensions (e.g. C/C++), entry-point definitions, + dependencies, metadata, datafiles, etc. may require a new installation. + +Changes +^^^^^^^ +* #3380: Improved the handling of the ``config_settings`` parameter in both PEP 517 and + PEP 660 interfaces: + + - It is possible now to pass both ``--global-option`` and ``--build-option``. + As discussed in #1928, arbitrary arguments passed via ``--global-option`` + should be placed before the name of the setuptools' internal command, while + ``--build-option`` should come after. + + - Users can pass ``editable-mode=strict`` to select a strict behaviour for the + editable installation. +* #3392: Exposed ``get_output_mapping()`` from ``build_py`` and ``build_ext`` + subcommands. This interface is reserved for the use of ``setuptools`` + extensions; third-party packages are explicitly disallowed from calling it. + However, any implementation overwriting ``build_py`` or ``build_ext`` is + required to honour this interface. +* #3412: Added the ability to collect source files from custom build sub-commands into + ``sdist``. This allows plugins and customization scripts to automatically + add required source files in the source distribution. +* #3414: Users can *temporarily* specify an environment variable + ``SETUPTOOLS_ENABLE_FEATURES=legacy-editable`` as an escape hatch for the + :pep:`660` behavior. This setting is **transitional** and may be removed in the + future. +* #3484: Added *transient* ``compat`` mode to editable installs. + This mode will be temporarily available (to facilitate the transition period) + for those that want to emulate the behavior of the ``develop`` command + (in terms of what is added to ``sys.path``). + This mode is provided "as is", with limited support, and will be removed in + future versions of ``setuptools``. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3414: Updated :doc:`Development Mode ` to reflect on the + implementation of :pep:`660`. + + +v63.4.3 +------- + + +Misc +^^^^ +* #3496: Update to pypa/distutils@b65aa40 including more robust support for library/include dir handling in msvccompiler (pypa/distutils#153) and test suite improvements. + + +v63.4.2 +------- + + +Misc +^^^^ +* #3453: Bump vendored version of :pypi:`pyparsing` to 3.0.9. +* #3481: Add warning for potential ``install_requires`` and ``extras_require`` + misconfiguration in ``setup.cfg`` +* #3487: Modified ``pyproject.toml`` validation exception handling to + make relevant debugging information easier to spot. + + +v63.4.1 +------- + + +Misc +^^^^ +* #3482: Sync with pypa/distutils@274758f1c02048d295efdbc13d2f88d9923547f8, restoring compatibility shim in bdist.format_commands. + + +v63.4.0 +------- + + +Changes +^^^^^^^ +* #2971: ``upload_docs`` command is deprecated once again. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3443: Installed ``sphinx-hoverxref`` extension to show tooltips on internal and external references. + -- by :user:`humitos` +* #3444: Installed ``sphinx-notfound-page`` extension to generate nice 404 pages. + -- by :user:`humitos` + +Misc +^^^^ +* #3480: Merge with pypa/distutils@c397f4c + + +v63.3.0 +------- + + +Changes +^^^^^^^ +* #3475: Merge with pypa/distutils@129480b, including substantial delinting and cleanup, some refactoring around compiler logic, better messaging in cygwincompiler (pypa/distutils#161). 
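The strict editable behaviour described under v64.0.0 above is normally selected through a build frontend; a minimal sketch, assuming it is run from a project root and that a ``dist`` output directory is acceptable, of driving the standard :pep:`660` hook directly:

.. code-block:: python

    # Sketch only: pip normally calls these hooks itself, e.g. via
    # `pip install -e . --config-settings editable-mode=strict`.
    from setuptools import build_meta

    settings = {"editable-mode": "strict"}  # the opt-in discussed in #3380
    wheel_name = build_meta.build_editable("dist", config_settings=settings)
    print(wheel_name)  # file name of the editable wheel written to dist/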
+ + +v63.2.0 +------- + + +Changes +^^^^^^^ +* #3395: Included a performance optimization: ``setuptools.build_meta`` no longer tries + to :func:`compile` the setup script code before :func:`exec`-ing it. + +Misc +^^^^ +* #3435: Corrected issue in macOS framework builds on Python 3.9 not installed by homebrew (pypa/distutils#158). + + +v63.1.0 +------- + + +Changes +^^^^^^^ +* #3430: Merge with pypa/distutils@152c13d including pypa/distutils#155 (improved compatibility for editable installs on homebrew Python 3.9), pypa/distutils#150 (better handling of runtime_library_dirs on cygwin), and pypa/distutils#151 (remove warnings for namespace packages). + + +v63.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #3421: Drop setuptools' support for installing an entry point's extra requirements at load time: + - the functionality has been broken since v60.8.0. + - the mechanism to do so is deprecated (``fetch_build_eggs``). + - that use case (e.g. a custom command class entrypoint) is covered by making sure the necessary build requirements are declared. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3305: Updated the example pyproject.toml -- by :user:`jacalata` +* #3394: This updates the documentation for the ``file_finders`` hook so that + the logging recommendation aligns with the suggestion to not use + ``distutils`` directly. +* #3397: Fix reference for ``keywords`` to point to the Core Metadata Specification + instead of PEP 314 (the live standard is kept always up-to-date and + consolidates several PEPs together in a single document). +* #3402: Reordered the User Guide's Table of Contents -- by :user:`codeandfire` + + +v62.6.0 +------- + + +Changes +^^^^^^^ +* #3253: Enabled using ``file:`` for requirements in setup.cfg -- by :user:`akx` + (this feature is currently considered to be in **beta** stage). +* #3255: Enabled using ``file:`` for dependencies and optional-dependencies in pyproject.toml -- by :user:`akx` + (this feature is currently considered to be in **beta** stage). +* #3391: Updated ``attr:`` to also extract simple constants with type annotations -- by :user:`karlotness` + + +v62.5.0 +------- + + +Changes +^^^^^^^ +* #3347: Changed warnings and documentation notes about *experimental* aspect of ``pyproject.toml`` configuration: + now ``[project]`` is a fully supported configuration interface, but the ``[tool.setuptools]`` table + and sub-tables are still considered to be in **beta** stage. +* #3383: In _distutils_hack, suppress/undo the use of local distutils when select tests are imported in CPython. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3368: Added documentation page about extension modules -- by :user:`mkoeppe` +* #3371: Moved documentation from ``/userguide/commands`` to ``/deprecated/commands``. + This change was motivated by the fact that running ``python setup.py`` directly is + considered a deprecated practice. +* #3372: Consolidated sections about ``sdist`` contents and ``MANIFEST.in`` into a single page. + + Added a simple ``MANIFEST.in`` example. +* #3373: Moved remarks about using :pypi:`Cython` to the newly created page for + extension modules. +* #3374: Added clarification that using ``python setup.py egg_info`` commands to + manage project versions is only supported in a *transitional* basis, and + that eventually ``egg_info`` will be deprecated. + + Reorganized sections with tips for managing versions. +* #3378: Updated ``Quickstart`` docs to make it easier to follow for beginners. 
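For the ``attr:`` change in v62.6.0 above (#3391), a minimal sketch with a hypothetical package ``mypkg``, showing a typed constant that can now be extracted statically, without importing the package at build time:

.. code-block:: python

    # mypkg/__init__.py (hypothetical): a simple annotated constant.
    # setuptools can read this via static AST analysis when setup.cfg contains:
    #   [metadata]
    #   version = attr: mypkg.__version__
    __version__: str = "1.2.3"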
+ +Misc +^^^^ +* #3385: Modules used to parse and evaluate configuration from ``pyproject.toml`` files are + intended for internal use only and are not part of the public API. + + +v62.4.0 +------- + + +Changes +^^^^^^^ +* #3256: Added setuptools.command.build command to match distutils.command.build -- by :user:`isuruf` +* #3366: Merge with pypa/distutils@75ed79d including reformat using black, fix for Cygwin support (pypa/distutils#139), and improved support for cross compiling (pypa/distutils#144 and pypa/distutils#145). + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3355: Changes to the User Guide's Entry Points page -- by :user:`codeandfire` +* #3361: Further minor corrections to the Entry Points page -- by :user:`codeandfire` +* #3363: Rework some documentation pages to de-emphasize ``distutils`` and the history + of packaging in the Python ecosystem. The focus of these changes is to make the + documentation easier to read for new users. +* #3364: Update documentation about dependency management, removing the mention of + the deprecated ``dependency_links`` and adding some small improvements. +* #3367: Extracted text about automatic resource extraction and the zip-safe flag + from ``userguide/miscellaneous`` to ``deprecated/resource_extraction`` and + ``deprecated/zip_safe``. + + Extracted text about additional metadata files from + ``userguide/miscellaneous`` into the existing ``userguide/extension`` + document. + + Updated ``userguide/extension`` to better reflect the status of the + setuptools project. + + Removed ``userguide/functionalities_rewrite`` (a virtually empty part of the + docs). + + +v62.3.4 +------- + + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3349: Fixed two small issues preventing docs from building locally -- by :user:`codeandfire` +* #3350: Added note explaining ``package_data`` glob pattern matching for dotfiles -- by :user:`comabrewer` +* #3358: Clarify the role of the ``package_dir`` configuration. + +Misc +^^^^ +* #3354: Improve clarity in warning about unlisted namespace packages. + + +v62.3.3 +------- + + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3331: Replaced single backticks with double ones in ``CHANGES.rst`` -- by :user:`codeandfire` +* #3332: Fixed grammar/typos, modified example directory trees for src-layout and flat-layout -- by :user:`codeandfire` +* #3335: Changes to code snippets and other examples in the Data Files page of the User Guide -- by :user:`codeandfire` + +Misc +^^^^ +* #3336: Modified ``test_setup_install_includes_dependencies`` to work with custom ``PYTHONPATH`` -- by :user:`hroncok` + + +v62.3.2 +------- + + +Misc +^^^^ +* #3328: Include a first line summary to some of the existing multi-line warnings. + + +v62.3.1 +------- + + +Misc +^^^^ +* #3320: Fixed a typo which caused ``namespace_packages`` to raise an error instead of + a warning. + + +v62.3.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3262: Formally added deprecation messages for ``namespace_packages``. + The methodology that uses ``pkg_resources`` and ``namespace_packages`` for + creating namespaces was already discouraged by the :doc:`setuptools docs + ` and the + :doc:`Python Packaging User Guide `, + therefore this change just makes the deprecation more official. + Users can consider migrating to native/implicit namespaces (as introduced in + :pep:`420`). +* #3308: Relying on ``include_package_data`` to ensure sub-packages are automatically + added to the build wheel distribution (as "data") is now considered a + deprecated practice. 
+ + This behaviour was controversial and caused inconsistencies (#3260). + + Instead, projects are encouraged to properly configure ``packages`` or use + discovery tools. General information can be found in :doc:`userguide/package_discovery`. + +Changes +^^^^^^^ +* #1806: Allowed recursive globs (``**``) in ``package_data``. -- by :user:`nullableVoidPtr` +* #3206: Fixed behaviour when both ``install_requires`` (in ``setup.py``) and + ``dependencies`` (in ``pyproject.toml``) are specified. + The configuration in ``pyproject.toml`` will take precedence over ``setup.py`` + (in accordance with PEP 621). A warning was added to inform users. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3307: Added introduction to references/keywords. + + Added deprecation tags to test kwargs. + + Moved userguide/keywords to deprecated section. + + Clarified in deprecated doc what keywords came from distutils and which were added or changed by setuptools. + +Misc +^^^^ +* #3274: Updated version of vendored ``pyparsing`` to 3.0.8 to avoid problems with + upcoming deprecation in Python 3.11. +* #3292: Added warning about incompatibility with old versions of + ``importlib-metadata``. + + +v62.2.0 +------- + + +Changes +^^^^^^^ +* #3299: Optional metadata fields are now truly optional. Includes merge with pypa/distutils@a7cfb56 per pypa/distutils#138. + +Misc +^^^^ +* #3282: Added CI cache for ``setup.cfg`` examples used when testing ``setuptools.config``. + + +v62.1.0 +------- + + +Changes +^^^^^^^ +* #3258: Merge pypa/distutils@5229dad46b. + +Misc +^^^^ +* #3249: Simplified ``package_dir`` obtained via auto-discovery. + + +v62.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #3151: Made ``setup.py develop --user`` install to the user site packages directory even if it is disabled in the current interpreter. + +Changes +^^^^^^^ +* #3153: When resolving requirements use both canonical and normalized names -- by :user:`ldaniluk` +* #3167: Honor unix file mode in ZipFile when installing wheel via ``install_as_egg`` -- by :user:`delijati` + +Misc +^^^^ +* #3088: Fixed duplicated tag with the ``dist-info`` command. +* #3247: Fixed problem preventing ``readme`` specified as dynamic in ``pyproject.toml`` + from being dynamically specified in ``setup.py``. + + +v61.3.1 +------- + + +Misc +^^^^ +* #3233: Included missing test file ``setupcfg_examples.txt`` in ``sdist``. +* #3233: Added script that allows developers to download ``setupcfg_examples.txt`` prior to + running tests. By caching these files it should be possible to run the test suite + offline. + + +v61.3.0 +------- + + +Changes +^^^^^^^ +* #3229: Disabled automatic download of ``trove-classifiers`` to facilitate reproducibility. + +Misc +^^^^ +* #3229: Updated ``pyproject.toml`` validation via ``validate-pyproject`` v0.7.1. +* #3229: New internal tool made available for updating the code responsible for + the validation of ``pyproject.toml``. + This tool can be executed via ``tox -e generate-validation-code``. + + +v61.2.0 +------- + + +Changes +^^^^^^^ +* #3215: Ignored a subgroup of invalid ``pyproject.toml`` files that use the ``[project]`` + table to specify only ``requires-python`` (**transitional**). + + .. warning:: + Please note that future releases of setuptools will halt the build process + if a ``pyproject.toml`` file that does not match :doc:`the PyPA Specification + ` is given. +* #3215: Updated ``pyproject.toml`` validation, as generated by ``validate-pyproject==0.6.1``. 
+* #3218: Prevented builds from erroring if the project specifies metadata via + ``pyproject.toml``, but uses other files (e.g. ``setup.py``) to complement it, + without setting ``dynamic`` properly. + + .. important:: + This is a **transitional** behaviour. + Future releases of ``setuptools`` may simply ignore externally set metadata + not backed by ``dynamic`` or even halt the build with an error. +* #3224: Merge changes from pypa/distutils@e1d5c9b1f6 + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3217: Fixed typo in ``pyproject.toml`` example in Quickstart -- by :user:`pablo-cardenas`. + +Misc +^^^^ +* #3223: Fixed missing requirements with environment markers when + ``optional-dependencies`` is set in ``pyproject.toml``. + + +v61.1.1 +------- + + +Misc +^^^^ +* #3212: Fixed missing dependencies when running ``setup.py install``. + Note that calling ``setup.py install`` directly is still deprecated and + will be removed in future versions of ``setuptools``. + Please check the release notes for :ref:`setup_install_deprecation_note`. + + +v61.1.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3206: Changed ``setuptools.convert_path`` to an internal function that is not exposed + as part of setuptools API. + Future releases of ``setuptools`` are likely to remove this function. + +Changes +^^^^^^^ +* #3202: Changed behaviour of auto-discovery to not explicitly expand ``package_dir`` + for flat-layouts and to not use relative paths starting with ``./``. +* #3203: Prevented ``pyproject.toml`` parsing from overwriting + ``dist.include_package_data`` explicitly set in ``setup.py`` with a default + value. +* #3208: Added a warning for non-existent files listed with the ``file`` directive in + ``setup.cfg`` and ``pyproject.toml``. +* #3208: Added a default value for dynamic ``classifiers`` in ``pyproject.toml`` when + files are missing and errors are ignored. +* #3211: Disabled auto-discovery when distribution class has a ``configuration`` + attribute (e.g. when the ``setup.py`` script contains ``setup(..., + configuration=...)``). This is done to ensure extension-only packages created + with ``numpy.distutils.misc_util.Configuration`` are not broken by the + safeguard behaviour to avoid accidental multiple top-level packages in a flat-layout. + + .. note:: + Users that don't set ``packages``, ``py_modules``, or ``configuration`` are + still likely to observe the auto-discovery behavior, which may halt the + build if the project contains multiple directories and/or multiple Python + files directly under the project root. + + To disable auto-discovery please explicitly set either ``packages`` or + ``py_modules``. Alternatively you can also configure :ref:`custom-discovery`. + + +v61.0.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3068: Deprecated ``setuptools.config.read_configuration``, + ``setuptools.config.parse_configuration`` and other functions or classes + from ``setuptools.config``. + + Users that still need to parse and process configuration from ``setup.cfg`` can + import a direct replacement from ``setuptools.config.setupcfg``, however this + module is transitional and might be removed in the future + (the ``setup.cfg`` configuration format itself is likely to be deprecated in the future). + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #2894: If you purposefully want to create an *"empty distribution"*, please be aware + that some Python files (or general folders) might be automatically detected and + included. 
+ + Projects that currently don't specify both ``packages`` and ``py_modules`` in their + configuration and contain extra folders or Python files (not meant for distribution), + might see these files being included in the wheel archive or even see + the build fail. + + You can check details about the automatic discovery (and how to configure a + different behaviour) in :doc:`/userguide/package_discovery`. +* #3067: If the file ``pyproject.toml`` exists and it includes project + metadata/config (via ``[project]`` table or ``[tool.setuptools]``), + a series of new behaviors that are not backward compatible may take place: + + - The default value of ``include_package_data`` will be considered to be ``True``. + - Setuptools will attempt to validate the ``pyproject.toml`` file according + to PEP 621 specification. + - The values specified in ``pyproject.toml`` will take precedence over those + specified in ``setup.cfg`` or ``setup.py``. + +Changes +^^^^^^^ +* #2887: **[EXPERIMENTAL]** Added automatic discovery for ``py_modules`` and ``packages`` + -- by :user:`abravalheri`. + + Setuptools will try to find these values assuming that the package uses either + the *src-layout* (a ``src`` directory containing all the packages or modules), + the *flat-layout* (package directories directly under the project root), + or the *single-module* approach (an isolated Python file, directly under + the project root). + + The automatic discovery will also respect layouts that are explicitly + configured using the ``package_dir`` option. + + For backward-compatibility, this behavior will be observed **only if both** + ``py_modules`` **and** ``packages`` **are not set**. + (**Note**: specifying ``ext_modules`` might also prevent auto-discovery from + taking place) + + If setuptools detects modules or packages that are not supposed to be in the + distribution, please manually set ``py_modules`` and ``packages`` in your + ``setup.cfg`` or ``setup.py`` file. + If you are using a *flat-layout*, you can also consider switching to + *src-layout*. +* #2887: **[EXPERIMENTAL]** Added automatic configuration for the ``name`` metadata + -- by :user:`abravalheri`. + + Setuptools will adopt the name of the top-level package (or module in the case + of single-module distributions), **only when** ``name`` **is not explicitly + provided**. + + Please note that it is not possible to automatically derive a single name when + the distribution consists of multiple top-level packages or modules. +* #3066: Added vendored dependencies for :pypi:`tomli`, :pypi:`validate-pyproject`. + + These dependencies are used to read ``pyproject.toml`` files and validate them. +* #3067: **[EXPERIMENTAL]** When using ``pyproject.toml`` metadata, + the default value of ``include_package_data`` is changed to ``True``. +* #3068: **[EXPERIMENTAL]** Add support for ``pyproject.toml`` configuration + (as introduced by :pep:`621`). Configuration parameters not covered by + standards are handled in the ``[tool.setuptools]`` sub-table. + + In the future, existing ``setup.cfg`` configuration + may be automatically converted into the ``pyproject.toml`` equivalent before taking effect + (as proposed in #1688). Meanwhile users can use automated tools like + :pypi:`ini2toml` to help in the transition. + + Please note that the legacy backend is not guaranteed to work with + ``pyproject.toml`` configuration. 
+ + -- by :user:`abravalheri` +* #3125: Implicit namespaces (as introduced in :pep:`420`) are now considered by default + during :doc:`package discovery `, when + ``setuptools`` configuration and project metadata are added to the + ``pyproject.toml`` file. + + To disable this behaviour, use ``namespaces = False`` when explicitly setting + the ``[tool.setuptools.packages.find]`` section in ``pyproject.toml``. + + This change is backwards compatible and does not affect the behaviour of + configuration done in ``setup.cfg`` or ``setup.py``. +* #3152: **[EXPERIMENTAL]** Added support for ``attr:`` and ``cmdclass`` configurations + in ``setup.cfg`` and ``pyproject.toml`` when ``package_dir`` is implicitly + found via auto-discovery. +* #3178: Postponed importing ``ctypes`` when hiding files on Windows. + This helps to prevent errors in systems that might not have ``libffi`` installed. +* #3179: Merge with pypa/distutils@267dbd25ac + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3172: Added initial documentation about configuring ``setuptools`` via ``pyproject.toml`` + (using standard project metadata). + +Misc +^^^^ +* #3065: Refactored ``setuptools.config`` by separating configuration parsing (specific + to the configuration file format, e.g. ``setup.cfg``) and post-processing + (which includes directives such as ``file:`` that can be used across different + configuration formats). + + +v60.10.0 +-------- + + +Changes +^^^^^^^ +* #2971: Deprecated upload_docs command, to be removed in the future. +* #3137: Use samefile from stdlib, supported on Windows since Python 3.2. +* #3170: Adopt nspektr (vendored) to implement Distribution._install_dependencies. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3144: Added documentation on using console_scripts from setup.py, which was previously only shown in setup.cfg -- by :user:`xhlulu` +* #3148: Added clarifications about ``MANIFEST.in``, that include links to PyPUG docs + and more prominent mentions to using a revision control system plugin as an + alternative. +* #3148: Removed mention to ``pkg_resources`` as the recommended way of accessing data + files, in favour of importlib.resources. + Additionally more emphasis was put on the fact that *package data files* reside + **inside** the *package directory* (and therefore should be *read-only*). + +Misc +^^^^ +* #3120: Added workaround for intermittent failures of backend tests on PyPy. + These tests now are marked with `XFAIL + `_, instead of erroring + out directly. +* #3124: Improved configuration for :pypi:`rst-linker` (extension used to build the + changelog). +* #3133: Enhanced isolation of tests using virtual environments - PYTHONPATH is not leaking to spawned subprocesses -- by :user:`befeleme` +* #3147: Added options to provide a pre-built ``setuptools`` wheel or sdist for being + used during tests with virtual environments. + Paths for these pre-built distribution files can now be set via the environment + variables: ``PRE_BUILT_SETUPTOOLS_SDIST`` and ``PRE_BUILT_SETUPTOOLS_WHEEL``. + + v60.9.3 ------- @@ -39,6 +1192,17 @@ Changes * #3098: Bump vendored packaging to 21.3. * Removed bootstrap script. +.. warning:: Users trying to install the unmaintained :pypi:`pathlib` backport + from PyPI/``sdist``/source code may find problems when using ``setuptools >= 60.9.0``. 
+ This happens because during the installation, the unmaintained + implementation of ``pathlib`` is loaded and may cause compatibility problems + (it does not expose the same public API defined in the Python standard library). + + Whenever possible users should avoid declaring ``pathlib`` as a dependency. + An alternative is to pre-build a wheel for ``pathlib`` using a separate + virtual environment with an older version of setuptools and install the + library directly from the pre-built wheel. + v60.8.2 ------- @@ -171,7 +1335,7 @@ v60.4.0 Changes ^^^^^^^ -* #2839: Removed `requires` sorting when installing wheels as an egg dir. +* #2839: Removed ``requires`` sorting when installing wheels as an egg dir. * #2953: Fixed a bug where easy_install incorrectly parsed the Python 3.10 version string. * #3006: Fixed startup performance issue of Python interpreter due to imports of costly modules in ``_distutils_hack`` -- by :user:`tiran` @@ -511,6 +1675,8 @@ Documentation changes :user:`abravalheri` +.. _setup_install_deprecation_note: + v58.3.0 ------- @@ -610,7 +1776,7 @@ v57.5.0 Changes ^^^^^^^ -* #2712: Added implicit globbing support for `[options.data_files]` values. +* #2712: Added implicit globbing support for ``[options.data_files]`` values. Documentation changes ^^^^^^^^^^^^^^^^^^^^^ @@ -682,7 +1848,7 @@ Changes ``license_file`` (deprecated) and ``license_files`` options, relative to ``.dist-info``. - by :user:`cdce8p` * #2678: Moved Setuptools' own entry points into declarative config. -* #2680: Vendored `more_itertools `_ for Setuptools. +* #2680: Vendored :pypi:`more_itertools` for Setuptools. * #2681: Setuptools own setup.py no longer declares setup_requires, but instead expects wheel to be installed as declared by pyproject.toml. Misc @@ -897,7 +2063,7 @@ Changes * #2481: Define ``create_module()`` and ``exec_module()`` methods in ``VendorImporter`` to get rid of ``ImportWarning`` -- by :user:`hroncok` * #2489: ``pkg_resources`` behavior for zipimport now matches the regular behavior, and finds - ``.egg-info`` (previoulsy would only find ``.dist-info``) -- by :user:`thatch` + ``.egg-info`` (previously would only find ``.dist-info``) -- by :user:`thatch` * #2529: Fixed an issue where version tags may be added multiple times @@ -908,7 +2074,7 @@ v51.2.0 Changes ^^^^^^^ * #2493: Use importlib.import_module() rather than the deprecated loader.load_module() - in pkg_resources namespace delaration -- by :user:`encukou` + in pkg_resources namespace declaration -- by :user:`encukou` Documentation changes ^^^^^^^^^^^^^^^^^^^^^ @@ -1610,7 +2776,7 @@ Breaking Changes * eggs are not supported * no support for the ``allow_hosts`` easy_install option (``index_url``/``find_links`` are still honored) * pip environment variables are honored (and take precedence over easy_install options) -* #1898: Removed the "upload" and "register" commands in favor of `twine `_. +* #1898: Removed the "upload" and "register" commands in favor of :pypi:`twine`. Changes ^^^^^^^ @@ -1620,7 +2786,7 @@ Changes * add support for manylinux2010 * fix use of removed 'm' ABI flag in Python 3.8 on Windows * #1861: Fix empty namespace package installation from wheel. -* #1877: Setuptools now exposes a new entry point hook "setuptools.finalize_distribution_options", enabling plugins like `setuptools_scm `_ to configure options on the distribution at finalization time. 
+* #1877: Setuptools now exposes a new entry point hook "setuptools.finalize_distribution_options", enabling plugins like :pypi:`setuptools_scm` to configure options on the distribution at finalization time. v41.6.0 @@ -2887,7 +4053,7 @@ v26.1.0 ------- * #763: ``pkg_resources.get_default_cache`` now defers to the - `appdirs project `_ to + :pypi:`appdirs` project to resolve the cache directory. Adds a vendored dependency on appdirs to pkg_resources. @@ -3879,8 +5045,7 @@ process to fail and PyPI uploads no longer accept files for 13.0. * Issue #313: Removed built-in support for subversion. Projects wishing to retain support for subversion will need to use a third party library. The - extant implementation is being ported to `setuptools_svn - `_. + extant implementation is being ported to :pypi:`setuptools_svn`. * Issue #315: Updated setuptools to hide its own loaded modules during installation of another package. This change will enable setuptools to upgrade (or downgrade) itself even when its own metadata and implementation @@ -4384,8 +5549,7 @@ process to fail and PyPI uploads no longer accept files for 13.0. * Address security vulnerability in SSL match_hostname check as reported in Python #17997. -* Prefer `backports.ssl_match_hostname - `_ for backport +* Prefer :pypi:`backports.ssl_match_hostname` for backport implementation if present. * Correct NameError in ``ssl_support`` module (``socket.error``). diff --git a/LICENSE b/LICENSE index 353924be0e..1bb5a44356 100644 --- a/LICENSE +++ b/LICENSE @@ -1,5 +1,3 @@ -Copyright Jason R. Coombs - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the diff --git a/MANIFEST.in b/MANIFEST.in index 3e8f09de37..ac3308ed52 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -15,3 +15,4 @@ include launcher.c include msvc-build-launcher.cmd include pytest.ini include tox.ini +include setuptools/tests/config/setupcfg_examples.txt diff --git a/README.rst b/README.rst index e421de51b8..0bb27abd11 100644 --- a/README.rst +++ b/README.rst @@ -1,15 +1,7 @@ -.. image:: https://raw.githubusercontent.com/pypa/setuptools/main/docs/images/banner-640x320.svg - :align: center - -| - .. image:: https://img.shields.io/pypi/v/setuptools.svg - :target: `PyPI link`_ + :target: https://pypi.org/project/setuptools .. image:: https://img.shields.io/pypi/pyversions/setuptools.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/setuptools .. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22 @@ -22,7 +14,7 @@ .. image:: https://img.shields.io/readthedocs/setuptools/latest.svg :target: https://setuptools.pypa.io -.. image:: https://img.shields.io/badge/skeleton-2022-informational +.. image:: https://img.shields.io/badge/skeleton-2023-informational :target: https://blog.jaraco.com/skeleton .. 
image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py index 605a6edc7c..f987a5367f 100644 --- a/_distutils_hack/__init__.py +++ b/_distutils_hack/__init__.py @@ -14,22 +14,26 @@ def warn_distutils_present(): # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 return import warnings + warnings.warn( "Distutils was imported before Setuptools, but importing Setuptools " "also replaces the `distutils` module in `sys.modules`. This may lead " "to undesirable behaviors or errors. To avoid these issues, avoid " "using distutils directly, ensure that setuptools is installed in the " "traditional way (e.g. not an editable install), and/or make sure " - "that setuptools is always imported before distutils.") + "that setuptools is always imported before distutils." + ) def clear_distutils(): if 'distutils' not in sys.modules: return import warnings + warnings.warn("Setuptools is replacing distutils.") mods = [ - name for name in sys.modules + name + for name in sys.modules if name == "distutils" or name.startswith("distutils.") ] for name in mods: @@ -46,6 +50,7 @@ def enabled(): def ensure_local_distutils(): import importlib + clear_distutils() # With the DistutilsMetaFinder in place, @@ -82,7 +87,9 @@ def match(self, string): class DistutilsMetaFinder: def find_spec(self, fullname, path, target=None): - if path is not None: + # optimization: only consider top level modules and those + # found in the CPython test suite. + if path is not None and not fullname.startswith('test.'): return method_name = 'spec_for_{fullname}'.format(**locals()) @@ -111,7 +118,6 @@ def spec_for_distutils(self): return class DistutilsLoader(importlib.abc.Loader): - def create_module(self, spec): mod.__name__ = 'distutils' return mod @@ -147,9 +153,9 @@ def pip_imported_during_build(cls): Detect if pip is being imported in a build script. Ref #2355. """ import traceback + return any( - cls.frame_file_is_setup(frame) - for frame, line in traceback.walk_stack(None) + cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) ) @staticmethod @@ -160,6 +166,35 @@ def frame_file_is_setup(frame): # some frames may not have __file__ (#2940) return frame.f_globals.get('__file__', '').endswith('setup.py') + def spec_for_sensitive_tests(self): + """ + Ensure stdlib distutils when running select tests under CPython. 
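+
+        Those tests import ``distutils`` directly, so the finder clears the
+        vendored copy from ``sys.modules`` and stops answering for
+        ``distutils``, letting the standard library copy be found instead.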
+ + python/cpython#91169 + """ + clear_distutils() + self.spec_for_distutils = lambda: None + + sensitive_tests = ( + [ + 'test.test_distutils', + 'test.test_peg_generator', + 'test.test_importlib', + ] + if sys.version_info < (3, 10) + else [ + 'test.test_distutils', + ] + ) + + +for name in DistutilsMetaFinder.sensitive_tests: + setattr( + DistutilsMetaFinder, + f'spec_for_{name}', + DistutilsMetaFinder.spec_for_sensitive_tests, + ) + DISTUTILS_FINDER = DistutilsMetaFinder() diff --git a/bootstrap.egg-info/entry_points.txt b/bootstrap.egg-info/entry_points.txt index c00d1d3a02..a21ca22709 100644 --- a/bootstrap.egg-info/entry_points.txt +++ b/bootstrap.egg-info/entry_points.txt @@ -2,6 +2,7 @@ egg_info = setuptools.command.egg_info:egg_info build_py = setuptools.command.build_py:build_py sdist = setuptools.command.sdist:sdist +editable_wheel = setuptools.command.editable_wheel:editable_wheel [distutils.setup_keywords] include_package_data = setuptools.dist:assert_bool diff --git a/changelog.d/2971.change.rst b/changelog.d/2971.change.rst deleted file mode 100644 index b9a093b430..0000000000 --- a/changelog.d/2971.change.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecated upload_docs command, to be removed in the future. diff --git a/changelog.d/3120.misc.rst b/changelog.d/3120.misc.rst deleted file mode 100644 index 3531a0abfc..0000000000 --- a/changelog.d/3120.misc.rst +++ /dev/null @@ -1,4 +0,0 @@ -Added workaround for intermittent failures of backend tests on PyPy. -These tests now are marked with `XFAIL -`_, instead of erroring -out directly. diff --git a/changelog.d/3124.misc.rst b/changelog.d/3124.misc.rst deleted file mode 100644 index aba19b808e..0000000000 --- a/changelog.d/3124.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improved configuration for :pypi:`rst-linker` (extension used to build the -changelog). diff --git a/changelog.d/3915.misc.rst b/changelog.d/3915.misc.rst new file mode 100644 index 0000000000..cd962b2c6e --- /dev/null +++ b/changelog.d/3915.misc.rst @@ -0,0 +1 @@ +Adequate tests to the latest changes in ``virtualenv`` for Python 3.12. diff --git a/changelog.d/README.rst b/changelog.d/README.rst index 49b4d563c4..58f0a0f5f9 100644 --- a/changelog.d/README.rst +++ b/changelog.d/README.rst @@ -21,8 +21,7 @@ recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -``setuptools`` uses `towncrier `_ -for changelog management. +``setuptools`` uses :pypi:`towncrier` for changelog management. To submit a change note about your PR, add a text file into the ``changelog.d/`` folder. It should contain an explanation of what applying this PR will change in the way @@ -91,4 +90,4 @@ File :file:`changelog.d/2355.change.rst`: (``tool.towncrier.type``). .. 
_Towncrier philosophy: - https://towncrier.readthedocs.io/en/actual-freaking-docs/#philosophy + https://towncrier.readthedocs.io/en/latest/#philosophy diff --git a/conftest.py b/conftest.py index 43f33ba45a..2271ec3ed8 100644 --- a/conftest.py +++ b/conftest.py @@ -19,6 +19,7 @@ def pytest_addoption(parser): def pytest_configure(config): config.addinivalue_line("markers", "integration: integration tests") + config.addinivalue_line("markers", "uses_network: tests may try to download files") collect_ignore = [ @@ -31,6 +32,7 @@ def pytest_configure(config): 'pkg_resources/tests/data', 'setuptools/_vendor', 'pkg_resources/_vendor', + 'setuptools/config/_validate_pyproject', ] diff --git a/docs/artwork.rst b/docs/artwork.rst index 907e62a667..d815561e9d 100644 --- a/docs/artwork.rst +++ b/docs/artwork.rst @@ -65,7 +65,7 @@ Inspiration This design was inspired by :user:`cajhne`'s `original proposal`_ and the ancient symbol of the ouroboros_. It features a snake moving in a circular trajectory not only as a reference to -the Python programming language but also to the `wheel package format`_ as one +the Python programming language but also to the :pep:`wheel package format <427>` as one of the distribution formats supported by setuptools. The shape of the snake also resembles a cog, which together with the hammer is a nod to the two words that compose the name of the project. @@ -115,5 +115,4 @@ https://github.com/pypa/setuptools or https://setuptools.pypa.io. .. _setuptools repository: https://github.com/pypa/setuptools .. _install the correct fonts: https://wiki.inkscape.org/wiki/Installing_fonts .. _original proposal: https://github.com/pypa/setuptools/issues/2227#issuecomment-653628344 -.. _wheel package format: https://www.python.org/dev/peps/pep-0427/ .. _ouroboros: https://en.wikipedia.org/wiki/Ouroboros diff --git a/docs/build_meta.rst b/docs/build_meta.rst index 1337bddbfb..aa4f190712 100644 --- a/docs/build_meta.rst +++ b/docs/build_meta.rst @@ -12,57 +12,61 @@ uses a ``setup()`` function within the ``setup.py`` script. Commands such as ``python setup.py bdist`` or ``python setup.py bdist_wheel`` generate a distribution bundle and ``python setup.py install`` installs the distribution. This interface makes it difficult to choose other packaging tools without an -overhaul. Because ``setup.py`` scripts allowed for arbitrary execution, it -proved difficult to provide a reliable user experience across environments +overhaul. Because ``setup.py`` scripts allow for arbitrary execution, it +is difficult to provide a reliable user experience across environments and history. -`PEP 517 `_ therefore came to -rescue and specified a new standard to -package and distribute Python modules. Under PEP 517: +:pep:`517` came to +the rescue and specified a new standard for packaging and distributing Python +modules. Under PEP 517: - a ``pyproject.toml`` file is used to specify what program to use - for generating distribution. + A ``pyproject.toml`` file is used to specify which program to use + to generate the distribution. - Then, two functions provided by the program, ``build_wheel(directory: str)`` - and ``build_sdist(directory: str)`` create the distribution bundle at the - specified ``directory``. The program is free to use its own configuration - script or extend the ``.toml`` file. + Two functions provided by the program, ``build_wheel(directory: str)`` + and ``build_sdist(directory: str)``, create the distribution bundle in the + specified ``directory``. 
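+
+    For illustration, these two hooks can be invoked directly from Python
+    (a minimal sketch, assuming the current directory contains a project
+    with a valid ``pyproject.toml``; installers normally call the hooks
+    for you):
+
+    .. code-block:: python
+
+        # Both hooks return the basename of the file they create inside
+        # the given output directory ("dist" here is illustrative).
+        from setuptools import build_meta
+
+        sdist_name = build_meta.build_sdist("dist")
+        wheel_name = build_meta.build_wheel("dist")  # requires the ``wheel`` package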
- Lastly, ``pip install *.whl`` or ``pip install *.tar.gz`` does the actual - installation. If ``*.whl`` is available, ``pip`` will go ahead and copy - the files into ``site-packages`` directory. If not, ``pip`` will look at - ``pyproject.toml`` and decide what program to use to 'build from source' - (the default is ``setuptools``) + The program may use its own configuration file or extend the ``.toml`` file. -With this standard, switching between packaging tools becomes a lot easier. ``build_meta`` -implements ``setuptools``' build system support. + The actual installation is done with ``pip install *.whl`` or + ``pip install *.tar.gz``. If ``*.whl`` is available, ``pip`` will go ahead and copy + its files into the ``site-packages`` directory. If not, ``pip`` will look at + ``pyproject.toml`` and decide which program to use to 'build from source'. + (Note that if there is no ``pyproject.toml`` file or the ``build-backend`` + parameter is not defined, then the fall-back behaviour is to use ``setuptools``.) + +With this standard, switching between packaging tools is a lot easier. How to use it? -------------- -Starting with a package that you want to distribute. You will need your source -scripts, a ``pyproject.toml`` file and a ``setup.cfg`` file:: +Start with a package that you want to distribute. You will need your source +files, a ``pyproject.toml`` file and a ``setup.cfg`` file:: ~/meowpkg/ pyproject.toml setup.cfg - meowpkg/__init__.py + meowpkg/ + __init__.py + module.py -The pyproject.toml file is required to specify the build system (i.e. what is +The ``pyproject.toml`` file specifies the build system (i.e. what is being used to package your scripts and install from source). To use it with -setuptools, the content would be:: +``setuptools`` the content would be:: [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" +``build_meta`` implements ``setuptools``' build system support. The ``setuptools`` package implements the ``build_sdist`` command and the ``wheel`` package implements the ``build_wheel`` command; the latter is a dependency of the former exposed via :pep:`517` hooks. Use ``setuptools``' :ref:`declarative config ` to -specify the package information:: +specify the package information in ``setup.cfg``:: [metadata] name = meowpkg @@ -72,6 +76,8 @@ specify the package information:: [options] packages = find: +.. _building: + Now generate the distribution. To build the package, use `PyPA build `_:: @@ -91,61 +97,66 @@ or:: $ pip install dist/meowpkg-0.0.1.tar.gz + +.. _backend-wrapper: + Dynamic build dependencies and other ``build_meta`` tweaks ---------------------------------------------------------- With the changes introduced by :pep:`517` and :pep:`518`, the -``setup_requires`` configuration field was made deprecated in ``setup.cfg`` and +``setup_requires`` configuration field was deprecated in ``setup.cfg`` and ``setup.py``, in favour of directly listing build dependencies in the ``requires`` field of the ``build-system`` table of ``pyproject.toml``. This approach has a series of advantages and gives package managers and -installers the ability to inspect in advance the build requirements and +installers the ability to inspect the build requirements in advance and perform a series of optimisations. -However some package authors might still need to dynamically inspect the final -users machine before deciding these requirements. 
One way of doing that, as -specified by :pep:`517`, is to "tweak" ``setuptools.build_meta`` by using a +However, some package authors might still need to dynamically inspect the final +user's machine before deciding these requirements. One way of doing that, as +specified by :pep:`517`, is to "tweak" ``setuptools.build_meta`` by using an :pep:`in-tree backend <517#in-tree-build-backends>`. -.. tip:: Before implementing a *in-tree* backend, have a look on - :pep:`PEP 508 <508#environment-markers>`. Most of the times, dependencies +.. tip:: Before implementing an *in-tree* backend, have a look at + :pep:`PEP 508 <508#environment-markers>`. Most of the time, dependencies with **environment markers** are enough to differentiate operating systems and platforms. -If you add the following configuration to your ``pyprojec.toml``: - +If you put the following configuration in your ``pyproject.toml``: .. code-block:: toml [build-system] - requires = ["setuptools", "wheel"] + requires = ["setuptools"] build-backend = "backend" backend-path = ["_custom_build"] -then you should be able to implement a thin wrapper around ``build_meta`` in +then you can implement a thin wrapper around ``build_meta`` in the ``_custom_build/backend.py`` file, as shown in the following example: .. code-block:: python from setuptools import build_meta as _orig - - prepare_metadata_for_build_wheel = _orig.prepare_metadata_for_build_wheel - build_wheel = _orig.build_wheel - build_sdist = _orig.build_sdist + from setuptools.build_meta import * - def get_requires_for_build_wheel(self, config_settings=None): + def get_requires_for_build_wheel(config_settings=None): return _orig.get_requires_for_build_wheel(config_settings) + [...] - def get_requires_for_build_sdist(self, config_settings=None): + def get_requires_for_build_sdist(config_settings=None): return _orig.get_requires_for_build_sdist(config_settings) + [...] -Note that you can override any of the functions specified in :pep:`PEP 517 -<517#build-backend-interface>`, not only the ones responsible for gathering -requirements. +.. note:: + + You can override any of the functions specified in :pep:`PEP 517 + <517#build-backend-interface>`, not only the ones responsible for gathering + requirements. It is important to ``import *`` so that the hooks that you + choose not to reimplement would be inherited from the setuptools' backend + automatically. This will also cover hooks that might be added in the future + like the ones that :pep:`660` declares. + .. important:: Make sure your backend script is included in the :doc:`source distribution `, otherwise the build will fail. @@ -153,17 +164,17 @@ requirements. and :pypi:`setuptools-svn`), or by correctly setting up :ref:`MANIFEST.in `. - If this is the first time you are using a customised backend, please have a - look on the generated ``.tar.gz`` and ``.whl``. - On POSIX systems that can be done with ``tar -tf dist/*.tar.gz`` + The generated ``.tar.gz`` and ``.whl`` files are compressed archives that + can be inspected as follows: + On POSIX systems, this can be done with ``tar -tf dist/*.tar.gz`` and ``unzip -l dist/*.whl``. - On Windows systems you can rename the ``.whl`` to ``.zip`` to be able to - inspect it on the file explorer, and use the same ``tar`` command in a - command prompt (alternativelly there are GUI programs like `7-zip`_ that - handle ``.tar.gz``). + On Windows systems, you can rename the ``.whl`` to ``.zip`` to be able to + inspect it from File Explorer. 
You can also use the above ``tar`` command in a + command prompt to inspect the ``.tar.gz`` file. Alternatively, there are GUI programs + like `7-zip`_ that handle ``.tar.gz`` and ``.whl`` files. - In general the backend script should be present in the ``.tar.gz`` (so the - project can be build from the source) but not in the ``.whl`` (otherwise the + In general, the backend script should be present in the ``.tar.gz`` (so the + project can be built from the source) but not in the ``.whl`` (otherwise the backend script would end up being distributed alongside your package). See ":doc:`/userguide/package_discovery`" for more details about package files. diff --git a/docs/conf.py b/docs/conf.py index e57131b102..716b899342 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,7 +1,13 @@ -extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker'] +extensions = [ + 'sphinx.ext.autodoc', + 'jaraco.packaging.sphinx', +] master_doc = "index" +html_theme = "furo" +# Link dates and other references in the changelog +extensions += ['rst.linker'] link_files = { '../CHANGES.rst': dict( using=dict( @@ -10,9 +16,21 @@ ), replace=[ dict( - pattern=r'(?\d+)', + pattern=r'(Issue #|\B#)(?P\d+)', url='{package_url}/issues/{issue}', ), + dict( + pattern=r'(?m:^((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n)', + with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', + ), + dict( + pattern=r'PEP[- ](?P\d+)', + url='https://peps.python.org/pep-{pep_number:0>4}/', + ), + dict( + pattern=r'(?\d+)', + url='{package_url}/pull/{pull}', + ), dict( pattern=r'BB Pull Request ?#(?P\d+)', url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}', @@ -53,10 +71,6 @@ pattern=r'[Pp]ackaging (?P\d+(\.\d+)+)', url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst', ), - dict( - pattern=r'(?\d+)', - url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/', - ), dict( pattern=r'setuptools_svn #(?P\d+)', url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}', @@ -69,15 +83,11 @@ pattern=r'pypa/(?P[\-\.\w]+)@(?P[\da-f]+)', url='{GH}/pypa/{commit_repo}/commit/{commit_number}', ), - dict( - pattern=r'^(?m)((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n', - with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', - ), ], ), } -# Be strict about any broken references: +# Be strict about any broken references nitpicky = True # Include Python intersphinx mapping to prevent failures @@ -87,16 +97,47 @@ 'python': ('https://docs.python.org/3', None), } -intersphinx_mapping.update({ - 'pypa-build': ('https://pypa-build.readthedocs.io/en/latest/', None) -}) +# Preserve authored syntax for defaults +autodoc_preserve_defaults = True + +intersphinx_mapping.update( + { + 'pip': ('https://pip.pypa.io/en/latest', None), + 'build': ('https://pypa-build.readthedocs.io/en/latest', None), + 'PyPUG': ('https://packaging.python.org/en/latest/', None), + 'packaging': ('https://packaging.pypa.io/en/latest/', None), + 'twine': ('https://twine.readthedocs.io/en/stable/', None), + 'importlib-resources': ( + 'https://importlib-resources.readthedocs.io/en/latest', + None, + ), + } +) + +# Support tooltips on references +extensions += ['hoverxref.extension'] +hoverxref_auto_ref = True +hoverxref_intersphinx = [ + 'python', + 'pip', + 'build', + 'PyPUG', + 'packaging', + 'twine', + 'importlib-resources', +] # Add support for linking usernames github_url = 'https://github.com' +github_repo_org = 'pypa' +github_repo_name = 'setuptools' +github_repo_slug = f'{github_repo_org}/{github_repo_name}' +github_repo_url = f'{github_url}/{github_repo_slug}' github_sponsors_url = 
f'{github_url}/sponsors' extlinks = { - 'user': (f'{github_sponsors_url}/%s', '@'), # noqa: WPS323 - 'pypi': ('https://pypi.org/project/%s', '%s'), + 'user': (f'{github_sponsors_url}/%s', '@%s'), # noqa: WPS323 + 'pypi': ('https://pypi.org/project/%s', '%s'), # noqa: WPS323 + 'wiki': ('https://wikipedia.org/wiki/%s', '%s'), # noqa: WPS323 } extensions += ['sphinx.ext.extlinks'] @@ -120,6 +161,13 @@ }, } +# Redirect old docs so links and references in the ecosystem don't break +extensions += ['sphinx_reredirects'] +redirects = { + "userguide/keywords": "/deprecated/changed_keywords.html", + "userguide/commands": "/deprecated/commands.html", +} + # Add support for inline tabs extensions += ['sphinx_inline_tabs'] @@ -131,6 +179,7 @@ ('envvar', 'DISTUTILS_DEBUG'), # undocumented ('envvar', 'HOME'), # undocumented ('envvar', 'PLAT'), # undocumented + ('envvar', 'DIST_EXTRA_CONFIG'), # undocumented ('py:attr', 'CCompiler.language_map'), # undocumented ('py:attr', 'CCompiler.language_order'), # undocumented ('py:class', 'distutils.dist.Distribution'), # undocumented @@ -150,6 +199,7 @@ ('py:exc', 'LibError'), # undocumented ('py:exc', 'LinkError'), # undocumented ('py:exc', 'PreprocessError'), # undocumented + ('py:exc', 'setuptools.errors.PlatformError'), # sphinx cannot find it ('py:func', 'distutils.CCompiler.new_compiler'), # undocumented # undocumented: ('py:func', 'distutils.dist.DistributionMetadata.read_pkg_file'), @@ -164,7 +214,6 @@ # Allow linking objects on other Sphinx sites seamlessly: intersphinx_mapping.update( python=('https://docs.python.org/3', None), - python2=('https://docs.python.org/2', None), ) # Add support for the unreleased "next-version" change notes @@ -177,9 +226,12 @@ extensions += ['jaraco.tidelift'] # Add icons (aka "favicons") to documentation -extensions += ['sphinx-favicon'] +extensions += ['sphinx_favicon'] html_static_path = ['images'] # should contain the folder with icons +# Add support for nice Not Found 404 pages +extensions += ['notfound.extension'] + # List of dicts with HTML attributes # static-file points to files in the html_static_path (href is computed) favicons = [ @@ -187,15 +239,13 @@ "rel": "icon", "type": "image/svg+xml", "static-file": "logo-symbol-only.svg", - "sizes": "any" + "sizes": "any", }, { # Version with thicker strokes for better visibility at smaller sizes "rel": "icon", "type": "image/svg+xml", "static-file": "favicon.svg", - "sizes": "16x16 24x24 32x32 48x48" + "sizes": "16x16 24x24 32x32 48x48", }, # rel="apple-touch-icon" does not support SVG yet ] - -intersphinx_mapping['pip'] = 'https://pip.pypa.io/en/latest', None diff --git a/docs/deprecated/changed_keywords.rst b/docs/deprecated/changed_keywords.rst new file mode 100644 index 0000000000..7db372f2fe --- /dev/null +++ b/docs/deprecated/changed_keywords.rst @@ -0,0 +1,112 @@ +New and Changed ``setup()`` Keywords +==================================== + +This document tracks historical differences between ``setuptools`` and +``distutils``. + +Since ``distutils`` was scheduled for removal from the standard library in +Python 3.12, and ``setuptools`` started its adoption, these differences became less +relevant. +Please check :doc:`/references/keywords` for a complete list of keyword +arguments that can be passed to the ``setuptools.setup()`` function and +a their full description. + +.. 
tab:: Supported by both ``distutils`` and ``setuptools``
+
+    ``name`` string
+
+    ``version`` string
+
+    ``description`` string
+
+    ``long_description`` string
+
+    ``long_description_content_type`` string
+
+    ``author`` string
+
+    ``author_email`` string
+
+    ``maintainer`` string
+
+    ``maintainer_email`` string
+
+    ``url`` string
+
+    ``download_url`` string
+
+    ``packages`` list
+
+    ``py_modules`` list
+
+    ``scripts`` list
+
+    ``ext_package`` string
+
+    ``ext_modules`` list
+
+    ``classifiers`` list
+
+    ``distclass`` Distribution subclass
+
+    ``script_name`` string
+
+    ``script_args`` list
+
+    ``options`` dictionary
+
+    ``license`` string
+
+    ``license_file`` string **deprecated**
+
+    ``license_files`` list
+
+    ``keywords`` string or list
+
+    ``platforms`` list
+
+    ``cmdclass`` dictionary
+
+    ``data_files`` list **deprecated**
+
+    ``package_dir`` dictionary
+
+    ``requires`` string or list **deprecated**
+
+    ``obsoletes`` list **deprecated**
+
+    ``provides`` list
+
+.. tab:: Added or changed by ``setuptools``
+
+    ``include_package_data`` bool
+
+    ``exclude_package_data`` dictionary
+
+    ``package_data`` dictionary
+
+    ``zip_safe`` bool
+
+    ``install_requires`` string or list
+
+    ``entry_points`` dictionary
+
+    ``extras_require`` dictionary
+
+    ``python_requires`` string
+
+    ``setup_requires`` string or list **deprecated**
+
+    ``dependency_links`` list **deprecated**
+
+    ``namespace_packages`` list
+
+    ``test_suite`` string or function **deprecated**
+
+    ``tests_require`` string or list **deprecated**
+
+    ``test_loader`` class **deprecated**
+
+    ``eager_resources`` list
+
+    ``project_urls`` dictionary
diff --git a/docs/userguide/commands.rst b/docs/deprecated/commands.rst
similarity index 92%
rename from docs/userguide/commands.rst
rename to docs/deprecated/commands.rst
index e632e550b3..d9d97a9efe 100644
--- a/docs/userguide/commands.rst
+++ b/docs/deprecated/commands.rst
@@ -1,3 +1,59 @@
+===============================
+Running ``setuptools`` commands
+===============================
+
+Historically, ``setuptools`` allowed running commands via a ``setup.py`` script
+at the root of a Python project, as indicated in the examples below::
+
+    python setup.py --help
+    python setup.py --help-commands
+    python setup.py --version
+    python setup.py sdist
+    python setup.py bdist_wheel
+
+You could also run commands in other circumstances:
+
+* ``setuptools`` projects without ``setup.py`` (e.g., ``setup.cfg``-only)::
+
+    python -c "from setuptools import setup; setup()" --help
+
+* ``distutils`` projects (with a ``setup.py`` importing ``distutils``)::
+
+    python -c "import setuptools; exec(compile(open('setup.py').read(), 'setup.py', 'exec'))" develop
+
+That is, you can simply list the normal setup commands and options following the quoted part.
+
+.. warning::
+    While it is perfectly fine for users to write ``setup.py`` files to configure
+    a package build (e.g. to specify binary extensions or customize commands),
+    on recent versions of ``setuptools``, running ``python setup.py`` directly
+    as a script is considered **deprecated**. This also means that users should
+    avoid running commands directly via ``python setup.py <command>``.
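+
+    For illustration, the same result can be achieved programmatically
+    through :pypi:`build`'s Python API (a hedged sketch complementing the
+    command line usage shown below; the paths are illustrative):
+
+    .. code-block:: python
+
+        # Requires ``pip install build``; roughly equivalent to
+        # ``python -m build``, but without environment isolation.
+        from build import ProjectBuilder
+
+        builder = ProjectBuilder(".")    # project root with pyproject.toml
+        builder.build("sdist", "dist/")  # writes the .tar.gz, returns its path
+        builder.build("wheel", "dist/")  # writes the .whl, returns its path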
+ + If you want to create :term:`sdist ` or :term:`wheel` + distributions the recommendation is to use the command line tool provided by :pypi:`build`:: + + pip install build # needs to be installed first + + python -m build # builds both sdist and wheel + python -m build --sdist + python -m build --wheel + + Build will automatically download ``setuptools`` and build the package in an + isolated environment. You can also specify specific versions of + ``setuptools``, by setting the :doc:`build requirements in pyproject.toml + `. + + If you want to install a package, you can use :pypi:`pip` or :pypi:`installer`:: + + pip install /path/to/wheel/file.whl + pip install /path/to/sdist/file.tar.gz + pip install . # replacement for python setup.py install + pip install --editable . # replacement for python setup.py develop + + pip install installer # needs to be installed first + python -m installer /path/to/wheel/file.whl + ----------------- Command Reference ----------------- diff --git a/docs/deprecated/dependency_links.rst b/docs/deprecated/dependency_links.rst new file mode 100644 index 0000000000..70e1a78e4a --- /dev/null +++ b/docs/deprecated/dependency_links.rst @@ -0,0 +1,77 @@ +Specifying dependencies that aren't in PyPI via ``dependency_links`` +==================================================================== + +.. warning:: + Dependency links support has been dropped by pip starting with version + 19.0 (released 2019-01-22). + +If your project depends on packages that don't exist on PyPI, you *may* still be +able to depend on them if they are available for download as: + +- an egg, in the standard distutils ``sdist`` format, +- a single ``.py`` file, or +- a VCS repository (Subversion, Mercurial, or Git). + +You need to add some URLs to the ``dependency_links`` argument to ``setup()``. + +The URLs must be either: + +1. direct download URLs, +2. the URLs of web pages that contain direct download links, or +3. the repository's URL + +In general, it's better to link to web pages, because it is usually less +complex to update a web page than to release a new version of your project. +You can also use a SourceForge ``showfiles.php`` link in the case where a +package you depend on is distributed via SourceForge. + +If you depend on a package that's distributed as a single ``.py`` file, you +must include an ``"#egg=project-version"`` suffix to the URL, to give a project +name and version number. (Be sure to escape any dashes in the name or version +by replacing them with underscores.) EasyInstall will recognize this suffix +and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file +as an egg. + +In the case of a VCS checkout, you should also append ``#egg=project-version`` +in order to identify for what package that checkout should be used. You can +append ``@REV`` to the URL's path (before the fragment) to specify a revision. +Additionally, you can also force the VCS being used by prepending the URL with +a certain prefix. Currently available are: + +- ``svn+URL`` for Subversion, +- ``git+URL`` for Git, and +- ``hg+URL`` for Mercurial + +A more complete example would be: + + ``vcs+proto://host/path@revision#egg=project-version`` + +Be careful with the version. It should match the one inside the project files. +If you want to disregard the version, you have to omit it both in the +``requires`` and in the URL's fragment. + +This will do a checkout (or a clone, in Git and Mercurial parlance) to a +temporary folder and run ``setup.py bdist_egg``. 
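+
+For instance, following the template above (the project name and repository
+URL here are purely illustrative, not a real dependency)::
+
+    git+https://github.com/example/meowpkg.git@v1.0#egg=meowpkg-1.0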
+ +The ``dependency_links`` option takes the form of a list of URL strings. For +example, this will cause a search of the specified page for eggs or source +distributions, if the package's dependencies aren't already installed: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + #... + dependency_links = http://peak.telecommunity.com/snapshots/ + +.. tab:: setup.py + + .. code-block:: python + + setup( + ..., + dependency_links=[ + "http://peak.telecommunity.com/snapshots/", + ], + ) diff --git a/docs/deprecated/distutils-legacy.rst b/docs/deprecated/distutils-legacy.rst index 148dc25932..8112f12eae 100644 --- a/docs/deprecated/distutils-legacy.rst +++ b/docs/deprecated/distutils-legacy.rst @@ -3,11 +3,19 @@ Porting from Distutils Setuptools and the PyPA have a `stated goal `_ to make Setuptools the reference API for distutils. -Since the 49.1.2 release, Setuptools includes a local, vendored copy of distutils (from late copies of CPython) that is disabled by default. To enable the use of this copy of distutils when invoking setuptools, set the enviroment variable: +Since the 60.0.0 release, Setuptools includes a local, vendored copy of distutils (from late copies of CPython) that is enabled by default. To disable the use of this copy of distutils when invoking setuptools, set the environment variable: - SETUPTOOLS_USE_DISTUTILS=local + SETUPTOOLS_USE_DISTUTILS=stdlib + +.. warning:: + Please note that this also affects how ``distutils.cfg`` files inside stdlib's ``distutils`` + package directory are processed. + Unless ``SETUPTOOLS_USE_DISTUTILS=stdlib``, they will have no effect on the build process. + + You can still use a global user config file, ``~/.pydistutils.cfg`` (POSIX) or ``%USERPROFILE%/pydistutils.cfg`` (Windows), + or use the environment variable :ref:`DIST_EXTRA_CONFIG ` to point to another + supplementary configuration file. -This behavior is planned to become the default. Prefer Setuptools ----------------- @@ -16,16 +24,19 @@ As Distutils is deprecated, any usage of functions or objects from distutils is ``distutils.core.setup`` → ``setuptools.setup`` -``distutils.cmd.Command`` → ``setuptools.Command`` +``distutils.cmd.Command`` or ``distutils.core.Command`` → ``setuptools.Command`` ``distutils.command.{build_clib,build_ext,build_py,sdist}`` → ``setuptools.command.*`` -``distutils.log`` → (no replacement yet) +``distutils.log`` → :mod:`logging` (standard library) -``distutils.version.*`` → ``packaging.version.*`` +``distutils.version.*`` → :doc:`packaging.version.* ` ``distutils.errors.*`` → ``setuptools.errors.*`` [#errors]_ + +Migration advice is also provided by :pep:`PEP 632 <632#migration-advice>`. + If a project relies on uses of ``distutils`` that do not have a suitable replacement above, please search the `Setuptools issue tracker `_ and file a request, describing the use-case so that Setuptools' maintainers can investigate. Please provide enough detail to help the maintainers understand how distutils is used, what value it provides, and why that behavior should be supported. diff --git a/docs/deprecated/distutils/apiref.rst b/docs/deprecated/distutils/apiref.rst index f00ed74c69..278471a256 100644 --- a/docs/deprecated/distutils/apiref.rst +++ b/docs/deprecated/distutils/apiref.rst @@ -1845,30 +1845,6 @@ Subclasses of :class:`Command` must define the following methods. :synopsis: Build a "dumb" installer - a simple archive of files -.. 
% todo - - -:mod:`distutils.command.bdist_msi` --- Build a Microsoft Installer binary package -================================================================================= - -.. module:: distutils.command.bdist_msi - :synopsis: Build a binary distribution as a Windows MSI file - -.. class:: bdist_msi - -.. deprecated:: 3.9 - Use bdist_wheel (wheel packages) instead. - - Builds a `Windows Installer`_ (.msi) binary package. - - .. _Windows Installer: https://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx - - In most cases, the ``bdist_msi`` installer is a better choice than the - ``bdist_wininst`` installer, because it provides better support for - Win64 platforms, allows administrators to perform non-interactive - installations, and allows installation through group policies. - - :mod:`distutils.command.bdist_rpm` --- Build a binary distribution as a Redhat RPM and SRPM =========================================================================================== @@ -1879,19 +1855,6 @@ Subclasses of :class:`Command` must define the following methods. .. % todo -:mod:`distutils.command.bdist_wininst` --- Build a Windows installer -==================================================================== - -.. module:: distutils.command.bdist_wininst - :synopsis: Build a Windows installer - -.. deprecated:: 3.8 - Use bdist_wheel (wheel packages) instead. - - -.. % todo - - :mod:`distutils.command.sdist` --- Build a source distribution ============================================================== diff --git a/docs/deprecated/distutils/builtdist.rst b/docs/deprecated/distutils/builtdist.rst index e032c03e22..052a5850f9 100644 --- a/docs/deprecated/distutils/builtdist.rst +++ b/docs/deprecated/distutils/builtdist.rst @@ -96,11 +96,6 @@ The available formats for built distributions are: +-------------+------------------------------+---------+ | ``sdux`` | HP-UX :program:`swinstall` | | +-------------+------------------------------+---------+ -| ``wininst`` | self-extracting ZIP file for | \(4) | -| | Windows | | -+-------------+------------------------------+---------+ -| ``msi`` | Microsoft Installer. | | -+-------------+------------------------------+---------+ .. versionchanged:: 3.5 Added support for the ``xztar`` format. @@ -141,16 +136,6 @@ generated by each, are: +--------------------------+-------------------------------------+ | :command:`bdist_rpm` | rpm, srpm | +--------------------------+-------------------------------------+ -| :command:`bdist_wininst` | wininst | -+--------------------------+-------------------------------------+ -| :command:`bdist_msi` | msi | -+--------------------------+-------------------------------------+ - -.. note:: - bdist_wininst is deprecated since Python 3.8. - -.. note:: - bdist_msi is deprecated since Python 3.9. The following sections give details on the individual :command:`bdist_\*` commands. @@ -191,7 +176,7 @@ easily specify multiple formats in one run. If you need to do both, you can explicitly specify multiple :command:`bdist_\*` commands and their options:: python setup.py bdist_rpm --packager="John Doe " \ - bdist_wininst --target-version="2.0" + bdist_dumb --dumb-option=foo Creating RPM packages is driven by a :file:`.spec` file, much as using the Distutils is driven by the setup script. To make your life easier, the @@ -298,62 +283,6 @@ file winds up deep in the "build tree," in a temporary directory created by .. % \command{bdist\_rpm} command with one that writes whatever else you want .. % to the \file{.spec} file.) - -.. 
_creating-wininst: - -Creating Windows Installers -=========================== - -.. warning:: - bdist_wininst is deprecated since Python 3.8. - -.. warning:: - bdist_msi is deprecated since Python 3.9. - -Executable installers are the natural format for binary distributions on -Windows. They display a nice graphical user interface, display some information -about the module distribution to be installed taken from the metadata in the -setup script, let the user select a few options, and start or cancel the -installation. - -Since the metadata is taken from the setup script, creating Windows installers -is usually as easy as running:: - - python setup.py bdist_wininst - -or the :command:`bdist` command with the :option:`!--formats` option:: - - python setup.py bdist --formats=wininst - -If you have a pure module distribution (only containing pure Python modules and -packages), the resulting installer will be version independent and have a name -like :file:`foo-1.0.win32.exe`. Note that creating ``wininst`` binary -distributions in only supported on Windows systems. - -If you have a non-pure distribution, the extensions can only be created on a -Windows platform, and will be Python version dependent. The installer filename -will reflect this and now has the form :file:`foo-1.0.win32-py2.0.exe`. You -have to create a separate installer for every Python version you want to -support. - -The installer will try to compile pure modules into :term:`bytecode` after installation -on the target system in normal and optimizing mode. If you don't want this to -happen for some reason, you can run the :command:`bdist_wininst` command with -the :option:`!--no-target-compile` and/or the :option:`!--no-target-optimize` -option. - -By default the installer will display the cool "Python Powered" logo when it is -run, but you can also supply your own 152x261 bitmap which must be a Windows -:file:`.bmp` file with the :option:`!--bitmap` option. - -The installer will also display a large title on the desktop background window -when it is run, which is constructed from the name of your distribution and the -version number. This can be changed to another text by using the -:option:`!--title` option. - -The installer file will be written to the "distribution directory" --- normally -:file:`dist/`, but customizable with the :option:`!--dist-dir` option. - .. _cross-compile-windows: Cross-compiling on Windows @@ -370,12 +299,7 @@ For example, on a 32bit version of Windows, you could execute:: python setup.py build --plat-name=win-amd64 -to build a 64bit version of your extension. The Windows Installers also -support this option, so the command:: - - python setup.py build --plat-name=win-amd64 bdist_wininst - -would create a 64bit installation executable on your 32bit version of Windows. +to build a 64bit version of your extension. To cross-compile, you must download the Python source code and cross-compile Python itself for the platform you are targeting - it is not possible from a @@ -462,18 +386,3 @@ built-in functions in the installation script. and *iconindex* is the index of the icon in the file *iconpath*. Again, for details consult the Microsoft documentation for the :class:`IShellLink` interface. - - -Vista User Access Control (UAC) -=============================== - -Starting with Python 2.6, bdist_wininst supports a :option:`!--user-access-control` -option. 
The default is 'none' (meaning no UAC handling is done), and other -valid values are 'auto' (meaning prompt for UAC elevation if Python was -installed for all users) and 'force' (meaning always prompt for elevation). - -.. note:: - bdist_wininst is deprecated since Python 3.8. - -.. note:: - bdist_msi is deprecated since Python 3.9. diff --git a/docs/deprecated/distutils/commandref.rst b/docs/deprecated/distutils/commandref.rst index 0f6fe2aba8..d02b38c336 100644 --- a/docs/deprecated/distutils/commandref.rst +++ b/docs/deprecated/distutils/commandref.rst @@ -101,6 +101,3 @@ anything except backslash or colon. .. % \subsection{\protect\command{bdist}} .. % \subsection{\protect\command{bdist\_dumb}} .. % \subsection{\protect\command{bdist\_rpm}} -.. % \subsection{\protect\command{bdist\_wininst}} - - diff --git a/docs/deprecated/distutils/configfile.rst b/docs/deprecated/distutils/configfile.rst index 328936fb40..ab199dcaf7 100644 --- a/docs/deprecated/distutils/configfile.rst +++ b/docs/deprecated/distutils/configfile.rst @@ -36,7 +36,8 @@ consequences: :file:`setup.py` * installers can override anything in :file:`setup.cfg` using the command-line - options to :file:`setup.py` + options to :file:`setup.py` or by pointing :envvar:`DIST_EXTRA_CONFIG` + to another configuration file The basic syntax of the configuration file is simple: @@ -141,4 +142,3 @@ split across multiple lines for readability. .. [#] This ideal probably won't be achieved until auto-configuration is fully supported by the Distutils. - diff --git a/docs/deprecated/distutils/examples.rst b/docs/deprecated/distutils/examples.rst index d0984655df..00eef73fa9 100644 --- a/docs/deprecated/distutils/examples.rst +++ b/docs/deprecated/distutils/examples.rst @@ -253,9 +253,7 @@ Running the ``check`` command will display some warnings: $ python setup.py check running check - warning: check: missing required meta-data: version, url - warning: check: missing meta-data: either (author and author_email) or - (maintainer and maintainer_email) should be supplied + warning: check: missing required meta-data: version If you use the reStructuredText syntax in the ``long_description`` field and diff --git a/docs/deprecated/distutils/extending.rst b/docs/deprecated/distutils/extending.rst index c99d3c791f..fc49461647 100644 --- a/docs/deprecated/distutils/extending.rst +++ b/docs/deprecated/distutils/extending.rst @@ -94,5 +94,3 @@ to add ``(command, filename)`` pairs to ``self.distribution.dist_files`` so that :command:`upload` can upload it to PyPI. The *filename* in the pair contains no path information, only the name of the file itself. In dry-run mode, pairs should still be added to represent what would have been created. - - diff --git a/docs/deprecated/distutils/introduction.rst b/docs/deprecated/distutils/introduction.rst index 7491b965a5..58a3128317 100644 --- a/docs/deprecated/distutils/introduction.rst +++ b/docs/deprecated/distutils/introduction.rst @@ -112,17 +112,6 @@ the setup script. The difference is which Distutils *commands* they use: the :command:`install` is more often for installers (although most developers will want to install their own code occasionally). -If you want to make things really easy for your users, you can create one or -more built distributions for them. 
For instance, if you are running on a -Windows machine, and want to make things easy for other Windows users, you can -create an executable installer (the most appropriate type of built distribution -for this platform) with the :command:`bdist_wininst` command. For example:: - - python setup.py bdist_wininst - -will create an executable installer, :file:`foo-1.0.win32.exe`, in the current -directory. - Other useful built distribution formats are RPM, implemented by the :command:`bdist_rpm` command, Solaris :program:`pkgtool` (:command:`bdist_pkgtool`), and HP-UX :program:`swinstall` diff --git a/docs/deprecated/distutils/setupscript.rst b/docs/deprecated/distutils/setupscript.rst index f49c4f893f..ec9cf34ed7 100644 --- a/docs/deprecated/distutils/setupscript.rst +++ b/docs/deprecated/distutils/setupscript.rst @@ -582,7 +582,7 @@ This information includes: | ``maintainer_email`` | email address of the | email address | \(3) | | | package maintainer | | | +----------------------+---------------------------+-----------------+--------+ -| ``url`` | home page for the package | URL | \(1) | +| ``url`` | home page for the package | URL | | +----------------------+---------------------------+-----------------+--------+ | ``description`` | short, summary | short string | | | | description of the | | | @@ -612,8 +612,8 @@ Notes: It is recommended that versions take the form *major.minor[.patch[.sub]]*. (3) - Either the author or the maintainer must be identified. If maintainer is - provided, distutils lists it as the author in :file:`PKG-INFO`. + If maintainer is provided and author is not, distutils lists maintainer as + the author in :file:`PKG-INFO`. (4) The ``long_description`` field is used by PyPI when you publish a package, diff --git a/docs/deprecated/easy_install.rst b/docs/deprecated/easy_install.rst index 76c3f608ed..ab3d38a061 100644 --- a/docs/deprecated/easy_install.rst +++ b/docs/deprecated/easy_install.rst @@ -34,7 +34,7 @@ Using "Easy Install" Installing "Easy Install" ------------------------- -Please see the `setuptools PyPI page `_ +Please see the :pypi:`setuptools` on the package index for download links and basic installation instructions for each of the supported platforms. @@ -989,9 +989,7 @@ The following section lists only the easiest and most relevant approaches [1]_. `Use "virtualenv"`_ -.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by `PEP-370`_. - -.. _PEP-370: http://www.python.org/dev/peps/pep-0370/ +.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by :pep:`370`. Use the "--user" option @@ -1020,10 +1018,7 @@ of the User installation scheme. "virtualenv" provides a version of ``easy_inst scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages. -Please refer to the `virtualenv`_ documentation for more details. - -.. _virtualenv: https://pypi.org/project/virtualenv/ - +Please refer to the :pypi:`virtualenv` documentation for more details. 
Package Index "API"
diff --git a/docs/deprecated/index.rst b/docs/deprecated/index.rst
index ce2ac006e1..0ea66cf644 100644
--- a/docs/deprecated/index.rst
+++ b/docs/deprecated/index.rst
@@ -13,9 +13,13 @@ objectives.
 .. toctree::
    :maxdepth: 1
 
-   python3
+   changed_keywords
+   dependency_links
    python_eggs
    easy_install
+   zip_safe
+   resource_extraction
    distutils/index
    distutils-legacy
    functionalities
+   commands
diff --git a/docs/deprecated/resource_extraction.rst b/docs/deprecated/resource_extraction.rst
new file mode 100644
index 0000000000..c481e339d0
--- /dev/null
+++ b/docs/deprecated/resource_extraction.rst
@@ -0,0 +1,54 @@
+.. _Automatic Resource Extraction:
+
+Automatic Resource Extraction
+=============================
+
+In a modern setup, Python packages are usually installed as directories,
+and all the files can be found in deterministic locations on the disk.
+This means that most tools expect package resources to be "real" files.
+
+There are a few occasions, however, when packages are loaded in a different way
+(e.g., from a zip file), which is incompatible with the assumptions mentioned above.
+Moreover, a package developer may also include non-extension native libraries or other files that
+C extensions may expect to be able to access.
+
+In these scenarios, the use of :mod:`importlib.resources` is recommended.
+
+Old implementations (prior to the advent of :mod:`importlib.resources`) and
+long-living projects, however, may still rely on the library ``pkg_resources``
+to access these files.
+
+If you have to support such systems, or want to provide backward compatibility
+for ``pkg_resources``, you may need to add a special configuration
+to ``setuptools`` when packaging a project.
+This can be done by listing as ``eager_resources`` (argument to ``setup()``
+in ``setup.py`` or field in ``setup.cfg``) all the files that need to be
+extracted together, whenever a C extension in the project is imported.
+
+This is especially important if your project includes shared libraries *other*
+than ``distutils``/``setuptools``-built C extensions, and those shared libraries use file
+extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
+extensions that setuptools 0.6a8 and higher automatically detects as shared
+libraries and adds to the ``native_libs.txt`` file for you. Any shared
+libraries whose names do not end with one of those extensions should be listed
+as ``eager_resources``, because they need to be present in the filesystem when
+the C extensions that link to them are used.
+
+The ``pkg_resources`` runtime for compressed packages will automatically
+extract *all* C extensions and ``eager_resources`` at the same time, whenever
+*any* C extension or eager resource is requested via the ``resource_filename()``
+API. (C extensions are imported using ``resource_filename()`` internally.)
+This ensures that C extensions will see all of the "real" files that they
+expect to see.
+
+Note also that you can list directory resource names in ``eager_resources`` as
+well, in which case the directory's contents (including subdirectories) will be
+extracted whenever any C extension or eager resource is requested.
+
+Please note that if you're not sure whether you need to use this argument, you
+don't! It's really intended to support projects with lots of non-Python
+dependencies and as a last resort for crufty projects that can't otherwise
+handle being compressed. If your package is pure Python, Python plus data
+files, or Python plus C, you really don't need this. You've got to be using
You've got to be using +either C or an external program that needs "real" files in your project before +there's any possibility of ``eager_resources`` being relevant to your project. diff --git a/docs/deprecated/zip_safe.rst b/docs/deprecated/zip_safe.rst new file mode 100644 index 0000000000..26b4566232 --- /dev/null +++ b/docs/deprecated/zip_safe.rst @@ -0,0 +1,74 @@ +Understanding the ``zip_safe`` flag +=================================== + +The ``zip_safe`` flag is a ``setuptools`` configuration mainly associated +with the ``egg`` distribution format +(which got replaced in the ecosystem by the newer ``wheel`` format) and the +``easy_install`` command (deprecated in ``setuptools`` v58.3.0). + +It is very unlikely that the values of ``zip_safe`` will affect modern +deployments that use :pypi:`pip` for installing packages. +Moreover, new users of ``setuptools`` should not attempt to create egg files +using the deprecated ``build_egg`` command. +Therefore, this flag is considered **obsolete**. + +This document, however, describes what was the historical motivation behind +this flag, and how it was used. + +Historical Motivation +--------------------- + +For some use cases (such as bundling as part of a larger application), Python +packages may be run directly from a zip file. +Not all packages, however, are capable of running in compressed form, because +they may expect to be able to access either source code or data files as +normal operating system files. + +In the past, ``setuptools`` would install a project distributed +as a zipfile or a directory (via the ``easy_install`` command or +``python setup.py install``), +the default choice being determined by the project's ``zip_safe`` flag. + +How the ``zip_safe`` flag was used? +----------------------------------- + +To set this flag, a developer would pass a boolean value for the ``zip_safe`` argument to the +``setup()`` function, or omit it. When omitted, the ``bdist_egg`` +command would analyze the project's contents to see if it could detect any +conditions that preventing the project from working in a zipfile. + +This was extremely conservative: ``bdist_egg`` would consider the +project unsafe if it contained any C extensions or datafiles whatsoever. This +does *not* mean that the project couldn't or wouldn't work as a zipfile! It just +means that the ``bdist_egg`` authors were not yet comfortable asserting that +the project *would* work. If the project did not contain any C or data files, and did not +attempt to perform ``__file__`` or ``__path__`` introspection or source code manipulation, then +there was an extremely solid chance the project will work when installed as a +zipfile. (And if the project used ``pkg_resources`` for all its data file +access, then C extensions and other data files shouldn't be a problem at all. +See the :ref:`Accessing Data Files at Runtime` section for more information.) + +The developer could manually set ``zip_safe`` to ``True`` to perform tests, +or to override the default behaviour (after checking all the warnings and +understanding the implications), this would allow ``setuptools`` to install the +project as a zip file. Alternatively, by setting ``zip_safe`` to ``False``, +developers could force ``setuptools`` to always install the project as a +directory. 
+ +Modern ways of loading packages from zip files +---------------------------------------------- + +Currently, popular Python package installers (such as :pypi:`pip`) and package +indexes (such as PyPI_) consider that distribution packages are always +installed as a directory. +It is however still possible to load packages from zip files added to +:obj:`sys.path`, thanks to the :mod:`zipimport` module +and the :mod:`importlib` machinery provided by Python standard library. + +When working with modules loaded from a zip file, it is important to keep in +mind that values of ``__file__`` and ``__path__`` might not work as expected. +Please check the documentation for :mod:`importlib.resources`, if file +locations are important for your use case. + + +.. _PyPI: https://pypi.org diff --git a/docs/index.rst b/docs/index.rst index 0f52c36074..8328f87c07 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,6 +7,10 @@ Documentation Setuptools is a fully-featured, actively-maintained, and stable library designed to facilitate packaging Python projects. +It helps developers to easily share reusable code (in the form of a library) +and programs (e.g., CLI/GUI tools implemented in Python), that can be installed +with :pypi:`pip` and uploaded to `PyPI `_. + .. toctree:: :maxdepth: 1 :hidden: diff --git a/docs/pkg_resources.rst b/docs/pkg_resources.rst index c115818961..7fd77f8d06 100644 --- a/docs/pkg_resources.rst +++ b/docs/pkg_resources.rst @@ -10,12 +10,12 @@ eggs, support for merging packages that have separately-distributed modules or subpackages, and APIs for managing Python's current "working set" of active packages. -Use of ``pkg_resources`` is discouraged in favor of -`importlib.resources `_, -`importlib.metadata `_, -and their backports (`resources `_, -`metadata `_). -Please consider using those libraries instead of pkg_resources. +.. attention:: + Use of ``pkg_resources`` is deprecated in favor of + :mod:`importlib.resources`, :mod:`importlib.metadata` + and their backports (:pypi:`importlib_resources`, :pypi:`importlib_metadata`). + Users should refrain from new usage of ``pkg_resources`` and + should work to port to importlib-based solutions. -------- diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index c26b9d497d..6173e3c228 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -2,123 +2,246 @@ Keywords ======== +The following are keywords ``setuptools.setup()`` accepts. +They allow configuring the build process for a Python distribution or adding +metadata via a ``setup.py`` script placed at the root of your project. +All of them are optional; you do not have to supply them unless you need the +associated ``setuptools`` feature. + +Metadata and configuration supplied via ``setup()`` is complementary to (and +may be overwritten by) the information present in ``setup.cfg`` and ``pyproject.toml``. +Some important metadata, such as ``name`` and ``version``, may assume +a default *degenerate* value if not specified. + +Users are strongly encouraged to use a declarative config either via +:doc:`setup.cfg ` or :doc:`pyproject.toml +` and only rely on ``setup.py`` if they need to +tap into special behaviour that requires scripting (such as building C +extensions). + +.. 
note:: + When using declarative configs via ``pyproject.toml`` + with ``setuptools<64.0.0``, users can still keep a very simple ``setup.py`` + just to ensure editable installs are supported, for example:: + + from setuptools import setup + + setup() + + Versions of ``setuptools`` ``>=64.0.0`` do not require this extra minimal + ``setup.py`` file. + +.. _keyword/name: + ``name`` A string specifying the name of the package. +.. _keyword/version: + ``version`` A string specifying the version number of the package. +.. _keyword/description: + ``description`` A string describing the package in a single line. +.. _keyword/long_description: + ``long_description`` A string providing a longer description of the package. +.. _keyword/long_description_content_type: + ``long_description_content_type`` A string specifying the content type is used for the ``long_description`` (e.g. ``text/markdown``) +.. _keyword/author: + ``author`` A string specifying the author of the package. +.. _keyword/author_email: + ``author_email`` A string specifying the email address of the package author. +.. _keyword/maintainer: + ``maintainer`` A string specifying the name of the current maintainer, if different from the author. Note that if the maintainer is provided, setuptools will use it as the author in ``PKG-INFO``. +.. _keyword/maintainer_email: + ``maintainer_email`` A string specifying the email address of the current maintainer, if different from the author. +.. _keyword/url: + ``url`` A string specifying the URL for the package homepage. +.. _keyword/download_url: + ``download_url`` A string specifying the URL to download the package. +.. _keyword/packages: + ``packages`` A list of strings specifying the packages that setuptools will manipulate. +.. _keyword/py_modules: + ``py_modules`` A list of strings specifying the modules that setuptools will manipulate. +.. _keyword/scripts: + ``scripts`` A list of strings specifying the standalone script files to be built and installed. +.. _keyword/ext_package: + ``ext_package`` A string specifying the base package name for the extensions provided by this package. +.. _keyword/ext_modules: + ``ext_modules`` A list of instances of ``setuptools.Extension`` providing the list of Python extensions to be built. +.. _keyword/classifiers: + ``classifiers`` A list of strings describing the categories for the package. +.. _keyword/distclass: + ``distclass`` A subclass of ``Distribution`` to use. +.. _keyword/script_name: + ``script_name`` A string specifying the name of the setup.py script -- defaults to ``sys.argv[0]`` +.. _keyword/script_args: + ``script_args`` A list of strings defining the arguments to supply to the setup script. +.. _keyword/options: + ``options`` A dictionary providing the default options for the setup script. +.. _keyword/license: + ``license`` A string specifying the license of the package. -``license_file`` +.. _keyword/license_file: +``license_file`` .. warning:: ``license_file`` is deprecated. Use ``license_files`` instead. -``license_files`` +.. _keyword/license_files: +``license_files`` A list of glob patterns for license related files that should be included. If neither ``license_file`` nor ``license_files`` is specified, this option defaults to ``LICEN[CS]E*``, ``COPYING*``, ``NOTICE*``, and ``AUTHORS*``. +.. _keyword/keywords: + ``keywords`` A list of strings or a comma-separated string providing descriptive - meta-data. See: `PEP 0314`_. + meta-data. See: :ref:`Core Metadata Specifications`. -.. 
_PEP 0314: https://www.python.org/dev/peps/pep-0314/
+.. _keyword/platforms:

``platforms``
   A list of strings or comma-separated string.

+.. _keyword/cmdclass:
+
``cmdclass``
   A dictionary providing a mapping of command names to ``Command``
   subclasses.

-``data_files``
+.. _keyword/data_files:

+``data_files``
   .. warning::
      ``data_files`` is deprecated. It does not work with wheels, so it
      should be avoided.

   A list of strings specifying the data files to install.

+.. _keyword/package_dir:
+
``package_dir``
-   A dictionary providing a mapping of package to directory names.
+   A dictionary that maps package names (as they will be
+   imported by the end-users) to directory paths (that actually exist in the
+   project's source tree). This configuration has two main purposes:
+
+   1. To effectively "rename" paths when building your package.
+      For example, ``package_dir={"mypkg": "dir1/dir2/code_for_mypkg"}``
+      will instruct setuptools to copy the ``dir1/dir2/code_for_mypkg/...`` files
+      as ``mypkg/...`` when building the final :term:`wheel distribution <Wheel>`.
+
+      .. attention::
+         While it is *possible* to specify arbitrary mappings, developers are
+         **STRONGLY ADVISED AGAINST** that. They should try as much as possible
+         to keep the directory names and hierarchy identical to the way they will
+         appear in the final wheel, only deviating when absolutely necessary.
+
+   2. To indicate that the relevant code is entirely contained inside
+      a specific directory (instead of directly placed under the project's root).
+      In this case, a special key is required (the empty string, ``""``),
+      for example: ``package_dir={"": "<name of the container directory>"}``.
+      All the directories inside the container directory will be copied
+      directly into the final :term:`wheel distribution <Wheel>`, but the
+      container directory itself will not.
+
+      This practice is very common in the community to help separate the
+      package implementation from auxiliary files (e.g. CI configuration files),
+      and is referred to as :ref:`src-layout`, because the container
+      directory is commonly named ``src``.
+
+   All paths in ``package_dir`` must be relative to the project root directory
+   and use a forward slash (``/``) as path separator regardless of the
+   operating system.
+
+   .. tip::
+      When using :doc:`package discovery </userguide/package_discovery>`
+      together with :doc:`setup.cfg </userguide/declarative_config>` or
+      :doc:`pyproject.toml </userguide/pyproject_config>`, it is very likely
+      that you don't need to specify a value for ``package_dir``. Please have
+      a look at the definitions of :ref:`src-layout` and :ref:`flat-layout` to
+      learn common practices on how to design a project's directory structure
+      and minimise the amount of configuration that is needed.
+
+.. _keyword/requires:

``requires``
-
   .. warning::
      ``requires`` is superseded by ``install_requires`` and should not be
      used anymore.

-``obsoletes``
+.. _keyword/obsoletes:

+``obsoletes``
   .. warning::
      ``obsoletes`` is currently ignored by ``pip``.

@@ -135,8 +258,9 @@ Keywords
   e.g. Gorgon 2.3 gets subsumed into Torqued Python 1.0. When you install
   Torqued Python, the Gorgon distribution should be removed.

-``provides``
+.. _keyword/provides:

+``provides``
   .. warning::
      ``provides`` is currently ignored by ``pip``.

@@ -164,7 +288,7 @@ Keywords
   Each package may be followed by an environment marker after a semicolon
   (e.g. ``foo; os_name == "posix"``).

-.. Below are setuptools keywords, above are distutils
+.. _keyword/include_package_data:

``include_package_data``
   If set to ``True``, this tells ``setuptools`` to automatically include any
@@ -172,12 +296,16 @@ Keywords
   your ``MANIFEST.in`` file. 
For more information, see the section on :ref:`Including Data Files`. +.. _keyword/exclude_package_data: + ``exclude_package_data`` A dictionary mapping package names to lists of glob patterns that should be *excluded* from your package directories. You can use this to trim back any excess files included by ``include_package_data``. For a complete description and examples, see the section on :ref:`Including Data Files`. +.. _keyword/package_data: + ``package_data`` A dictionary mapping package names to lists of glob patterns. For a complete description and examples, see the section on :ref:`Including Data @@ -187,17 +315,23 @@ Keywords in source control or are files that you don't want to include in your source distribution.) +.. _keyword/zip_safe: + ``zip_safe`` A boolean (True or False) flag specifying whether the project can be safely installed and run from a zip file. If this argument is not supplied, the ``bdist_egg`` command will have to analyze all of your project's contents for possible problems each time it builds an egg. +.. _keyword/install_requires: + ``install_requires`` A string or list of strings specifying what other distributions need to be installed when this one is. See the section on :ref:`Declaring Dependencies` for details and examples of the format of this argument. +.. _keyword/entry_points: + ``entry_points`` A dictionary mapping entry point group names to strings or lists of strings defining the entry points. Entry points are used to support dynamic @@ -206,28 +340,33 @@ Keywords of this argument. In addition, this keyword is used to support :ref:`Automatic Script Creation `. +.. _keyword/extras_require: + ``extras_require`` A dictionary mapping names of "extras" (optional features of your project) to strings or lists of strings specifying what other distributions must be installed to support those features. See the section on :ref:`Declaring Dependencies` for details and examples of the format of this argument. +.. _keyword/python_requires: + ``python_requires`` A string corresponding to a version specifier (as defined in PEP 440) for the Python version, used to specify the Requires-Python defined in PEP 345. -``setup_requires`` +.. _keyword/setup_requires: +``setup_requires`` .. warning:: - Using ``setup_requires`` is discouraged in favor of `PEP-518`_ + Using ``setup_requires`` is discouraged in favor of :pep:`518`. A string or list of strings specifying what other distributions need to be present in order for the *setup script* to run. ``setuptools`` will - attempt to obtain these (even going so far as to download them using - ``EasyInstall``) before processing the rest of the setup script or commands. - This argument is needed if you are using distutils extensions as part of - your build process; for example, extensions that process setup() arguments - and turn them into EGG-INFO metadata files. + attempt to obtain these before processing the + rest of the setup script or commands. This argument is needed if you + are using distutils extensions as part of your build process; for + example, extensions that process setup() arguments and turn them into + EGG-INFO metadata files. (Note: projects listed in ``setup_requires`` will NOT be automatically installed on the system where the setup script is being run. They are @@ -236,20 +375,28 @@ Keywords when the setup script is run, you should add them to ``install_requires`` **and** ``setup_requires``.) -.. _PEP-518: http://www.python.org/dev/peps/pep-0518/ +.. _keyword/dependency_links: ``dependency_links`` - .. 
warning:: ``dependency_links`` is deprecated.
      It is not supported anymore by pip.

   A list of strings naming URLs to be searched when satisfying dependencies.
   These links will be used if needed to install packages specified by
   ``setup_requires`` or ``tests_require``. They will also be written into
-   the egg's metadata for use by tools like EasyInstall to use when installing
-   an ``.egg`` file.
+   the egg's metadata for use during install by tools that support them.
+
+.. _keyword/namespace_packages:

``namespace_packages``
+   .. warning::
+      The ``namespace_packages`` implementation relies on ``pkg_resources``.
+      However, ``pkg_resources`` has some undesirable behaviours, and
+      Setuptools intends to obviate its usage in the future. Therefore,
+      ``namespace_packages`` was deprecated in favor of native/implicit
+      namespaces (:pep:`420`). Check :doc:`the Python Packaging User Guide
+      <PyPUG:guides/packaging-namespace-packages>` for more information.
+
   A list of strings naming the project's "namespace packages". A namespace
   package is a package that may be split across multiple project
   distributions. For example, Zope 3's ``zope`` package is a namespace
@@ -261,6 +408,8 @@ Keywords
   does not contain any code other than a namespace declaration. See the
   section on :ref:`Namespace Packages` for more information.

+.. _keyword/test_suite:
+
``test_suite``
   A string naming a ``unittest.TestCase`` subclass (or a package or module
   containing one or more of them, or a method of such a subclass), or naming
@@ -270,27 +419,36 @@ Keywords
   added to the tests to be run. If the named suite is a package, any
   submodules and subpackages are recursively added to the overall test suite.

-   Specifying this argument enables use of the :ref:`test` command to run the
+   Specifying this argument enables use of the :ref:`test <test>` command to run the
   specified test suite, e.g. via ``setup.py test``. See the section on the
-   :ref:`test` command below for more details.
+   :ref:`test <test>` command below for more details.
+
+   .. warning::
+      .. deprecated:: 41.5.0
+         The test command will be removed in a future version of ``setuptools``,
+         alongside any test configuration parameter.

-   New in 41.5.0: Deprecated the test command.
+.. _keyword/tests_require:

``tests_require``
   If your project's tests need one or more additional packages besides those
   needed to install it, you can use this option to specify them. It should
   be a string or list of strings specifying what other distributions need to
   be present for the package's tests to run. When you run the ``test``
-   command, ``setuptools`` will attempt to obtain these (even going
-   so far as to download them using ``EasyInstall``). Note that these
-   required projects will *not* be installed on the system where the tests
-   are run, but only downloaded to the project's setup directory if they're
-   not already installed locally.
+   command, ``setuptools`` will attempt to obtain these.
+   Note that these required projects will *not* be installed on
+   the system where the tests are run, but only downloaded to the project's setup
+   directory if they're not already installed locally.

-   New in 41.5.0: Deprecated the test command.
+   .. warning::
+      .. deprecated:: 41.5.0
+         The test command will be removed in a future version of ``setuptools``,
+         alongside any test configuration parameter.

.. _test_loader:

+.. 
_keyword/test_loader: + ``test_loader`` If you would like to use a different way of finding tests to run than what setuptools normally uses, you can specify a module name and class name in @@ -312,7 +470,12 @@ Keywords as long as you use the ``tests_require`` option to ensure that the package containing the loader class is available when the ``test`` command is run. - New in 41.5.0: Deprecated the test command. + .. warning:: + .. deprecated:: 41.5.0 + The test command will be removed in a future version of ``setuptools``, + alongside any test configuration parameter. + +.. _keyword/eager_resources: ``eager_resources`` A list of strings naming resources that should be extracted together, if @@ -330,6 +493,8 @@ Keywords mess with it. For more details on how this argument works, see the section below on :ref:`Automatic Resource Extraction`. +.. _keyword/project_urls: + ``project_urls`` An arbitrary map of URL names to hyperlinks, allowing more extensible documentation of where various resources can be found than the simple diff --git a/docs/setuptools.rst b/docs/setuptools.rst index d0fb9a9cec..5317058945 100644 --- a/docs/setuptools.rst +++ b/docs/setuptools.rst @@ -21,8 +21,9 @@ Feature Highlights: individually in setup.py * Automatically include all relevant files in your source distributions, - without needing to create a ``MANIFEST.in`` file, and without having to force - regeneration of the ``MANIFEST`` file when your source tree changes. + without needing to create a |MANIFEST.in|_ file, and without having to force + regeneration of the ``MANIFEST`` file when your source tree changes + [#manifest]_. * Automatically generate wrapper scripts or Windows (console and GUI) .exe files for any number of "main" functions in your project. (Note: this is not @@ -150,7 +151,6 @@ To use this feature: [build-system] requires = [ "setuptools >= 40.9.0", - "wheel", ] build-backend = "setuptools.build_meta" @@ -211,3 +211,17 @@ set of steps to reproduce. .. _GitHub Discussions: https://github.com/pypa/setuptools/discussions .. _setuptools bug tracker: https://github.com/pypa/setuptools/ + + +---- + + +.. [#manifest] The default behaviour for ``setuptools`` will work well for pure + Python packages, or packages with simple C extensions (that don't require + any special C header). See :ref:`Controlling files in the distribution` and + :doc:`userguide/datafiles` for more information about complex scenarios, if + you want to include other types of files. + + +.. |MANIFEST.in| replace:: ``MANIFEST.in`` +.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst index 69cf36e699..44ff742529 100644 --- a/docs/userguide/datafiles.rst +++ b/docs/userguide/datafiles.rst @@ -2,170 +2,530 @@ Data Files Support ==================== -The distutils have traditionally allowed installation of "data files", which +Old packaging installation methods in the Python ecosystem +have traditionally allowed installation of "data files", which are placed in a platform-specific location. However, the most common use case for data files distributed with a package is for use *by* the package, usually -by including the data files in the package directory. +by including the data files **inside the package directory**. -Setuptools offers three ways to specify data files to be included in your -packages. 
First, you can simply use the ``include_package_data`` keyword, -e.g.:: +Setuptools focuses on this most common type of data files and offers three ways +of specifying which files should be included in your packages, as described in +the following sections. + +include_package_data +==================== + +First, you can simply use the ``include_package_data`` keyword. +For example, if the package tree looks like this:: + + project_root_directory + ├── setup.py # and/or setup.cfg, pyproject.toml + └── src + └── mypkg + ├── __init__.py + ├── data1.rst + ├── data2.rst + ├── data1.txt + └── data2.txt + +and you supply this configuration: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + # ... + packages = find: + package_dir = + = src + include_package_data = True + + [options.packages.find] + where = src + +.. tab:: setup.py + + .. code-block:: python from setuptools import setup, find_packages setup( - ... + # ..., + packages=find_packages(where="src"), + package_dir={"": "src"}, include_package_data=True ) -This tells setuptools to install any data files it finds in your packages. -The data files must be specified via the distutils' ``MANIFEST.in`` file. -(They can also be tracked by a revision control system, using an appropriate -plugin. See the section below on :ref:`Adding Support for Revision -Control Systems` for information on how to write such plugins.) +.. tab:: pyproject.toml (**BETA**) [#beta]_ -If you want finer-grained control over what files are included (for example, -if you have documentation files in your package directories and want to exclude -them from installation), then you can also use the ``package_data`` keyword, -e.g.:: + .. code-block:: toml - from setuptools import setup, find_packages - setup( - ... - package_data={ - # If any package contains *.txt or *.rst files, include them: - "": ["*.txt", "*.rst"], - # And include any *.msg files found in the "hello" package, too: - "hello": ["*.msg"], - } - ) + [tool.setuptools] + # ... + # By default, include-package-data is true in pyproject.toml, so you do + # NOT have to specify this line. + include-package-data = true -The ``package_data`` argument is a dictionary that maps from package names to -lists of glob patterns. The globs may include subdirectory names, if the data -files are contained in a subdirectory of the package. For example, if the -package tree looks like this:: + [tool.setuptools.packages.find] + where = ["src"] - setup.py - src/ - mypkg/ - __init__.py - mypkg.txt - data/ - somefile.dat - otherdata.dat +then all the ``.txt`` and ``.rst`` files will be automatically installed with +your package, provided: -The setuptools setup file might look like this:: +1. These files are included via the |MANIFEST.in|_ file, like so:: - from setuptools import setup, find_packages - setup( - ... - packages=find_packages("src"), # include all packages under src - package_dir={"": "src"}, # tell distutils packages are under src - - package_data={ - # If any package contains *.txt files, include them: - "": ["*.txt"], - # And include any *.dat files found in the "data" subdirectory - # of the "mypkg" package, also: - "mypkg": ["data/*.dat"], - } - ) + include src/mypkg/*.txt + include src/mypkg/*.rst + +2. OR, they are being tracked by a revision control system such as Git, Mercurial + or SVN, and you have configured an appropriate plugin such as + :pypi:`setuptools-scm` or :pypi:`setuptools-svn`. 
+   (See the section below on :ref:`Adding Support for Revision
+   Control Systems` for information on how to write such plugins.)
+
+package_data
+============
+
+By default, ``include_package_data`` considers **all** non-``.py`` files found inside
+the package directory (``src/mypkg`` in this case) as data files, and includes those that
+satisfy (at least) one of the above two conditions in the source distribution, and
+consequently also in the installation of your package.
+If you want finer-grained control over what files are included, then you can also use
+the ``package_data`` keyword.
+For example, if the package tree looks like this::
+
+    project_root_directory
+    ├── setup.py        # and/or setup.cfg, pyproject.toml
+    └── src
+        └── mypkg
+            ├── __init__.py
+            ├── data1.rst
+            ├── data2.rst
+            ├── data1.txt
+            └── data2.txt

-Notice that if you list patterns in ``package_data`` under the empty string,
-these patterns are used to find files in every package, even ones that also
-have their own patterns listed. Thus, in the above example, the ``mypkg.txt``
-file gets included even though it's not listed in the patterns for ``mypkg``.
+then you can use the following configuration to capture the ``.txt`` and ``.rst`` files as
+data files:

-Also notice that if you use paths, you *must* use a forward slash (``/``) as
-the path separator, even if you are on Windows. Setuptools automatically
-converts slashes to appropriate platform-specific separators at build time.
+.. tab:: setup.cfg

-If datafiles are contained in a subdirectory of a package that isn't a package
-itself (no ``__init__.py``), then the subdirectory names (or ``*``) are required
-in the ``package_data`` argument (as shown above with ``"data/*.dat"``).
+   .. code-block:: ini

-When building an ``sdist``, the datafiles are also drawn from the
-``package_name.egg-info/SOURCES.txt`` file, so make sure that this is removed if
-the ``setup.py`` ``package_data`` list is updated before calling ``setup.py``.
+      [options]
+      # ...
+      packages = find:
+      package_dir =
+          = src

-(Note: although the ``package_data`` argument was previously only available in
-``setuptools``, it was also added to the Python ``distutils`` package as of
-Python 2.4; there is `some documentation for the feature`__ available on the
-python.org website. If using the setuptools-specific ``include_package_data``
-argument, files specified by ``package_data`` will *not* be automatically
-added to the manifest unless they are listed in the MANIFEST.in file.)
+      [options.packages.find]
+      where = src

-__ https://docs.python.org/3/distutils/setupscript.html#installing-package-data
+      [options.package_data]
+      mypkg =
+          *.txt
+          *.rst
+
+.. tab:: setup.py
+
+   .. code-block:: python
+
+      from setuptools import setup, find_packages
+      setup(
+          # ...,
+          packages=find_packages(where="src"),
+          package_dir={"": "src"},
+          package_data={"mypkg": ["*.txt", "*.rst"]}
+      )
+
+.. tab:: pyproject.toml (**BETA**) [#beta]_
+
+   .. code-block:: toml
+
+      [tool.setuptools.packages.find]
+      where = ["src"]
+
+      [tool.setuptools.package-data]
+      mypkg = ["*.txt", "*.rst"]
+
+The ``package_data`` argument is a dictionary that maps from package names to
+lists of glob patterns. Note that the data files specified using the ``package_data``
+option do not need to be included in a |MANIFEST.in|_ file, nor
+added by a revision control system plugin.
+
+.. note::
+   If your glob patterns use paths, you *must* use a forward slash (``/``) as
+   the path separator, even if you are on Windows. 
Setuptools automatically + converts slashes to appropriate platform-specific separators at build time. + +.. note:: + Glob patterns do not automatically match dotfiles (directory or file names + starting with a dot (``.``)). To include such files, you must explicitly start + the pattern with a dot, e.g. ``.*`` to match ``.gitignore``. + +If you have multiple top-level packages and a common pattern of data files for all these +packages, for example:: + + project_root_directory + ├── setup.py # and/or setup.cfg, pyproject.toml + └── src + ├── mypkg1 + │   ├── data1.rst + │   ├── data1.txt + │   └── __init__.py + └── mypkg2 + ├── data2.txt + └── __init__.py + +Here, both packages ``mypkg1`` and ``mypkg2`` share a common pattern of having ``.txt`` +data files. However, only ``mypkg1`` has ``.rst`` data files. In such a case, if you want to +use the ``package_data`` option, the following configuration will work: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + packages = find: + package_dir = + = src + + [options.packages.find] + where = src + + [options.package_data] + * = + *.txt + mypkg1 = + data1.rst + +.. tab:: setup.py + + .. code-block:: python + + from setuptools import setup, find_packages + setup( + # ..., + packages=find_packages(where="src"), + package_dir={"": "src"}, + package_data={"": ["*.txt"], "mypkg1": ["data1.rst"]}, + ) + +.. tab:: pyproject.toml (**BETA**) [#beta]_ + + .. code-block:: toml + + [tool.setuptools.packages.find] + where = ["src"] + + [tool.setuptools.package-data] + "*" = ["*.txt"] + mypkg1 = ["data1.rst"] + +Notice that if you list patterns in ``package_data`` under the empty string ``""`` in +``setup.py``, and the asterisk ``*`` in ``setup.cfg`` and ``pyproject.toml``, these +patterns are used to find files in every package. For example, we use ``""`` or ``*`` +to indicate that the ``.txt`` files from all packages should be captured as data files. +Also note how we can continue to specify patterns for individual packages, i.e. +we specify that ``data1.rst`` from ``mypkg1`` alone should be captured as well. + +.. note:: + When building an ``sdist``, the datafiles are also drawn from the + ``package_name.egg-info/SOURCES.txt`` file, so make sure that this is removed if + the ``setup.py`` ``package_data`` list is updated before calling ``setup.py``. + +.. note:: + If using the ``include_package_data`` argument, files specified by + ``package_data`` will *not* be automatically added to the manifest unless + they are listed in the |MANIFEST.in|_ file or by a plugin like + :pypi:`setuptools-scm` or :pypi:`setuptools-svn`. + +.. https://docs.python.org/3/distutils/setupscript.html#installing-package-data + +exclude_package_data +==================== Sometimes, the ``include_package_data`` or ``package_data`` options alone -aren't sufficient to precisely define what files you want included. For -example, you may want to include package README files in your revision control -system and source distributions, but exclude them from being installed. So, -setuptools offers an ``exclude_package_data`` option as well, that allows you -to do things like this:: +aren't sufficient to precisely define what files you want included. For example, +consider a scenario where you have ``include_package_data=True``, and you are using +a revision control system with an appropriate plugin. 
+
+Sometimes developers add directory-specific marker files (such as ``.gitignore``,
+``.gitkeep``, ``.gitattributes``, or ``.hgignore``). These files are probably being
+tracked by the revision control system, and therefore by default they will be
+included when the package is installed.

-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages=find_packages("src"),  # include all packages under src
-        package_dir={"": "src"},   # tell distutils packages are under src
+Supposing you want to prevent these files from being included in the
+installation (they are not relevant to Python or the package), then you could
+use the ``exclude_package_data`` option:

-        include_package_data=True,    # include everything in source control
+.. tab:: setup.cfg

-        # ...but exclude README.txt from all packages
-        exclude_package_data={"": ["README.txt"]},
-    )
+   .. code-block:: ini
+
+      [options]
+      # ...
+      packages = find:
+      package_dir =
+          = src
+      include_package_data = True
+
+      [options.packages.find]
+      where = src
+
+      [options.exclude_package_data]
+      mypkg =
+          .gitattributes
+
+.. tab:: setup.py
+
+   .. code-block:: python
+
+      from setuptools import setup, find_packages
+      setup(
+          # ...,
+          packages=find_packages(where="src"),
+          package_dir={"": "src"},
+          include_package_data=True,
+          exclude_package_data={"mypkg": [".gitattributes"]},
+      )
+
+.. tab:: pyproject.toml (**BETA**) [#beta]_
+
+   .. code-block:: toml
+
+      [tool.setuptools.packages.find]
+      where = ["src"]
+
+      [tool.setuptools.exclude-package-data]
+      mypkg = [".gitattributes"]

The ``exclude_package_data`` option is a dictionary mapping package names to
lists of wildcard patterns, just like the ``package_data`` option. And, just
-as with that option, a key of ``""`` will apply the given pattern(s) to all
-packages. However, any files that match these patterns will be *excluded*
-from installation, even if they were listed in ``package_data`` or were
-included as a result of using ``include_package_data``.
+as with that option, you can use the empty string key ``""`` in ``setup.py`` and the
+asterisk ``*`` in ``setup.cfg`` and ``pyproject.toml`` to match all top-level packages.
+
+Any files that match these patterns will be *excluded* from installation,
+even if they were listed in ``package_data`` or were included as a result of using
+``include_package_data``.
+
+Subdirectory for Data Files
+===========================
+
+A common pattern is where some (or all) of the data files are placed under
+a separate subdirectory. For example::
+
+    project_root_directory
+    ├── setup.py        # and/or setup.cfg, pyproject.toml
+    └── src
+        └── mypkg
+            ├── data
+            │   ├── data1.rst
+            │   └── data2.rst
+            ├── __init__.py
+            ├── data1.txt
+            └── data2.txt
+
+Here, the ``.rst`` files are placed under a ``data`` subdirectory inside ``mypkg``,
+while the ``.txt`` files are directly under ``mypkg``.
+
+In this case, the recommended approach is to treat ``data`` as a namespace package
+(refer to :pep:`420`). With ``package_data``,
+the configuration might look like this:
+
+.. tab:: setup.cfg
+
+   .. code-block:: ini
+
+      [options]
+      # ...
+      packages = find_namespace:
+      package_dir =
+          = src
+
+      [options.packages.find]
+      where = src
+
+      [options.package_data]
+      mypkg =
+          *.txt
+      mypkg.data =
+          *.rst
+
+.. tab:: setup.py
+
+   .. code-block:: python
+
+      from setuptools import setup, find_namespace_packages
+      setup(
+          # ...,
+          packages=find_namespace_packages(where="src"),
+          package_dir={"": "src"},
+          package_data={
+              "mypkg": ["*.txt"],
+              "mypkg.data": ["*.rst"],
+          }
+      )
+
+.. 
tab:: pyproject.toml (**BETA**) [#beta]_ + + .. code-block:: toml + + [tool.setuptools.packages.find] + # scanning for namespace packages is true by default in pyproject.toml, so + # you do NOT need to include the following line. + namespaces = true + where = ["src"] + + [tool.setuptools.package-data] + mypkg = ["*.txt"] + "mypkg.data" = ["*.rst"] + +In other words, we allow Setuptools to scan for namespace packages in the ``src`` directory, +which enables the ``data`` directory to be identified, and then, we separately specify data +files for the root package ``mypkg``, and the namespace package ``data`` under the package +``mypkg``. + +With ``include_package_data`` the configuration is simpler: you simply need to enable +scanning of namespace packages in the ``src`` directory and the rest is handled by Setuptools. + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + packages = find_namespace: + package_dir = + = src + include_package_data = True + + [options.packages.find] + where = src + +.. tab:: setup.py + + .. code-block:: python + + from setuptools import setup, find_namespace_packages + setup( + # ... , + packages=find_namespace_packages(where="src"), + package_dir={"": "src"}, + include_package_data=True, + ) + +.. tab:: pyproject.toml (**BETA**) [#beta]_ + + .. code-block:: toml + + [tool.setuptools] + # ... + # By default, include-package-data is true in pyproject.toml, so you do + # NOT have to specify this line. + include-package-data = true + + [tool.setuptools.packages.find] + # scanning for namespace packages is true by default in pyproject.toml, so + # you need NOT include the following line. + namespaces = true + where = ["src"] + +Summary +======= In summary, the three options allow you to: ``include_package_data`` - Accept all data files and directories matched by ``MANIFEST.in``. + Accept all data files and directories matched by |MANIFEST.in|_ or added by + a :ref:`plugin `. ``package_data`` Specify additional patterns to match files that may or may - not be matched by ``MANIFEST.in`` or found in source control. + not be matched by |MANIFEST.in|_ or added by + a :ref:`plugin `. ``exclude_package_data`` Specify patterns for data files and directories that should *not* be included when a package is installed, even if they would otherwise have been included due to the use of the preceding options. -NOTE: Due to the way the distutils build process works, a data file that you -include in your project and then stop including may be "orphaned" in your -project's build directories, requiring you to run ``setup.py clean --all`` to -fully remove them. This may also be important for your users and contributors -if they track intermediate revisions of your project using Subversion; be sure -to let them know when you make changes that remove files from inclusion so they -can run ``setup.py clean --all``. +.. note:: + Due to the way the build process works, a data file that you + include in your project and then stop including may be "orphaned" in your + project's build directories, requiring you to run ``setup.py clean --all`` to + fully remove them. This may also be important for your users and contributors + if they track intermediate revisions of your project using Subversion; be sure + to let them know when you make changes that remove files from inclusion so they + can run ``setup.py clean --all``. .. 
_Accessing Data Files at Runtime:

Accessing Data Files at Runtime
--------------------------------
+===============================

Typically, existing programs manipulate a package's ``__file__`` attribute in
-order to find the location of data files. However, this manipulation isn't
-compatible with PEP 302-based import hooks, including importing from zip files
-and Python Eggs. It is strongly recommended that, if you are using data files,
-you should use the :ref:`ResourceManager API` of ``pkg_resources`` to access
-them. The ``pkg_resources`` module is distributed as part of setuptools, so if
-you're using setuptools to distribute your package, there is no reason not to
-use its resource management API. See also `Importlib Resources`_ for
-a quick example of converting code that uses ``__file__`` to use
-``pkg_resources`` instead.
+order to find the location of data files. For example, if you have a structure
+like this::
+
+    project_root_directory
+    ├── setup.py        # and/or setup.cfg, pyproject.toml
+    └── src
+        └── mypkg
+            ├── data
+            │   └── data1.txt
+            ├── __init__.py
+            └── foo.py
+
+Then, in ``mypkg/foo.py``, you may try something like this in order to access
+``mypkg/data/data1.txt``:
+
+.. code-block:: python
+
+   import os
+   data_path = os.path.join(os.path.dirname(__file__), 'data', 'data1.txt')
+   with open(data_path, 'r') as data_file:
+       ...
+
+However, this manipulation isn't compatible with :pep:`302`-based import hooks,
+including importing from zip files and Python Eggs. It is strongly recommended that,
+if you are using data files, you should use :mod:`importlib.resources` to access them.
+In this case, you would do something like this:
+
+.. code-block:: python
+
+   from importlib.resources import files
+   data_text = files('mypkg.data').joinpath('data1.txt').read_text()
+
+:mod:`importlib.resources` was added in Python 3.7. However, the API illustrated in
+this code (using ``files()``) was added only in Python 3.9, [#files_api]_ and support
+for accessing data files via namespace packages was added only in Python 3.10 [#namespace_support]_
+(the ``data`` subdirectory is a namespace package under the root package ``mypkg``).
+Therefore, you may find that this code works only in Python 3.10 (and above). For other
+versions of Python, it is recommended that you use the :pypi:`importlib-resources` backport,
+which provides the latest version of this library. In this case, the only change that
+has to be made to the above code is to replace ``importlib.resources`` with ``importlib_resources``, i.e.

-.. _Importlib Resources: https://docs.python.org/3/library/importlib.html#module-importlib.resources
+.. code-block:: python
+
+   from importlib_resources import files
+   ...
+
+See :doc:`importlib-resources:using` for detailed instructions.
+
+.. tip:: Files inside the package directory should be *read-only* to avoid a
+   series of common problems (e.g. when multiple users share a common Python
+   installation, when the package is loaded from a zip file, or when multiple
+   instances of a Python application run in parallel).
+
+   If your Python package needs to write to a file for shared data or configuration,
+   you can use standard platform/OS-specific system directories, such as
+   ``~/.config/$appname`` or ``/usr/share/$appname/$version`` (Linux specific) [#system-dirs]_.
+   A common approach is to add a read-only template file to the package
+   directory that is then copied to the correct system directory if no
+   pre-existing file is found. 
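+
+   For illustration, a minimal sketch of that "copy a template on first use"
+   approach might look like the following (the package name, the
+   ``settings.toml`` template and the use of the third-party
+   :pypi:`platformdirs` helper are all hypothetical assumptions, not
+   requirements):
+
+   .. code-block:: python
+
+      from pathlib import Path
+
+      from importlib.resources import files  # or the importlib_resources backport
+      from platformdirs import user_config_dir  # third-party helper
+
+      def ensure_user_config() -> Path:
+          """Copy the read-only packaged template to a writable location."""
+          config = Path(user_config_dir("mypkg")) / "settings.toml"
+          if not config.exists():
+              config.parent.mkdir(parents=True, exist_ok=True)
+              # locate the template shipped inside the package
+              template = files("mypkg.data").joinpath("settings.toml")
+              config.write_text(template.read_text(), encoding="utf-8")
+          return config
+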
Non-Package Data Files ----------------------- +====================== Historically, ``setuptools`` by way of ``easy_install`` would encapsulate data files from the distribution into the egg (see `the old docs @@ -174,4 +534,22 @@ fall back to the platform-specific location for installing data files, there is no supported facility to reliably retrieve these resources. Instead, the PyPA recommends that any data files you wish to be accessible at -run time be included in the package. +run time be included **inside the package**. + + +---- + +.. [#beta] + Support for adding build configuration options via the ``[tool.setuptools]`` + table in the ``pyproject.toml`` file. See :doc:`/userguide/pyproject_config`. + +.. [#system-dirs] These locations can be discovered with the help of + third-party libraries such as :pypi:`platformdirs`. + +.. [#files_api] Reference: https://importlib-resources.readthedocs.io/en/latest/using.html#migrating-from-legacy + +.. [#namespace_support] Reference: https://github.com/python/importlib_resources/pull/196#issuecomment-734520374 + + +.. |MANIFEST.in| replace:: ``MANIFEST.in`` +.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst index 6f41d92b9a..fa104b10e3 100644 --- a/docs/userguide/declarative_config.rst +++ b/docs/userguide/declarative_config.rst @@ -1,8 +1,8 @@ .. _declarative config: ------------------------------------------ -Configuring setup() using setup.cfg files ------------------------------------------ +------------------------------------------------ +Configuring setuptools using ``setup.cfg`` files +------------------------------------------------ .. note:: New in 30.3.0 (8 Dec 2016). @@ -24,27 +24,25 @@ boilerplate code in some cases. [metadata] name = my_package - version = attr: src.VERSION + version = attr: my_package.VERSION + author = Josiah Carberry + author_email = josiah_carberry@brown.edu description = My package description long_description = file: README.rst, CHANGELOG.rst, LICENSE.rst keywords = one, two - license = BSD 3-Clause License + license = BSD-3-Clause classifiers = Framework :: Django - License :: OSI Approved :: BSD License Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 [options] zip_safe = False include_package_data = True packages = find: - scripts = - bin/first.py - bin/second.py + python_requires = >=3.7 install_requires = requests - importlib; python_version == "2.6" + importlib-metadata; python_version<"3.8" [options.package_data] * = *.txt, *.rst @@ -52,7 +50,7 @@ boilerplate code in some cases. [options.entry_points] console_scripts = - executable-name = package.module:function + executable-name = my_package.module:function [options.extras_require] pdf = ReportLab>=1.2; RXP @@ -60,13 +58,15 @@ boilerplate code in some cases. [options.packages.find] exclude = - src.subpackage1 - src.subpackage2 + examples* + tools* + docs* + my_package.tests* Metadata and options are set in the config sections of the same name. -* Keys are the same as the keyword arguments one provides to the ``setup()`` - function. +* Keys are the same as the :doc:`keyword arguments ` one + provides to the ``setup()`` function. * Complex values can be written comma-separated or placed one per line in *dangling* config values. The following are equivalent: @@ -93,7 +93,7 @@ Metadata and options are set in the config sections of the same name. 
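+As an aside: since these keys mirror the ``setup()`` keyword arguments, a quick
+way to check how ``setuptools`` interprets a given ``setup.cfg`` is to use its
+own config reader. This is a sketch for interactive exploration only; the
+``read_configuration`` helper lives in ``setuptools.config.setupcfg`` in recent
+versions of ``setuptools``, and the file name below is an assumption:
+
+.. code-block:: python
+
+   from setuptools.config.setupcfg import read_configuration
+
+   # Parse setup.cfg into the same structures that setup() would receive
+   conf = read_configuration("setup.cfg")
+   print(conf["metadata"].get("name"))
+   print(conf["options"].get("install_requires"))
+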
Using a ``src/`` layout
=======================
-One commonly used package configuration has all the module source code in a
+One commonly used configuration has all the Python source code in a
subdirectory (often called the ``src/`` layout), like this::
    ├── src
    │   └── mypackage
    │       ├── __init__.py
    │       └── mod1.py
    ├── setup.py
    └── setup.cfg
You can set up your ``setup.cfg`` to automatically find all your packages in
-the subdirectory like this:
+the subdirectory, using :ref:`package_dir <keyword/package_dir>`, like this:
.. code-block:: ini
@@ -119,6 +119,22 @@ the subdirectory like this:
    [options.packages.find]
    where=src
+In this example, the value for the :ref:`package_dir <keyword/package_dir>`
+configuration (i.e. ``=src``) is parsed as ``{"": "src"}``.
+The ``""`` key has a special meaning in this context, and indicates that all the
+packages are contained inside the given directory.
+Also note that the value for ``[options.packages.find] where`` matches the
+value associated with ``""`` in the ``package_dir`` dictionary.
+
+..
+   TODO: Add the following tip once the auto-discovery is no longer experimental:
+
+   Starting in version 61, ``setuptools`` can automatically infer the
+   configurations for both ``packages`` and ``package_dir`` for projects using
+   a ``src/`` layout (as long as no value is specified for ``py_modules``).
+   Please see :doc:`package discovery </userguide/package_discovery>` for more
+   details.
+
Specifying values
=================
@@ -130,7 +146,10 @@ Type names used below:
* ``list-comma`` - dangling list or string of comma-separated values
* ``list-semi`` - dangling list or string of semicolon-separated values
* ``bool`` - ``True`` is 1, yes, true
-* ``dict`` - list-comma where keys are separated from values by ``=``
+* ``dict`` - list-comma where each entry corresponds to a key/value pair,
+  with keys separated from values by ``=``.
+  If an entry starts with ``=``, the key is assumed to be an empty string
+  (e.g. ``=src`` is parsed as ``{"": "src"}``).
* ``section`` - values are read from a dedicated (sub)section
@@ -146,15 +165,24 @@
Special directives:
* ``file:`` - Value is read from a list of files and then concatenated
+  .. important::
+     The ``file:`` directive is sandboxed and won't reach anything outside the
+     project directory (i.e. the directory containing ``setup.cfg``/``pyproject.toml``).
+
   .. note::
-     The ``file:`` directive is sandboxed and won't reach anything outside
-     the directory containing ``setup.py``.
+     If you are using an old version of ``setuptools``, you might need to ensure
+     that all files referenced by the ``file:`` directive are included in the ``sdist``
+     (you can do that via ``MANIFEST.in`` or using plugins such as ``setuptools-scm``;
+     please have a look at :doc:`/userguide/miscellaneous` for more information).
+
+     .. versionchanged:: 66.1.0
+        Newer versions of ``setuptools`` will automatically add these files to the ``sdist``.

Metadata
--------
-.. note::
+.. attention::
   The aliases given below are supported for compatibility reasons,
   but their use is not advised. 
@@ -195,13 +223,13 @@ obsoletes list-comma

Options
-------

-======================= =================================== =============== =========
+======================= =================================== =============== ====================
Key                     Type                                Minimum Version Notes
-======================= =================================== =============== =========
+======================= =================================== =============== ====================
zip_safe                bool
setup_requires          list-semi                           36.7.0
-install_requires        list-semi
-extras_require          section                             [#opt-2]_
+install_requires        file:, list-semi                                    **BETA** [#opt-2]_, [#opt-6]_
+extras_require          file:, section                                      **BETA** [#opt-2]_, [#opt-6]_
python_requires         str                                 34.4.0
entry_points            file:, section                      51.0.0
scripts                 list-comma
@@ -213,27 +241,29 @@ packages                find:, find_namespace:, list-comma [#
package_dir             dict
package_data            section                                             [#opt-1]_
exclude_package_data    section
-namespace_packages      list-comma
-py_modules              list-comma                          34.4.0
+namespace_packages      list-comma                                          [#opt-5]_
+py_modules              list-comma                          34.4.0
data_files              section                             40.6.0          [#opt-4]_
-======================= =================================== =============== =========
+======================= =================================== =============== ====================

**Notes**:
.. [#opt-1] In the ``package_data`` section, a key named with a single asterisk
   (``*``) refers to all packages, in lieu of the empty string used in
   ``setup.py``.

-.. [#opt-2] In the ``extras_require`` section, values are parsed as ``list-semi``.
-   This implies that in order to include markers, they **must** be *dangling*:
+.. [#opt-2] In ``install_requires`` and ``extras_require``, values are parsed as ``list-semi``.
+   This implies that in order to include markers, each requirement **must** be *dangling*
+   in a new line:

   .. code-block:: ini

+      [options]
+      install_requires =
+          importlib-metadata; python_version<"3.8"
+
      [options.extras_require]
-      rest = docutils>=0.3; pack ==1.1, ==1.3
-      pdf =
-          ReportLab>=1.2
-          RXP
-          importlib-metadata; python_version < "3.8"
+      all =
+          importlib-metadata; python_version < "3.8"

.. [#opt-3] The ``find:`` and ``find_namespace:`` directive can be further configured
   in a dedicated subsection ``options.packages.find``. This subsection accepts the
@@ -246,6 +276,17 @@ data_files section 40.6.0 [#
.. [#opt-4] ``data_files`` is deprecated and should be avoided.
   Please check :doc:`/userguide/datafiles` for more information.

+.. [#opt-5] ``namespace_packages`` is deprecated in favour of native/implicit
+   namespaces (:pep:`420`). Check :doc:`the Python Packaging User Guide
+   <PyPUG:guides/packaging-namespace-packages>` for more information.
+
+.. [#opt-6] ``file:`` directives for reading requirements are supported since version 62.6.
+   The format for the file resembles a ``requirements.txt`` file;
+   however, please keep in mind that all non-comment lines must conform with :pep:`508`
+   (``pip``-specific syntaxes, e.g. ``-c/-r/-e`` flags, are not supported).
+   Library developers should avoid tightly pinning their dependencies to a specific
+   version (e.g. via a "locked" requirements file).
+

Compatibility with other tools
==============================
diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst
index ea2fc5563d..33aaf6c656 100644
--- a/docs/userguide/dependency_management.rst
+++ b/docs/userguide/dependency_management.rst
@@ -6,36 +6,39 @@ There are three types of dependency styles offered by setuptools:
1) build system requirement, 2) required dependency and 3) optional dependency.

-.. 
Note::
-    Packages that are added to dependency can be optionally specified with the
-    version by following `PEP 440 `_
+Each dependency, regardless of type, needs to be specified according to :pep:`508`
+and :pep:`440`.
+This allows adding version :pep:`range restrictions <440#version-specifiers>`
+and :ref:`environment markers <environment-markers>`.

+.. _build-requires:
+
Build system requirement
========================

-Package requirement
--------------------
After organizing all the scripts and files and getting ready for packaging,
-there needs to be a way to tell Python what programs it needs to actually
-do the packaging (in our case, ``setuptools`` of course). Usually,
-you also need the ``wheel`` package as well since it is recommended that you
-upload a ``.whl`` file to PyPI alongside your ``.tar.gz`` file. Unlike the
-other two types of dependency keyword, this one is specified in your
-``pyproject.toml`` file (if you have forgot what this is, go to
-:doc:`quickstart` or (WIP)):
+there needs to be a way to specify what programs and libraries are actually needed
+to do the packaging (in our case, ``setuptools`` of course).
+This needs to be specified in your ``pyproject.toml`` file
+(if you have forgotten what this is, go to :doc:`/userguide/quickstart` or :doc:`/build_meta`):

-.. code-block:: ini
+.. code-block:: toml

   [build-system]
   requires = ["setuptools"]
   #...

+Please note that you should also include here any other ``setuptools`` plugin
+(e.g., :pypi:`setuptools-scm`, :pypi:`setuptools-golang`, :pypi:`setuptools-rust`)
+or build-time dependency (e.g., :pypi:`Cython`, :pypi:`cppy`, :pypi:`pybind11`).
+
.. note::
-    This used to be accomplished with the ``setup_requires`` keyword but is
-    now considered deprecated in favor of the PEP 517 style described above.
+    In previous versions of ``setuptools``,
+    this used to be accomplished with the ``setup_requires`` keyword but is
+    now considered deprecated in favor of the :pep:`517` style described above.
    To peek into how this legacy keyword is used, consult our :doc:`guide on
-    deprecated practice (WIP) <../deprecated/index>`
+    deprecated practice (WIP) </deprecated/index>`.

.. _Declaring Dependencies:

@@ -43,10 +46,22 @@ other two types of dependency keyword, this one is specified in your
Declaring required dependency
=============================
This is where a package declares its core dependencies, without which it won't
-be able to run. ``setuptools`` support automatically download and install
+be able to run. ``setuptools`` supports automatically downloading and installing
these dependencies when the package is installed. Although there is more
finesse to it, let's start with a simple example.

+.. tab:: pyproject.toml
+
+   .. code-block:: toml
+
+      [project]
+      # ...
+      dependencies = [
+          "docutils",
+          "BazSpam == 1.1",
+      ]
+      # ...
+
.. tab:: setup.cfg

   .. code-block:: ini
@@ -70,20 +85,33 @@ finesse to it, let's start with a simple example.
   )


-When your project is installed (e.g. using pip), all of the dependencies not
-already installed will be located (via PyPI), downloaded, built (if necessary),
+When your project is installed (e.g., using :pypi:`pip`), all of the dependencies not
+already installed will be located (via `PyPI`_), downloaded, built (if necessary),
and installed and 2) Any scripts in your project will be installed with wrappers
that verify the availability of the specified dependencies at runtime.

+.. 
_environment-markers:
+
Platform specific dependencies
------------------------------
-Setuptools offer the capability to evaluate certain conditions before blindly
+Setuptools offers the capability to evaluate certain conditions before blindly
installing everything listed in ``install_requires``. This is great for platform
specific dependencies. For example, the ``enum`` package was added in Python
3.4, therefore, package that depends on it can elect to install it only when
the Python version is older than 3.4. To accomplish this

+.. tab:: pyproject.toml
+
+   .. code-block:: toml
+
+      [project]
+      # ...
+      dependencies = [
+          "enum34; python_version<'3.4'",
+      ]
+      # ...
+
.. tab:: setup.cfg

   .. code-block:: ini
@@ -107,6 +135,18 @@ the Python version is older than 3.4. To accomplish this
Similarly, if you also wish to declare ``pywin32`` with a minimal version of 1.0
and only install it if the user is using a Windows operating system:

+.. tab:: pyproject.toml
+
+   .. code-block:: toml
+
+      [project]
+      # ...
+      dependencies = [
+          "enum34; python_version<'3.4'",
+          "pywin32 >= 1.0; platform_system=='Windows'",
+      ]
+      # ...
+
.. tab:: setup.cfg

   .. code-block:: ini
@@ -130,67 +170,37 @@ and only install it if the user is using a Windows operating system:
   )

The environmental markers that may be used for testing platform types are
-detailed in `PEP 508 `_.
-
-
-Dependencies that aren't in PyPI
---------------------------------
-.. warning::
-    Dependency links support has been dropped by pip starting with version
-    19.0 (released 2019-01-22).
-
-If your project depends on packages that don't exist on PyPI, you may still be
-able to depend on them, as long as they are available for download as:
+detailed in :pep:`508`.

-- an egg, in the standard distutils ``sdist`` format,
-- a single ``.py`` file, or
-- a VCS repository (Subversion, Mercurial, or Git).
+.. seealso::
+   If environment markers are not enough for a specific use case,
+   you can also consider creating a :doc:`backend wrapper </build_meta>`
+   to implement custom detection logic.

-You just need to add some URLs to the ``dependency_links`` argument to
-``setup()``.
-The URLs must be either:
+Direct URL dependencies
+-----------------------

-1. direct download URLs,
-2. the URLs of web pages that contain direct download links, or
-3. the repository's URL
+.. attention::
+   `PyPI`_ and other standards-conformant package indices **do not** accept
+   packages that declare dependencies using direct URLs. ``pip`` will accept them
+   when installing packages from the local filesystem or from another URL,
+   however.

-In general, it's better to link to web pages, because it is usually less
-complex to update a web page than to release a new version of your project.
-You can also use a SourceForge ``showfiles.php`` link in the case where a
-package you depend on is distributed via SourceForge.
+Dependencies that are not available on a package index but can be downloaded
+elsewhere in the form of a source repository or archive may be specified
+using a variant of :pep:`PEP 440's direct references <440#direct-references>`:

-If you depend on a package that's distributed as a single ``.py`` file, you
-must include an ``"#egg=project-version"`` suffix to the URL, to give a project
-name and version number. (Be sure to escape any dashes in the name or version
-by replacing them with underscores.) EasyInstall will recognize this suffix
-and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
-as an egg.
+.. 
tab:: pyproject.toml

-In the case of a VCS checkout, you should also append ``#egg=project-version``
-in order to identify for what package that checkout should be used. You can
-append ``@REV`` to the URL's path (before the fragment) to specify a revision.
-Additionally, you can also force the VCS being used by prepending the URL with
-a certain prefix. Currently available are:
+   .. code-block:: toml

-- ``svn+URL`` for Subversion,
-- ``git+URL`` for Git, and
-- ``hg+URL`` for Mercurial
+      [project]
+      # ...
+      dependencies = [
+          "Package-A @ git+https://example.net/package-a.git@main",
+          "Package-B @ https://example.net/archives/package-b.whl",
+      ]

-A more complete example would be:
-
-    ``vcs+proto://host/path@revision#egg=project-version``
-
-Be careful with the version. It should match the one inside the project files.
-If you want to disregard the version, you have to omit it both in the
-``requires`` and in the URL's fragment.
-
-This will do a checkout (or a clone, in Git and Mercurial parlance) to a
-temporary folder and run ``setup.py bdist_egg``.
-
-The ``dependency_links`` option takes the form of a list of URL strings. For
-example, this will cause a search of the specified page for eggs or source
-distributions, if the package's dependencies aren't already installed:
+.. tab:: setup.cfg

   .. code-block:: ini
@@ -198,33 +208,46 @@ distributions, if the package's dependencies aren't already installed:

      [options]
      #...
-      dependency_links = http://peak.telecommunity.com/snapshots/
+      install_requires =
+          Package-A @ git+https://example.net/package-a.git@main
+          Package-B @ https://example.net/archives/package-b.whl

.. tab:: setup.py

   .. code-block:: python

      setup(
-          ...,
-          dependency_links=[
-              "http://peak.telecommunity.com/snapshots/",
+          install_requires=[
+              "Package-A @ git+https://example.net/package-a.git@main",
+              "Package-B @ https://example.net/archives/package-b.whl",
          ],
+          ...,
      )

+For source repository URLs, a list of supported protocols and VCS-specific
+features such as selecting certain branches or tags can be found in pip's
+documentation on `VCS support <https://pip.pypa.io/en/stable/topics/vcs-support/>`_.
+Supported formats for archive URLs are sdists and wheels.
+

Optional dependencies
=====================
-Setuptools allows you to declare dependencies that only get installed under
-specific circumstances. These dependencies are specified with ``extras_require``
-keyword and are only installed if another package depends on it (either
-directly or indirectly) This makes it convenient to declare dependencies for
-ancillary functions such as "tests" and "docs".
+Setuptools allows you to declare dependencies that are not installed by default.
+This effectively means that you can create a "variant" of your package with a
+set of extra functionalities.

-.. note::
-    ``tests_require`` is now deprecated
+For example, let's consider a ``Package-A`` that offers
+optional PDF support and requires two other dependencies for it to work:
+
+.. tab:: pyproject.toml

-For example, Package-A offers optional PDF support and requires two other
-dependencies for it to work:
+   .. code-block:: toml
+
+      [project]
+      name = "Package-A"
+      # ...
+      [project.optional-dependencies]
+      PDF = ["ReportLab>=1.2", "RXP"]

.. tab:: setup.cfg

   .. code-block:: ini
@@ -234,7 +257,9 @@ dependencies for it to work:
      name = Package-A

      [options.extras_require]
-      PDF = ReportLab>=1.2; RXP
+      PDF =
+          ReportLab>=1.2
+          RXP

.. tab:: setup.py

@@ -242,112 +267,141 @@ dependencies for it to work:

.. 
code-block:: python setup( - name="Project-A", + name="Package-A", ..., extras_require={ "PDF": ["ReportLab>=1.2", "RXP"], }, ) -The name ``PDF`` is an arbitrary identifier of such a list of dependencies, to -which other components can refer and have them installed. There are two common -use cases. - -First is the console_scripts entry point: +.. sidebar:: -.. tab:: setup.cfg + .. tip:: + It is also convenient to declare optional requirements for + ancillary tasks such as running tests or building docs. - .. code-block:: ini +The name ``PDF`` is an arbitrary :pep:`identifier <685>` of such a list of dependencies, to +which other components can refer and have them installed. - [metadata] - name = Project A - #... - - [options] - #... - entry_points= - [console_scripts] - rst2pdf = project_a.tools.pdfgen [PDF] - rst2html = project_a.tools.htmlgen - -.. tab:: setup.py - - .. code-block:: python +A use case for this approach is that other packages can use this "extra" for their +own dependencies. For example, if ``Package-B`` needs ``Package-A`` with PDF support +installed, it might declare the dependency like this: - setup( - name="Project-A", - ..., - entry_points={ - "console_scripts": [ - "rst2pdf = project_a.tools.pdfgen [PDF]", - "rst2html = project_a.tools.htmlgen", - ], - }, - ) +.. tab:: pyproject.toml -This syntax indicates that the entry point (in this case a console script) -is only valid when the PDF extra is installed. It is up to the installer -to determine how to handle the situation where PDF was not indicated -(e.g. omit the console script, provide a warning when attempting to load -the entry point, assume the extras are present and let the implementation -fail later). + .. code-block:: toml -The second use case is that other package can use this "extra" for their -own dependencies. For example, if "Project-B" needs "project A" with PDF support -installed, it might declare the dependency like this: + [project] + name = "Package-B" + # ... + dependencies = [ + "Package-A[PDF]" + ] .. tab:: setup.cfg .. code-block:: ini [metadata] - name = Project-B + name = Package-B #... [options] #... install_requires = - Project-A[PDF] + Package-A[PDF] .. tab:: setup.py .. code-block:: python setup( - name="Project-B", - install_requires=["Project-A[PDF]"], + name="Package-B", + install_requires=["Package-A[PDF]"], ..., ) -This will cause ReportLab to be installed along with project A, if project B is -installed -- even if project A was already installed. In this way, a project +This will cause ``ReportLab`` to be installed along with ``Package-A``, if ``Package-B`` is +installed -- even if ``Package-A`` was already installed. In this way, a project can encapsulate groups of optional "downstream dependencies" under a feature name, so that packages that depend on it don't have to know what the downstream -dependencies are. If a later version of Project A builds in PDF support and -no longer needs ReportLab, or if it ends up needing other dependencies besides -ReportLab in order to provide PDF support, Project B's setup information does +dependencies are. If a later version of ``Package-A`` builds in PDF support and +no longer needs ``ReportLab``, or if it ends up needing other dependencies besides +``ReportLab`` in order to provide PDF support, ``Package-B``'s setup information does not need to change, but the right packages will still be installed if needed. -.. note:: - Best practice: if a project ends up not needing any other packages to +.. 
tip:: + Best practice: if a project ends up no longer needing any other packages to support a feature, it should keep an empty requirements list for that feature in its ``extras_require`` argument, so that packages depending on that feature don't break (due to an invalid feature name). +.. warning:: + Historically ``setuptools`` also used to support extra dependencies in console + scripts, for example: + + .. tab:: setup.cfg + + .. code-block:: ini + + [metadata] + name = Package-A + #... + + [options] + #... + entry_points= + [console_scripts] + rst2pdf = project_a.tools.pdfgen [PDF] + rst2html = project_a.tools.htmlgen + + .. tab:: setup.py + + .. code-block:: python + + setup( + name="Package-A", + ..., + entry_points={ + "console_scripts": [ + "rst2pdf = project_a.tools.pdfgen [PDF]", + "rst2html = project_a.tools.htmlgen", + ], + }, + ) + + This syntax indicates that the entry point (in this case a console script) + is only valid when the PDF extra is installed. It is up to the installer + to determine how to handle the situation where PDF was not indicated + (e.g., omit the console script, provide a warning when attempting to load + the entry point, assume the extras are present and let the implementation + fail later). + + **However**, ``pip`` and other tools might not support this use case for extra + dependencies; therefore, this practice is considered **deprecated**. + See :doc:`PyPUG:specifications/entry-points`. + Python requirement ================== In some cases, you might need to specify the minimum required Python version. -This is handled with the ``python_requires`` keyword supplied to ``setup.cfg`` -or ``setup.py``. +This can be configured as shown in the example below. + +.. tab:: pyproject.toml + + .. code-block:: toml + + [project] + name = "Package-B" + requires-python = ">=3.6" + # ... .. tab:: setup.cfg .. code-block:: ini [metadata] - name = Project-B + name = Package-B #... [options] @@ -359,7 +413,10 @@ or ``setup.py``. .. code-block:: python setup( - name="Project-B", + name="Package-B", python_requires=">=3.6", ..., ) + + +.. _PyPI: https://pypi.org diff --git a/docs/userguide/development_mode.rst b/docs/userguide/development_mode.rst index 90bc56768f..6a4b04a7ad 100644 --- a/docs/userguide/development_mode.rst +++ b/docs/userguide/development_mode.rst @@ -1,52 +1,267 @@ -"Development Mode" -================== - -Under normal circumstances, the ``distutils`` assume that you are going to -build a distribution of your project, not use it in its "raw" or "unbuilt" -form. However, if you were to use the ``distutils`` to build a distribution, -you would have to rebuild and reinstall your project every time you made a -change to it during development. - -Another problem that sometimes comes up with the ``distutils`` is that you may -need to do development on two related projects at the same time. You may need -to put both projects' packages in the same directory to run them, but need to -keep them separate for revision control purposes. How can you do this? - -Setuptools allows you to deploy your projects for use in a common directory or -staging area, but without copying any files. Thus, you can edit each project's -code in its checkout directory, and only need to run build commands when you -change a project's C extensions or similarly compiled files. You can even -deploy a project into another project's checkout directory, if that's your -preferred way of working (as opposed to using a common independent staging area -or the site-packages directory). 
- -To do this, use the ``setup.py develop`` command. It works very similarly to -``setup.py install``, except that it doesn't actually install anything. -Instead, it creates a special ``.egg-link`` file in the deployment directory, -that links to your project's source code. And, if your deployment directory is -Python's ``site-packages`` directory, it will also update the -``easy-install.pth`` file to include your project's source code, thereby making -it available on ``sys.path`` for all programs using that Python installation. - -In addition, the ``develop`` command creates wrapper scripts in the target -script directory that will run your in-development scripts after ensuring that -all your ``install_requires`` packages are available on ``sys.path``. - -You can deploy the same project to multiple staging areas, e.g. if you have -multiple projects on the same machine that are sharing the same project you're -doing development work. - -When you're done with a given development task, you can remove the project -source from a staging area using ``setup.py develop --uninstall``, specifying -the desired staging area if it's not the default. - -There are several options to control the precise behavior of the ``develop`` -command; see the section on the :ref:`develop ` command below for more details. - -Note that you can also apply setuptools commands to non-setuptools projects, -using commands like this:: - - python -c "import setuptools; with open('setup.py') as f: exec(compile(f.read(), 'setup.py', 'exec'))" develop - -That is, you can simply list the normal setup commands and options following -the quoted part. +Development Mode (a.k.a. "Editable Installs") +============================================= + +When creating a Python project, developers usually want to implement and test +changes iteratively, before cutting a release and preparing a distribution archive. + +In normal circumstances this can be quite cumbersome and require the developers +to manipulate the ``PYTHONPATH`` environment variable or to continuously re-build +and re-install the project. + +To facilitate iterative exploration and experimentation, setuptools allows +users to instruct the Python interpreter and its import machinery to load the +code under development directly from the project folder without having to +copy the files to a different location on the disk. +This means that changes in the Python source code can immediately take place +without requiring a new installation. + +You can enter this "development mode" by performing an :doc:`editable installation +` inside of a :term:`virtual environment`, +using :doc:`pip's ` ``-e/--editable`` flag, as shown below: + +.. code-block:: bash + + $ cd your-python-project + $ python -m venv .venv + # Activate your environment with: + # `source .venv/bin/activate` on Unix/macOS + # or `.venv\Scripts\activate` on Windows + + $ pip install --editable . + + # Now you have access to your package + # as if it was installed in .venv + $ python -c "import your_python_project" + + +An "editable installation" works very similarly to a regular install with +``pip install .``, except that it only installs your package dependencies, +metadata and wrappers for :ref:`console and GUI scripts `. +Under the hood, setuptools will try to create a special :mod:`.pth file ` +in the target directory (usually ``site-packages``) that extends the +``PYTHONPATH`` or install a custom :doc:`import hook `. 
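+If you are curious about what an editable install actually puts in
+``site-packages``, you can list the generated files. The following is a
+minimal sketch (not part of any public API): the ``__editable__`` file-name
+prefix is an implementation detail of current setuptools versions and may
+change at any time:
+
+.. code-block:: python
+
+    import site
+    from pathlib import Path
+
+    # Look for the ``.pth`` file and/or import hook installed by
+    # ``pip install --editable .`` inside the active environment
+    for directory in site.getsitepackages():
+        for artifact in Path(directory).glob("__editable__*"):
+            print(artifact)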
+ +When you're done with a given development task, you can simply uninstall your +package (as you would normally do with ``pip uninstall ``). + +Please note that, by default, an editable install will expose at least all the +files that would be available in a regular installation. However, depending on +the file and directory organization in your project, it might also expose +as a side effect files that would not be normally available. +This is allowed so you can iteratively create new Python modules. +Please have a look at the following section if you are looking for a different behaviour. + +.. admonition:: Virtual Environments + + You can think about virtual environments as "isolated Python runtime deployments" + that allow users to install different sets of libraries and tools without + messing with the global behaviour of the system. + + They are a safe way of testing new projects and can be created easily + with the :mod:`venv` module from the standard library. + + Please note however that depending on your operating system or distribution, + ``venv`` might not come installed by default with Python. For those cases, + you might need to use the OS package manager to install it. + For example, in Debian/Ubuntu-based systems you can obtain it via: + + .. code-block:: bash + + sudo apt install python3-venv + + Alternatively, you can also try installing :pypi:`virtualenv`. + More information is available on the Python Packaging User Guide on + :doc:`PyPUG:guides/installing-using-pip-and-virtual-environments`. + +.. note:: + .. versionchanged:: v64.0.0 + Editable installation hooks implemented according to :pep:`660`. + Support for :pep:`namespace packages <420>` is still **EXPERIMENTAL**. + + +"Strict" editable installs +-------------------------- + +When thinking about editable installations, users might have the following +expectations: + +1. It should allow developers to add new files (or split/rename existing ones) + and have them automatically exposed. +2. It should behave as close as possible to a regular installation and help + users to detect problems (e.g. new files not being included in the distribution). + +Unfortunately these expectations are in conflict with each other. +To solve this problem, ``setuptools`` allows developers to choose a more +*"strict"* mode for the editable installation. This can be done by passing +a special *configuration setting* via :pypi:`pip`, as indicated below: + +.. code-block:: bash + + pip install -e . --config-settings editable_mode=strict + +In this mode, new files **won't** be exposed and the editable installs will +try to mimic as much as possible the behavior of a regular install. +Under the hood, ``setuptools`` will create a tree of file links in an auxiliary +directory (``$your_project_dir/build``) and add it to ``PYTHONPATH`` via a +:mod:`.pth file `. (Please be careful not to delete this directory +by mistake, otherwise your files may stop being accessible). + +.. warning:: + Strict editable installs require auxiliary files to be placed in a + ``build/__editable__.*`` directory (relative to your project root). + + Please be careful to not remove this directory while testing your project, + otherwise your editable installation may be compromised. + + You can remove the ``build/__editable__.*`` directory after uninstalling. + + +.. note:: + .. versionadded:: v64.0.0 + Added new *strict* mode for editable installations. + The exact details of how this mode is implemented may vary. 
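+To see the practical consequence of the *strict* mode, consider the
+hypothetical session below (the module name is made up for illustration):
+a file created *after* the strict editable installation is not exposed,
+so importing it fails until the project is reinstalled:
+
+.. code-block:: python
+
+    import importlib
+
+    # ``your_package/brand_new.py`` was added after running
+    # ``pip install -e . --config-settings editable_mode=strict``
+    try:
+        importlib.import_module("your_package.brand_new")
+    except ModuleNotFoundError:
+        # Strict mode links only the files present at installation time,
+        # so a fresh editable install is needed to expose new files.
+        print("please re-run the editable installation")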
+ + +Limitations +----------- + +- The *editable* term is used to refer only to Python modules + inside the package directories. Non-Python files, external (data) files, + executable script files, binary extensions, headers and metadata may be + exposed as a *snapshot* of the version they were at the moment of the + installation. +- Adding new dependencies, entry-points or changing your project's metadata + require a fresh "editable" re-installation. +- Console scripts and GUI scripts **MUST** be specified via :doc:`entry-points + ` to work properly. +- *Strict* editable installs require the file system to support + either :wiki:`symbolic ` or :wiki:`hard links `. + This installation mode might also generate auxiliary files under the project directory. +- There is *no guarantee* that the editable installation will be performed + using a specific technique. Depending on each project, ``setuptools`` may + select a different approach to ensure the package is importable at runtime. +- There is *no guarantee* that files outside the top-level package directory + will be accessible after an editable install. +- There is *no guarantee* that attributes like ``__path__`` or ``__file__`` + will correspond to the exact location of the original files (e.g., + ``setuptools`` might employ file links to perform the editable installation). + Users are encouraged to use tools like :mod:`importlib.resources` or + :mod:`importlib.metadata` when trying to access package files directly. +- Editable installations may not work with + :doc:`namespaces created with pkgutil or pkg_resources + `. + Please use :pep:`420`-style implicit namespaces [#namespaces]_. +- Support for :pep:`420`-style implicit namespace packages for + projects structured using :ref:`flat-layout` is still **experimental**. + If you experience problems, you can try converting your package structure + to the :ref:`src-layout`. +- File system entries in the current working directory + whose names coincidentally match installed packages + may take precedence in :doc:`Python's import system `. + Users are encouraged to avoid such scenarios [#cwd]_. +- Setuptools will try to give the right precedence to modules in an editable install. + However this is not always an easy task. If you have a particular order in + ``sys.path`` or some specific import precedence that needs to be respected, + the editable installation as supported by Setuptools might not be able to + fulfil this requirement, and therefore it might not be the right tool for your use case. + +.. attention:: + Editable installs are **not a perfect replacement for regular installs** + in a test environment. When in doubt, please test your projects as + installed via a regular wheel. There are tools in the Python ecosystem, + like :pypi:`tox` or :pypi:`nox`, that can help you with that + (when used with appropriate configuration). + + +Legacy Behavior +--------------- + +If your project is not compatible with the new "editable installs" or you wish +to replicate the legacy behavior, for the time being you can also perform the +installation in the ``compat`` mode: + +.. code-block:: bash + + pip install -e . --config-settings editable_mode=compat + +This installation mode will try to emulate how ``python setup.py develop`` +works (still within the context of :pep:`660`). + +.. warning:: + The ``compat`` mode is *transitional* and will be removed in + future versions of ``setuptools``, it exists only to help during the + migration period. 
+ Also note that support for this mode is limited: + it is safe to assume that the ``compat`` mode is offered "as is", and + improvements are unlikely to be implemented. + Users are encouraged to try out the new editable installation techniques + and make the necessary adaptations. + +.. note:: + Newer versions of ``pip`` no longer run the fallback command + ``python setup.py develop`` when the ``pyproject.toml`` file is present. + This means that setting the environment variable + ``SETUPTOOLS_ENABLE_FEATURES="legacy-editable"`` + will have no effect when installing a package with ``pip``. + + +How editable installations work +------------------------------- + +*Advanced topic* + +There are many techniques that can be used to expose packages under development +in such a way that they are available as if they were installed. +Depending on the project file structure and the selected mode, ``setuptools`` +will choose one of these approaches for the editable installation [#criteria]_. + +A non-exhaustive list of implementation mechanisms is presented below. +More information is available on the text of :pep:`PEP 660 <660#what-to-put-in-the-wheel>`. + +- A static ``.pth`` file [#static_pth]_ can be added to one of the directories + listed in :func:`site.getsitepackages` or :func:`site.getusersitepackages` to + extend :obj:`sys.path`. +- A directory containing a *farm of file links* that mimic the + project structure and point to the original files can be employed. + This directory can then be added to :obj:`sys.path` using a static ``.pth`` file. +- A dynamic ``.pth`` file [#dynamic_pth]_ can also be used to install an + "import :term:`finder`" (:obj:`~importlib.abc.MetaPathFinder` or + :obj:`~importlib.abc.PathEntryFinder`) that will hook into Python's + :doc:`import system ` machinery. + +.. attention:: + ``Setuptools`` offers **no guarantee** of which technique will be used to + perform an editable installation. This will vary from project to project + and may change depending on the specific version of ``setuptools`` being + used. + + +---- + +.. rubric:: Notes + +.. [#namespaces] + You *may* be able to use *strict* editable installations with namespace + packages created with ``pkgutil`` or ``pkg_resources``; however, this is not + officially supported. + +.. [#cwd] + Techniques like the :ref:`src-layout` or tooling-specific options like + `tox's changedir `_ + can be used to prevent such kinds of situations (check out `this blog post + `_ for more + insights). + +.. [#criteria] + ``setuptools`` strives to find a balance between allowing the user to see + the effects of project files being edited while still trying to keep the + editable installation as similar as possible to a regular installation. + +.. [#static_pth] + i.e., a ``.pth`` file where each line corresponds to a path that should be + added to :obj:`sys.path`. See :mod:`Site-specific configuration hook `. + +.. [#dynamic_pth] + i.e., a ``.pth`` file where each line starts with an ``import`` + statement and executes arbitrary Python code. See :mod:`Site-specific + configuration hook `. diff --git a/docs/userguide/distribution.rst b/docs/userguide/distribution.rst index db0f1a5f59..ae2dc4a45f 100644 --- a/docs/userguide/distribution.rst +++ b/docs/userguide/distribution.rst @@ -1,6 +1,117 @@ +.. _Specifying Your Project's Version: + +Specifying Your Project's Version +================================= + +Setuptools can work well with most versioning schemes. 
Over the years, +setuptools has tried to closely follow the :pep:`440` scheme, but it +also supports legacy versions. There are, however, a +few special things to watch out for, in order to ensure that setuptools and +other tools can always tell what version of your package is newer than another +version. Knowing these things will also help you correctly specify what +versions of other projects your project depends on. + +A version consists of an alternating series of release numbers and +`pre-release `_ +or `post-release `_ tags. A +release number is a series of digits punctuated by +dots, such as ``2.4`` or ``0.5``. Each series of digits is treated +numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the +same release number, denoting the first subrelease of release 2. But ``2.10`` +is the *tenth* subrelease of release 2, and so is a different and newer release +from ``2.1`` or ``2.1.0``. Leading zeros within a series of digits are also +ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``. + +Following a release number, you can have either a pre-release or post-release +tag. Pre-release tags make a version be considered *older* than the version +they are appended to. So, revision ``2.4`` is *newer* than release candidate +``2.4rc1``, which in turn is newer than beta release ``2.4b1`` or +alpha release ``2.4a1``. Postrelease tags make +a version be considered *newer* than the version they are appended to. So, +revisions like ``2.4.post1`` are newer than ``2.4``, but *older* +than ``2.4.1`` (which has a higher release number). + +In the case of legacy versions (for example, ``2.4pl1``), they are considered +older than non-legacy versions. Taking that into account, a revision ``2.4pl1`` +is *older* than ``2.4``. Note that ``2.4pl1`` is not :pep:`440`-compliant. + +A pre-release tag is a series of letters that are alphabetically before +"final". Some examples of prerelease tags would include ``alpha``, ``beta``, +``a``, ``c``, ``dev``, and so on. You do not have to place a dot or dash +before the prerelease tag if it's immediately after a number, but it's okay to +do so if you prefer. Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all +represent release candidate 1 of version ``2.4``, and are treated as identical +by setuptools. Note that only ``a``, ``b``, and ``rc`` are :pep:`440`-compliant +pre-release tags. + +In addition, there are three special prerelease tags that are treated as if +they were ``rc``: ``c``, ``pre``, and ``preview``. So, version +``2.4c1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as +``2.4rc1``, and are treated as identical by setuptools. + +A post-release tag is the string ``.post``, followed by a non-negative integer +value. Post-release tags are generally used to separate patch numbers, port +numbers, build numbers, revision numbers, or date stamps from the release +number. For example, the version ``2.4.post1263`` might denote Subversion +revision 1263 of a post-release patch of version ``2.4``. Or you might use +``2.4.post20051127`` to denote a date-stamped post-release. Legacy post-release +tags could be either a series of letters that are alphabetically greater than or +equal to "final", or a dash (``-``) - for example ``2.4-r1263`` or +``2.4-20051127``. + +Notice that after each legacy pre- or post-release tag, you are free to place +another release number, followed again by more pre- or post-release tags. 
For +example, ``0.6a9.dev41475`` could denote Subversion revision 41475 of the in- +development version of the ninth alpha of release 0.6. Notice that ``dev`` is +a pre-release tag, so this version is a *lower* version number than ``0.6a9``, +which would be the actual ninth alpha of release 0.6. But the ``41475`` is +a post-release tag, so this version is *newer* than ``0.6a9.dev``. + +For the most part, setuptools' interpretation of version numbers is intuitive, +but here are a few tips that will keep you out of trouble in the corner cases: + +* Don't stick adjoining pre-release tags together without a dot or number + between them. Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``, + *not* a development pre-release of ``1.9a``. Use ``.dev`` instead, as in + ``1.9a.dev``, or separate the prerelease tags with a number, as in + ``1.9a0dev``. ``1.9a.dev``, ``1.9a0dev``, and even ``1.9a0.dev0`` are + identical versions from setuptools' point of view, so you can use whatever + scheme you prefer. Of these examples, only ``1.9a0.dev0`` is + :pep:`440`-compliant. + +* If you want to be certain that your chosen numbering scheme works the way + you think it will, you can use the ``pkg_resources.parse_version()`` function + to compare different version numbers:: + + >>> from pkg_resources import parse_version + >>> parse_version("1.9.a.dev") == parse_version("1.9a0dev") + True + >>> parse_version("2.1-rc2") < parse_version("2.1") + True + >>> parse_version("0.6a9dev-r41475") < parse_version("0.6a9") + True + +Once you've decided on a version numbering scheme for your project, you can +have setuptools automatically tag your in-development releases with various +pre- or post-release tags. See the following section for more details. + + Tagging and "Daily Build" or "Snapshot" Releases ------------------------------------------------ +.. warning:: + Please note that running ``python setup.py ...`` directly is no longer + considered a good practice and that in the future the commands ``egg_info`` + and ``rotate`` will be deprecated. + + As a result, the instructions and information presented in this section + should be considered **transitional** while setuptools doesn't provide a + mechanism for tagging releases. + + Meanwhile, you can also consider using :pypi:`setuptools-scm` to achieve + similar objectives. + + When a set of related projects are under development, it may be important to track finer-grained version increments than you would normally use for e.g. "stable" releases. While stable releases might be measured in dotted numbers @@ -29,7 +140,7 @@ to generate a daily build or snapshot for. See the section below on the :ref:`Specifying Your Project's Version` for more information about how pre- and post-release tags affect how version numbers are interpreted. This is important in order to make sure that dependency processing tools will know -which versions of your project are newer than others.) +which versions of your project are newer than others). Finally, if you are creating builds frequently, and either building them in a downloadable location or are copying them to a distribution server, you should @@ -53,39 +164,8 @@ directory. (And, you could also define sitewide or per-user default versions of the ``daily`` alias, so that projects that didn't define their own would use the appropriate defaults.) 
-Generating Source Distributions -------------------------------- - -``setuptools`` enhances the distutils' default algorithm for source file -selection with pluggable endpoints for looking up files to include. If you are -using a revision control system, and your source distributions only need to -include files that you're tracking in revision control, use a corresponding -plugin instead of writing a ``MANIFEST.in`` file. See the section below on -:ref:`Adding Support for Revision Control Systems` for information on plugins. - -If you need to include automatically generated files, or files that are kept in -an unsupported revision control system, you'll need to create a ``MANIFEST.in`` -file to specify any files that the default file location algorithm doesn't -catch. See the distutils documentation for more information on the format of -the ``MANIFEST.in`` file. - -But, be sure to ignore any part of the distutils documentation that deals with -``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you -from these issues and doesn't work the same way in any case. Unlike the -distutils, setuptools regenerates the source distribution manifest file -every time you build a source distribution, and it builds it inside the -project's ``.egg-info`` directory, out of the way of your main project -directory. You therefore need not worry about whether it is up-to-date or not. - -Indeed, because setuptools' approach to determining the contents of a source -distribution is so much simpler, its ``sdist`` command omits nearly all of -the options that the distutils' more complex ``sdist`` process requires. For -all practical purposes, you'll probably use only the ``--formats`` option, if -you use any option at all. - - Making "Official" (Non-Snapshot) Releases -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +----------------------------------------- When you make an official release, creating source or binary distributions, you will need to override the tag settings from ``setup.cfg``, so that you @@ -116,136 +196,3 @@ You can then use it like this:: Or of course you can create more elaborate aliases that do all of the above. See the sections below on the :ref:`egg_info ` and :ref:`alias ` commands for more ideas. - -Distributing Extensions compiled with Cython --------------------------------------------- - -``setuptools`` will detect at build time whether Cython is installed or not. -If Cython is not found ``setuptools`` will ignore pyx files. - -To ensure Cython is available, include Cython in the build-requires section -of your pyproject.toml:: - - [build-system] - requires=[..., "cython"] - -Built with pip 10 or later, that declaration is sufficient to include Cython -in the build. For broader compatibility, declare the dependency in your -setup-requires of setup.cfg:: - - [options] - setup_requires = - ... - cython - -As long as Cython is present in the build environment, ``setuptools`` includes -transparent support for building Cython extensions, as -long as extensions are defined using ``setuptools.Extension``. - -If you follow these rules, you can safely list ``.pyx`` files as the source -of your ``Extension`` objects in the setup script. If it is, then ``setuptools`` -will use it. - -Of course, for this to work, your source distributions must include the C -code generated by Cython, as well as your original ``.pyx`` files. 
This means -that you will probably want to include current ``.c`` files in your revision -control system, rebuilding them whenever you check changes in for the ``.pyx`` -source files. This will ensure that people tracking your project in a revision -control system will be able to build it even if they don't have Cython -installed, and that your source releases will be similarly usable with or -without Cython. - - -.. _Specifying Your Project's Version: - -Specifying Your Project's Version ---------------------------------- - -Setuptools can work well with most versioning schemes. Over the years, -setuptools has tried to closely follow the -`PEP 440 `_ scheme, but it -also supports legacy versions. There are, however, a -few special things to watch out for, in order to ensure that setuptools and -other tools can always tell what version of your package is newer than another -version. Knowing these things will also help you correctly specify what -versions of other projects your project depends on. - -A version consists of an alternating series of release numbers and pre-release -or post-release tags. A release number is a series of digits punctuated by -dots, such as ``2.4`` or ``0.5``. Each series of digits is treated -numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the -same release number, denoting the first subrelease of release 2. But ``2.10`` -is the *tenth* subrelease of release 2, and so is a different and newer release -from ``2.1`` or ``2.1.0``. Leading zeros within a series of digits are also -ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``. - -Following a release number, you can have either a pre-release or post-release -tag. Pre-release tags make a version be considered *older* than the version -they are appended to. So, revision ``2.4`` is *newer* than revision ``2.4c1``, -which in turn is newer than ``2.4b1`` or ``2.4a1``. Postrelease tags make -a version be considered *newer* than the version they are appended to. So, -revisions like ``2.4-1`` are newer than ``2.4``, but *older* -than ``2.4.1`` (which has a higher release number). - -In the case of legacy versions (for example, ``2.4pl1``), they are considered -older than non-legacy versions. Taking that in count, a revision ``2.4pl1`` -is *older* than ``2.4`` - -A pre-release tag is a series of letters that are alphabetically before -"final". Some examples of prerelease tags would include ``alpha``, ``beta``, -``a``, ``c``, ``dev``, and so on. You do not have to place a dot or dash -before the prerelease tag if it's immediately after a number, but it's okay to -do so if you prefer. Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all -represent release candidate 1 of version ``2.4``, and are treated as identical -by setuptools. - -In addition, there are three special prerelease tags that are treated as if -they were the letter ``c``: ``pre``, ``preview``, and ``rc``. So, version -``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as -``2.4c1``, and are treated as identical by setuptools. - -A post-release tag is either a series of letters that are alphabetically -greater than or equal to "final", or a dash (``-``). Post-release tags are -generally used to separate patch numbers, port numbers, build numbers, revision -numbers, or date stamps from the release number. For example, the version -``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of -version ``2.4``. 
Or you might use ``2.4-20051127`` to denote a date-stamped -post-release. - -Notice that after each pre or post-release tag, you are free to place another -release number, followed again by more pre- or post-release tags. For example, -``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the in- -development version of the ninth alpha of release 0.6. Notice that ``dev`` is -a pre-release tag, so this version is a *lower* version number than ``0.6a9``, -which would be the actual ninth alpha of release 0.6. But the ``-r41475`` is -a post-release tag, so this version is *newer* than ``0.6a9.dev``. - -For the most part, setuptools' interpretation of version numbers is intuitive, -but here are a few tips that will keep you out of trouble in the corner cases: - -* Don't stick adjoining pre-release tags together without a dot or number - between them. Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``, - *not* a development pre-release of ``1.9a``. Use ``.dev`` instead, as in - ``1.9a.dev``, or separate the prerelease tags with a number, as in - ``1.9a0dev``. ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are - identical versions from setuptools' point of view, so you can use whatever - scheme you prefer. - -* If you want to be certain that your chosen numbering scheme works the way - you think it will, you can use the ``pkg_resources.parse_version()`` function - to compare different version numbers:: - - >>> from pkg_resources import parse_version - >>> parse_version("1.9.a.dev") == parse_version("1.9a0dev") - True - >>> parse_version("2.1-rc2") < parse_version("2.1") - True - >>> parse_version("0.6a9dev-r41475") < parse_version("0.6a9") - True - -Once you've decided on a version numbering scheme for your project, you can -have setuptools automatically tag your in-development releases with various -pre- or post-release tags. See the following sections for more details: - -* `Tagging and "Daily Build" or "Snapshot" Releases`_ -* The :ref:`egg_info ` command diff --git a/docs/userguide/entry_point.rst b/docs/userguide/entry_point.rst index 21edc6971a..4aa7f9a27c 100644 --- a/docs/userguide/entry_point.rst +++ b/docs/userguide/entry_point.rst @@ -4,25 +4,38 @@ Entry Points ============ -Packages may provide commands to be run at the console (console scripts), -such as the ``pip`` command. These commands are defined for a package -as a specific kind of entry point in the ``setup.cfg`` or -``setup.py``. +Entry points are a type of metadata that can be exposed by packages on installation. +They are a very useful feature of the Python ecosystem, +and come in especially handy in two scenarios: + +1. The package would like to provide commands to be run at the terminal. +This functionality is known as *console* scripts. The command may also +open up a GUI, in which case it is known as a *GUI* script. An example +of a console script is the one provided by the :pypi:`pip` package, which +allows you to run commands like ``pip install`` in the terminal. + +2. A package would like to enable customization of its functionalities +via *plugins*. For example, the test framework :pypi:`pytest` allows +customization via the ``pytest11`` entry point, and the syntax +highlighting tool :pypi:`pygments` allows specifying additional styles +using the entry point ``pygments.styles``. + + +.. _console-scripts: Console Scripts =============== +Let us start with console scripts. First consider an example without entry points. Imagine a package -defined thus: - -.. 
code-block:: bash +defined thus:: - timmins/ - timmins/__init__.py - timmins/__main__.py - setup.cfg # or setup.py - #other necessary files + project_root_directory + ├── pyproject.toml # and/or setup.cfg, setup.py + └── src + └── timmins + ├── __init__.py + └── ... with ``__init__.py`` as: @@ -31,7 +44,10 @@ with ``__init__.py`` as: def hello_world(): print("Hello world") -and ``__main__.py`` providing a hook: +Now, suppose that we would like to provide some way of executing the +function ``hello_world()`` from the command-line. One way to do this +is to create a file ``src/timmins/__main__.py`` providing a hook as +follows: .. code-block:: python @@ -40,49 +56,167 @@ and ``__main__.py`` providing a hook: if __name__ == '__main__': hello_world() -After installing the package, the function may be invoked through the -`runpy `_ module: +Then, after installing the package ``timmins``, we may invoke the ``hello_world()`` +function as follows, through the `runpy `_ +module: .. code-block:: bash - python -m timmins + $ python -m timmins + Hello world + +Instead of this approach using ``__main__.py``, you can also create a +user-friendly CLI executable that can be called directly without ``python -m``. +In the above example, to create a command ``hello-world`` that invokes +``timmins.hello_world``, add a console script entry point to your +configuration: + +.. tab:: pyproject.toml + + .. code-block:: toml + + [project.scripts] + hello-world = "timmins:hello_world" -Adding a console script entry point allows the package to define a -user-friendly name for installers of the package to execute. Installers -like pip will create wrapper scripts to execute a function. In the -above example, to create a command ``hello-world`` that invokes -``timmins.hello_world``, add a console script entry point to -``setup.cfg``: +.. tab:: setup.cfg -.. code-block:: ini + .. code-block:: ini + + [options.entry_points] + console_scripts = + hello-world = timmins:hello_world + +.. tab:: setup.py + + .. code-block:: python + + from setuptools import setup + + setup( + # ..., + entry_points={ + 'console_scripts': [ + 'hello-world = timmins:hello_world', + ] + } + ) - [options.entry_points] - console_scripts = - hello-world = timmins:hello_world After installing the package, a user may invoke that function by simply calling -``hello-world`` on the command line. +``hello-world`` on the command line: + +.. code-block:: bash -The syntax for entry points is specified as follows: + $ hello-world + Hello world -.. code-block:: ini +Note that any function used as a console script, i.e. ``hello_world()`` in +this example, should not accept any arguments. If your function requires any input +from the user, you can use regular command-line argument parsing utilities like +:mod:`argparse` within the body of +the function to parse user input given via :obj:`sys.argv`. - = [.[.]][:.] +You may have noticed that we have used a special syntax to specify the function +that must be invoked by the console script, i.e. we have written ``timmins:hello_world`` +with a colon ``:`` separating the package name and the function name. The full +specification of this syntax is discussed in the `last section <#entry-points-syntax>`_ +of this document, and this can be used to specify a function located anywhere in +your package, not just in ``__init__.py``. 
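+For instance, the sketch below shows one way ``hello_world()`` could handle
+user input with :mod:`argparse` (the ``--name`` option is purely
+illustrative and not part of the example package above):
+
+.. code-block:: python
+
+    import argparse
+
+    def hello_world():
+        # Console-script functions are called without arguments;
+        # argparse reads the user input from sys.argv by default.
+        parser = argparse.ArgumentParser(prog="hello-world")
+        parser.add_argument("--name", default="world")
+        args = parser.parse_args()
+        print(f"Hello {args.name}")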
-where ``name`` is the name for the script you want to create, the left hand -side of ``:`` is the module that contains your function and the right hand -side is the object you want to invoke (e.g. a function). +GUI Scripts +=========== In addition to ``console_scripts``, Setuptools supports ``gui_scripts``, which will launch a GUI application without running in a terminal window. +For example, if we have a project with the same directory structure as before, +with an ``__init__.py`` file containing the following: + +.. code-block:: python + + import PySimpleGUI as sg + + def hello_world(): + sg.Window(title="Hello world", layout=[[]], margins=(100, 50)).read() + +Then, we can add a GUI script entry point: + +.. tab:: pyproject.toml + + .. code-block:: toml + + [project.gui-scripts] + hello-world = "timmins:hello_world" + +.. tab:: setup.cfg + + .. code-block:: ini + + [options.entry_points] + gui_scripts = + hello-world = timmins:hello_world + +.. tab:: setup.py + + .. code-block:: python + + from setuptools import setup + + setup( + # ..., + entry_points={ + 'gui_scripts': [ + 'hello-world = timmins:hello_world', + ] + } + ) + +.. note:: + To be able to import ``PySimpleGUI``, you need to add ``pysimplegui`` to your package dependencies. + See :doc:`/userguide/dependency_management` for more information. + +Now, running: + +.. code-block:: bash + + $ hello-world + +will open a small application window with the title 'Hello world'. + +Note that just as with console scripts, any function used as a GUI script +should not accept any arguments, and any user input can be parsed within the +body of the function. GUI scripts also use the same syntax (discussed in the +`last section <#entry-points-syntax>`_) for specifying the function to be invoked. + +.. note:: + + The difference between ``console_scripts`` and ``gui_scripts`` only affects + Windows systems. [#use_for_scripts]_ ``console_scripts`` are wrapped in a console + executable, so they are attached to a console and can use ``sys.stdin``, + ``sys.stdout`` and ``sys.stderr`` for input and output. ``gui_scripts`` are + wrapped in a GUI executable, so they can be started without a console, but + cannot use standard streams unless application code redirects them. Other + platforms do not have the same distinction. + +.. note:: + + Console and GUI scripts work because behind the scenes, installers like :pypi:`pip` + create wrapper scripts around the function(s) being invoked. For example, + the ``hello-world`` entry point in the above two examples would create a + command ``hello-world`` launching a script like this: [#use_for_scripts]_ + + .. code-block:: python + + import sys + from timmins import hello_world + sys.exit(hello_world()) .. _dynamic discovery of services and plugins: Advertising Behavior ==================== -Console scripts are one use of the more general concept of entry points. Entry +Console/GUI scripts are one use of the more general concept of entry points. Entry points more generally allow a packager to advertise behavior for discovery by other libraries and applications. This feature enables "plug-in"-like functionality, where one library solicits entry points and any number of other @@ -93,68 +227,345 @@ A good example of this plug-in behavior can be seen in where pytest is a test framework that allows other libraries to extend or modify its functionality through the ``pytest11`` entry point. 
-The console scripts work similarly, where libraries advertise their commands +The console/GUI scripts work similarly, where libraries advertise their commands and tools like ``pip`` create wrapper scripts that invoke those commands. -For a project wishing to solicit entry points, Setuptools recommends the -`importlib.metadata `_ -module (part of stdlib since Python 3.8) or its backport, -`importlib_metadata `_. +Entry Points for Plugins +======================== -For example, to find the console script entry points from the example above: +Let us consider a simple example to understand how we can implement entry points +corresponding to plugins. Say we have a package ``timmins`` with the following +directory structure:: -.. code-block:: pycon + timmins + ├── pyproject.toml # and/or setup.cfg, setup.py + └── src + └── timmins + └── __init__.py + +and in ``src/timmins/__init__.py`` we have the following code: + +.. code-block:: python + + def hello_world(): + print('Hello world') + +Basically, we have defined a ``hello_world()`` function which will print the text +'Hello world'. Now, let us say we want to print the text 'Hello world' in different +ways. The current function just prints the text as it is - let us say we want another +style in which the text is enclosed within exclamation marks:: + + !!! Hello world !!! + +Let us see how this can be done using plugins. First, let us separate the style of +printing the text from the text itself. In other words, we can change the code in +``src/timmins/__init__.py`` to something like this: + +.. code-block:: python + + def display(text): + print(text) + + def hello_world(): + display('Hello world') + +Here, the ``display()`` function controls the style of printing the text, and the +``hello_world()`` function calls the ``display()`` function to print the text 'Hello +world'. + +Right now the ``display()`` function just prints the text as it is. In order to be able +to customize it, we can do the following. Let us introduce a new *group* of entry points +named ``timmins.display``, and expect plugin packages implementing this entry point +to supply a ``display()``-like function. Next, to be able to automatically discover plugin +packages that implement this entry point, we can use the +:mod:`importlib.metadata` module, +as follows: + +.. code-block:: python + + from importlib.metadata import entry_points + display_eps = entry_points(group='timmins.display') + +.. note:: + Each ``importlib.metadata.EntryPoint`` object contains a ``name``, a + ``group``, and a ``value``. For example, after setting up the plugin package as + described below, ``display_eps`` in the above code will look like this: [#package_metadata]_ + + .. code-block:: python + + ( + EntryPoint(name='excl', value='timmins_plugin_fancy:excl_display', group='timmins.display'), + ..., + ) + +``display_eps`` will now be a list of ``EntryPoint`` objects, each referring to ``display()``-like +functions defined by one or more installed plugin packages. Then, to import a specific +``display()``-like function - let us choose the one corresponding to the first discovered +entry point - we can use the ``load()`` method as follows: + +.. code-block:: python + + display = display_eps[0].load() + +Finally, a sensible behaviour would be that if we cannot find any plugin packages customizing +the ``display()`` function, we should fall back to our default implementation which prints +the text as it is. 
With this behaviour included, the code in ``src/timmins/__init__.py`` +finally becomes: + +.. code-block:: python + + from importlib.metadata import entry_points + display_eps = entry_points(group='timmins.display') + try: + display = display_eps[0].load() + except IndexError: + def display(text): + print(text) + + def hello_world(): + display('Hello world') + +That finishes the setup on ``timmins``'s side. Next, we need to implement a plugin +which implements the entry point ``timmins.display``. Let us name this plugin +``timmins-plugin-fancy``, and set it up with the following directory structure:: + + timmins-plugin-fancy + ├── pyproject.toml # and/or setup.cfg, setup.py + └── src + └── timmins_plugin_fancy + └── __init__.py + +And then, inside ``src/timmins_plugin_fancy/__init__.py``, we can put a function +named ``excl_display()`` that prints the given text surrounded by exclamation marks: + +.. code-block:: python + + def excl_display(text): + print('!!!', text, '!!!') + +This is the ``display()``-like function that we are looking to supply to the +``timmins`` package. We can do that by adding the following to the configuration +of ``timmins-plugin-fancy``: + +.. tab:: pyproject.toml + + .. code-block:: toml + + # Note the quotes around timmins.display in order to escape the dot . + [project.entry-points."timmins.display"] + excl = "timmins_plugin_fancy:excl_display" - >>> from importlib import metadata - >>> eps = metadata.entry_points()['console_scripts'] +.. tab:: setup.cfg -``eps`` is now a list of ``EntryPoint`` objects, one of which corresponds -to the ``hello-world = timmins:hello_world`` defined above. Each ``EntryPoint`` -contains the ``name``, ``group``, and ``value``. It also supplies a ``.load()`` -method to import and load that entry point (module or object). + .. code-block:: ini -.. code-block:: ini + [options.entry_points] + timmins.display = + excl = timmins_plugin_fancy:excl_display - [options.entry_points] - my.plugins = - hello-world = timmins:hello_world +.. tab:: setup.py -Then, a different project wishing to load 'my.plugins' plugins could run -the following routine to load (and invoke) such plugins: + .. code-block:: python + + from setuptools import setup + + setup( + # ..., + entry_points = { + 'timmins.display': [ + 'excl = timmins_plugin_fancy:excl_display' + ] + } + ) + +Basically, this configuration states that we are supplying an entry point +under the group ``timmins.display``. The entry point is named ``excl`` and it +refers to the function ``excl_display`` defined by the package ``timmins-plugin-fancy``. + +Now, if we install both ``timmins`` and ``timmins-plugin-fancy``, we should get +the following: .. code-block:: pycon - >>> from importlib import metadata - >>> eps = metadata.entry_points()['my.plugins'] - >>> for ep in eps: - ... plugin = ep.load() - ... plugin() - ... + >>> from timmins import hello_world + >>> hello_world() + !!! Hello world !!! + +whereas if we only install ``timmins`` and not ``timmins-plugin-fancy``, we should +get the following: + +.. code-block:: pycon + + >>> from timmins import hello_world + >>> hello_world() + Hello world + +Therefore, our plugin works. + +Our plugin could have also defined multiple entry points under the group ``timmins.display``. +For example, in ``src/timmins_plugin_fancy/__init__.py`` we could have two ``display()``-like +functions, as follows: + +.. 
code-block:: python + + def excl_display(text): + print('!!!', text, '!!!') + + def lined_display(text): + print(''.join(['-' for _ in text])) + print(text) + print(''.join(['-' for _ in text])) + +The configuration of ``timmins-plugin-fancy`` would then change to: + +.. tab:: pyproject.toml + + .. code-block:: toml -The project soliciting the entry points needs not to have any dependency -or prior knowledge about the libraries implementing the entry points, and + [project.entry-points."timmins.display"] + excl = "timmins_plugin_fancy:excl_display" + lined = "timmins_plugin_fancy:lined_display" + +.. tab:: setup.cfg + + .. code-block:: ini + + [options.entry_points] + timmins.display = + excl = timmins_plugin_fancy:excl_display + lined = timmins_plugin_fancy:lined_display + +.. tab:: setup.py + + .. code-block:: python + + from setuptools import setup + + setup( + # ..., + entry_points = { + 'timmins.display': [ + 'excl = timmins_plugin_fancy:excl_display', + 'lined = timmins_plugin_fancy:lined_display', + ] + } + ) + +On the ``timmins`` side, we can also use a different strategy of loading entry +points. For example, we can search for a specific display style: + +.. code-block:: python + + display_eps = entry_points(group='timmins.display') + try: + display = display_eps['lined'].load() + except KeyError: + # if the 'lined' display is not available, use something else + ... + +Or we can also load all plugins under the given group. Though this might not +be of much use in our current example, there are several scenarios in which this +is useful: + +.. code-block:: python + + display_eps = entry_points(group='timmins.display') + for ep in display_eps: + display = ep.load() + # do something with display + ... + +Another point is that in this particular example, we have used plugins to +customize the behaviour of a function (``display()``). In general, we can use entry +points to enable plugins to not only customize the behaviour of functions, but also +of entire classes and modules. This is unlike the case of console/GUI scripts, +where entry points can only refer to functions. The syntax used for specifying the +entry points remains the same as for console/GUI scripts, and is discussed in the +`last section <#entry-points-syntax>`_. + +.. tip:: + The recommended approach for loading and importing entry points is the + :mod:`importlib.metadata` module, + which is a part of the standard library since Python 3.8. For older versions of + Python, its backport :pypi:`importlib_metadata` should be used. While using the + backport, the only change that has to be made is to replace ``importlib.metadata`` + with ``importlib_metadata``, i.e. + + .. code-block:: python + + from importlib_metadata import entry_points + ... + +In summary, entry points allow a package to open its functionalities for +customization via plugins. +The package soliciting the entry points need not have any dependency +or prior knowledge about the plugins implementing the entry points, and downstream users are able to compose functionality by pulling together -libraries implementing the entry points. +plugins implementing the entry points. + +Entry Points Syntax +=================== + +The syntax for entry points is specified as follows:: + + = [:[.[.]*]] + +Here, the square brackets ``[]`` denote optionality and the asterisk ``*`` +denotes repetition. 
+``name`` is the name of the script/entry point you want to create, the left hand +side of ``:`` is the package or module that contains the object you want to invoke +(think about it as something you would write in an import statement), and the right +hand side is the object you want to invoke (e.g. a function). + +To make this syntax more clear, consider the following examples: + +Package or module + If you supply:: + + = + + as the entry point, where ```` can contain ``.`` in the case + of sub-modules or sub-packages, then, tools in the Python ecosystem will roughly + interpret this value as: + + .. code-block:: python + + import + parsed_value = + +Module-level object + If you supply:: + + = : + + where ```` does not contain any ``.``, this will be roughly interpreted + as: + + .. code-block:: python + + from import + parsed_value = + +Nested object + If you supply:: + + = :.. + + this will be roughly interpreted as: + .. code-block:: python -Dependency Management -===================== + from import + parsed_value = .. -Some entry points may require additional dependencies to properly function. -For such an entry point, declare in square brackets any number of dependency -``extras`` following the entry point definition. Such entry points will only -be viable if their extras were declared and installed. See the -:doc:`guide on dependencies management ` for -more information on defining extra requirements. Consider from the -above example: +In the case of console/GUI scripts, this syntax can be used to specify a function, while +in the general case of entry points as used for plugins, it can be used to specify a function, +class or module. -.. code-block:: ini +---- - [options.entry_points] - console_scripts = - hello-world = timmins:hello_world [pretty-printer] +.. [#use_for_scripts] + Reference: https://packaging.python.org/en/latest/specifications/entry-points/#use-for-scripts -In this case, the ``hello-world`` script is only viable if the ``pretty-printer`` -extra is indicated, and so a plugin host might exclude that entry point -(i.e. not install a console script) if the relevant extra dependencies are not -installed. +.. [#package_metadata] + Reference: https://packaging.python.org/en/latest/guides/creating-and-discovering-plugins/#using-package-metadata diff --git a/docs/userguide/ext_modules.rst b/docs/userguide/ext_modules.rst new file mode 100644 index 0000000000..8c193856ac --- /dev/null +++ b/docs/userguide/ext_modules.rst @@ -0,0 +1,172 @@ +========================== +Building Extension Modules +========================== + +Setuptools can build C/C++ extension modules. The keyword argument +``ext_modules`` of ``setup()`` should be a list of instances of the +:class:`setuptools.Extension` class. + + +For example, let's consider a simple project with only one extension module:: + + + ├── pyproject.toml + └── foo.c + +and all project metadata configuration in the ``pyproject.toml`` file: + +.. code-block:: toml + + # pyproject.toml + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + + [project] + name = "mylib-foo" # as it would appear on PyPI + version = "0.42" + +To instruct setuptools to compile the ``foo.c`` file into the extension module +``mylib.foo``, we need to add a ``setup.py`` file similar to the following: + +.. 
code-block:: python
+
+    from setuptools import Extension, setup
+
+    setup(
+        ext_modules=[
+            Extension(
+                name="mylib.foo",  # as it would be imported
+                                   # may include packages/namespaces separated by `.`
+
+                sources=["foo.c"],  # all sources are compiled into a single binary file
+            ),
+        ]
+    )
+
+.. seealso::
+    You can find more information on the `Python docs about C/C++ extensions`_.
+    Alternatively, you might also be interested in learning about `Cython`_.
+
+    If you plan to distribute a package that uses extensions across multiple
+    platforms, :pypi:`cibuildwheel` can also be helpful.
+
+.. important::
+    All files used to compile your extension need to be available on the system
+    when building the package, so please make sure to include some documentation
+    on how developers interested in building your package from source
+    can obtain operating system level dependencies
+    (e.g. compilers and external binary libraries/artifacts).
+
+    You will also need to make sure that all auxiliary files that are contained
+    inside your :term:`project` (e.g. C headers authored by you or your team)
+    are configured to be included in your :term:`sdist <Source Distribution (or "sdist")>`.
+    Please have a look at our section on :ref:`Controlling files in the distribution`.
+
+
+Compiler and linker options
+===========================
+
+The command ``build_ext`` builds C/C++ extension modules. It creates
+a command line for running the compiler and linker by combining
+compiler and linker options from various sources:
+
+.. Reference: `test_customize_compiler` in distutils/tests/test_sysconfig.py
+
+* the ``sysconfig`` variables ``CC``, ``CXX``, ``CCSHARED``,
+  ``LDSHARED``, and ``CFLAGS``,
+* the environment variables ``CC``, ``CPP``,
+  ``CXX``, ``LDSHARED`` and ``CFLAGS``,
+  ``CPPFLAGS``, ``LDFLAGS``,
+* the ``Extension`` attributes ``include_dirs``,
+  ``library_dirs``, ``extra_compile_args``, ``extra_link_args``,
+  ``runtime_library_dirs``.
+
+.. Ignoring AR, ARFLAGS, RANLIB here because they are used by the (obsolete?) build_clib, not build_ext.
+
+Specifically, if the environment variables ``CC``, ``CPP``, ``CXX``, and ``LDSHARED``
+are set, they will be used instead of the ``sysconfig`` variables of the same names.
+
+The compiler options appear in the command line in the following order:
+
+.. Reference: "compiler_so" and distutils.ccompiler.gen_preprocess_options, CCompiler.compile, UnixCCompiler._compile
+
+* first, the options provided by the ``sysconfig`` variable ``CFLAGS``,
+* then, the options provided by the environment variables ``CFLAGS`` and ``CPPFLAGS``,
+* then, the options provided by the ``sysconfig`` variable ``CCSHARED``,
+* then, a ``-I`` option for each element of ``Extension.include_dirs``,
+* finally, the options provided by ``Extension.extra_compile_args``.
+
+The linker options appear in the command line in the following order:
+
+.. Reference: "linker_so" and CCompiler.link
+
+* first, the options provided by environment variables and ``sysconfig`` variables,
+* then, a ``-L`` option for each element of ``Extension.library_dirs``,
+* then, a linker-specific option like ``-Wl,-rpath`` for each element of ``Extension.runtime_library_dirs``,
+* finally, the options provided by ``Extension.extra_link_args``.
+
+The resulting command line is then processed by the compiler and linker.
+
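+As an illustration (a minimal sketch with made-up option values, not a
+recommendation), the following extension contributes one ``-I`` option and a
+trailing ``-O3`` to the compiler command line, while an environment variable
+such as ``CFLAGS="-g"`` set at build time would be inserted before both,
+following the ordering described above:
+
+.. code-block:: python
+
+    from setuptools import Extension, setup
+
+    setup(
+        ext_modules=[
+            Extension(
+                name="mylib.foo",
+                sources=["foo.c"],
+                include_dirs=["include"],    # becomes `-Iinclude`
+                extra_compile_args=["-O3"],  # appended after all other compiler options
+            ),
+        ]
+    )
+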
+According to the GCC manual sections on `directory options`_ and
+`environment variables`_, the C/C++ compiler searches for files named in
+``#include <file>`` directives in the following order:
+
+* first, in directories given by ``-I`` options (in left-to-right order),
+* then, in directories given by the environment variable ``CPATH`` (in left-to-right order),
+* then, in directories given by ``-isystem`` options (in left-to-right order),
+* then, in directories given by the environment variable ``C_INCLUDE_PATH`` (for C) and ``CPLUS_INCLUDE_PATH`` (for C++),
+* then, in standard system directories,
+* finally, in directories given by ``-idirafter`` options (in left-to-right order).
+
+The linker searches for libraries in the following order:
+
+* first, in directories given by ``-L`` options (in left-to-right order),
+* then, in directories given by the environment variable ``LIBRARY_PATH`` (in left-to-right order).
+
+
+Distributing Extensions compiled with Cython
+============================================
+
+When your :pypi:`Cython` extension modules *are declared using the*
+:class:`setuptools.Extension` *class*, ``setuptools`` will detect at build time
+whether Cython is installed or not.
+
+If Cython is present, then ``setuptools`` will use it to build the ``.pyx`` files.
+Otherwise, ``setuptools`` will try to find and compile the equivalent ``.c`` files
+(instead of ``.pyx``). These files can be generated using the
+`cython command line tool`_.
+
+You can ensure that Cython is always automatically installed into the build
+environment by including it as a :ref:`build dependency <build-requires>` in
+your ``pyproject.toml``:
+
+.. code-block:: toml
+
+    [build-system]
+    requires = [..., "cython"]
+
+Alternatively, you can include the ``.c`` code that is pre-compiled by Cython
+into your source distribution, alongside the original ``.pyx`` files (this
+might save a few seconds when building from an ``sdist``).
+To improve version compatibility, you probably also want to include current
+``.c`` files in your :wiki:`revision control system`, and rebuild them whenever
+you check in changes for the ``.pyx`` source files.
+This will ensure that people tracking your project will be able to build it
+without installing Cython, and that there will be no variation due to small
+differences in the generated C files.
+Please check out our docs on :ref:`controlling files in the distribution` for
+more information.
+
+----
+
+Extension API Reference
+=======================
+
+.. autoclass:: setuptools.Extension
+
+
+.. _Python docs about C/C++ extensions: https://docs.python.org/3/extending/extending.html
+.. _Cython: https://cython.readthedocs.io/en/stable/index.html
+.. _directory options: https://gcc.gnu.org/onlinedocs/gcc/Directory-Options.html
+.. _environment variables: https://gcc.gnu.org/onlinedocs/gcc/Environment-Variables.html
+.. _cython command line tool: https://cython.readthedocs.io/en/stable/src/userguide/source_files_and_compilation.html
diff --git a/docs/userguide/extension.rst b/docs/userguide/extension.rst
index d74ca3fe08..e1e37b5db1 100644
--- a/docs/userguide/extension.rst
+++ b/docs/userguide/extension.rst
@@ -1,60 +1,96 @@
 .. _Creating ``distutils`` Extensions:
 
-Creating ``distutils`` Extensions
-=================================
+Extending or Customizing Setuptools
+===================================
 
-It can be hard to add new commands or setup arguments to the distutils. But
-the ``setuptools`` package makes it a bit easier, by allowing you to distribute
-a distutils extension as a separate project, and then have projects that need
-the extension just refer to it in their ``setup_requires`` argument.
+Setuptools' design is based on the distutils_ package originally distributed
+as part of Python's standard library, effectively serving as its successor
+(as established in :pep:`632`).
 
-With ``setuptools``, your distutils extension projects can hook in new
+This means that ``setuptools`` strives to honor the extension mechanisms
+provided by ``distutils``, and allows developers to create third party packages
+that modify or augment the build process behavior.
+
+A simple way of doing that is to hook in new
 or existing commands and ``setup()`` arguments just by defining "entry
 points". These are mappings from command or argument names to a specification
 of where to import a handler from. (See the section on :ref:`Dynamic Discovery of
-Services and Plugins` above for some more background on entry points.)
+Services and Plugins` for some more background on entry points).
+
+The following sections describe the most common procedures for extending
+the ``distutils`` functionality used by ``setuptools``.
+
+.. important::
+    Any entry points defined in your ``setup.cfg``, ``setup.py`` or
+    ``pyproject.toml`` files are not immediately available for use. Your
+    package needs to be installed first, then ``setuptools`` will be able to
+    access these entry points. For example consider a ``Project-A`` that
+    defines entry points. When building ``Project-A``, these will not be
+    available. If ``Project-B`` declares a :doc:`build system requirement
+    </userguide/dependency_management>` on ``Project-A``, then ``setuptools``
+    will be able to use ``Project-A``'s customizations.
+
+Customizing Commands
+--------------------
+
+Both ``setuptools`` and ``distutils`` are structured around the *command design
+pattern*. This means that each main action executed when building a
+distribution package (such as creating a :term:`sdist <Source Distribution (or "sdist")>`
+or :term:`wheel`) corresponds to the implementation of a Python class.
+
+Originally in ``distutils``, these commands would correspond to actual CLI
+arguments that could be passed to the ``setup.py`` script to trigger a
+different aspect of the build. In ``setuptools``, however, these command
+objects are just a design abstraction that encapsulates logic and helps to
+organise the code.
+
+You can overwrite existing commands (or add new ones) by defining entry
+points in the ``distutils.commands`` group. For example, if you wanted to add
+a ``foo`` command, you might add something like this to your project:
+
+.. code-block:: ini
+
+    # setup.cfg
+    ...
+    [options.entry_points]
+    distutils.commands =
+        foo = mypackage.some_module:foo
+
+Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
+a ``setuptools.Command`` subclass (documented below).
+Once a project containing such entry points has been activated on ``sys.path``
+(e.g. by running ``pip install``), the command(s) will be available to any
+``setuptools``-based project. In fact, this is
+how setuptools' own commands are installed: the setuptools project's setup
+script defines entry points for them!
 
-Adding Commands
----------------
+The commands ``sdist``, ``build_py`` and ``build_ext`` are especially useful
+to customize ``setuptools`` builds. Note however that when overwriting existing
+commands, you should be very careful to maintain API compatibility.
+Custom commands should try to replicate the same overall behavior as the +original classes, and when possible, even inherit from them. -You can add new ``setup`` commands by defining entry points in the -``distutils.commands`` group. For example, if you wanted to add a ``foo`` -command, you might add something like this to your distutils extension -project's setup script:: +You should also consider handling exceptions such as ``CompileError``, +``LinkError``, ``LibError``, among others. These exceptions are available in +the ``setuptools.errors`` module. - setup( - # ... - entry_points={ - "distutils.commands": [ - "foo = mypackage.some_module:foo", - ], - }, - ) +.. autoclass:: setuptools.Command + :members: -(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is -a ``setuptools.Command`` subclass.) -Once a project containing such entry points has been activated on ``sys.path``, -(e.g. by running "install" or "develop" with a site-packages installation -directory) the command(s) will be available to any ``setuptools``-based setup -scripts. It is not necessary to use the ``--command-packages`` option or -to monkeypatch the ``distutils.command`` package to install your commands; -``setuptools`` automatically adds a wrapper to the distutils to search for -entry points in the active distributions on ``sys.path``. In fact, this is -how setuptools' own commands are installed: the setuptools project's setup -script defines entry points for them! +Supporting sdists and editable installs in ``build`` sub-commands +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. note:: - When creating commands, and specially when defining custom ways of building - compiled extensions (for example via ``build_ext``), consider - handling exceptions such as ``CompileError``, ``LinkError``, ``LibError``, - among others. These exceptions are available in the ``setuptools.errors`` - module. +``build`` sub-commands (like ``build_py`` and ``build_ext``) +are encouraged to implement the following protocol: +.. autoclass:: setuptools.command.build.SubCommand + :members: -Adding ``setup()`` Arguments ----------------------------- + +Adding Arguments +---------------- .. warning:: Adding arguments to setup is discouraged as such arguments are only supported through imperative execution and not supported through @@ -64,62 +100,54 @@ Sometimes, your commands may need additional arguments to the ``setup()`` call. You can enable this by defining entry points in the ``distutils.setup_keywords`` group. For example, if you wanted a ``setup()`` argument called ``bar_baz``, you might add something like this to your -distutils extension project's setup script:: - - setup( - # ... - entry_points={ - "distutils.commands": [ - "foo = mypackage.some_module:foo", - ], - "distutils.setup_keywords": [ - "bar_baz = mypackage.some_module:validate_bar_baz", - ], - }, - ) +extension project: + +.. code-block:: ini + + # setup.cfg + ... + [options.entry_points] + distutils.commands = + foo = mypackage.some_module:foo + distutils.setup_keywords = + bar_baz = mypackage.some_module:validate_bar_baz The idea here is that the entry point defines a function that will be called to validate the ``setup()`` argument, if it's supplied. The ``Distribution`` object will have the initial value of the attribute set to ``None``, and the validation function will only be called if the ``setup()`` call sets it to -a non-None value. Here's an example validation function:: +a non-``None`` value. 
Here's an example validation function:: def assert_bool(dist, attr, value): """Verify that value is True, False, 0, or 1""" if bool(value) != value: - raise DistutilsSetupError( + raise SetupError( "%r must be a boolean value (got %r)" % (attr,value) ) Your function should accept three arguments: the ``Distribution`` object, the attribute name, and the attribute value. It should raise a -``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument -is invalid. Remember, your function will only be called with non-None values, -and the default value of arguments defined this way is always None. So, your +``SetupError`` (from the ``setuptools.errors`` module) if the argument +is invalid. Remember, your function will only be called with non-``None`` values, +and the default value of arguments defined this way is always ``None``. So, your commands should always be prepared for the possibility that the attribute will be ``None`` when they access it later. If more than one active distribution defines an entry point for the same ``setup()`` argument, *all* of them will be called. This allows multiple -distutils extensions to define a common argument, as long as they agree on +extensions to define a common argument, as long as they agree on what values of that argument are valid. -Also note that as with commands, it is not necessary to subclass or monkeypatch -the distutils ``Distribution`` class in order to add your arguments; it is -sufficient to define the entry points in your extension, as long as any setup -script using your extension lists your project in its ``setup_requires`` -argument. - Customizing Distribution Options -------------------------------- -Plugins may wish to extend or alter the options on a Distribution object to +Plugins may wish to extend or alter the options on a ``Distribution`` object to suit the purposes of that project. For example, a tool that infers the ``Distribution.version`` from SCM-metadata may need to hook into the option finalization. To enable this feature, Setuptools offers an entry -point "setuptools.finalize_distribution_options". That entry point must -be a callable taking one argument (the Distribution instance). +point ``setuptools.finalize_distribution_options``. That entry point must +be a callable taking one argument (the ``Distribution`` instance). If the callable has an ``.order`` property, that value will be used to determine the order in which the hook is called. Lower numbers are called @@ -130,36 +158,48 @@ plugin is encouraged to load the configuration/settings for their behavior independently. +Defining Additional Metadata +---------------------------- + +Some extensible applications and frameworks may need to define their own kinds +of metadata, which they can then access using the :mod:`importlib.metadata` APIs. +Ordinarily, this is done by having plugin +developers include additional files in their ``ProjectName.egg-info`` +directory. However, since it can be tedious to create such files by hand, you +may want to create an extension that will create the necessary files +from arguments to ``setup()``, in much the same way that ``setuptools`` does +for many of the ``setup()`` arguments it adds. See the section below for more +details. + + .. 
_Adding new EGG-INFO Files: Adding new EGG-INFO Files -------------------------- +~~~~~~~~~~~~~~~~~~~~~~~~~ Some extensible applications or frameworks may want to allow third parties to develop plugins with application or framework-specific metadata included in the plugins' EGG-INFO directory, for easy access via the ``pkg_resources`` -metadata API. The easiest way to allow this is to create a distutils extension +metadata API. The easiest way to allow this is to create an extension to be used from the plugin projects' setup scripts (via ``setup_requires``) that defines a new setup keyword, and then uses that data to write an EGG-INFO file when the ``egg_info`` command is run. The ``egg_info`` command looks for extension points in an ``egg_info.writers`` -group, and calls them to write the files. Here's a simple example of a -distutils extension defining a setup argument ``foo_bar``, which is a list of +group, and calls them to write the files. Here's a simple example of an +extension defining a setup argument ``foo_bar``, which is a list of lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any -project that uses the argument:: - - setup( - # ... - entry_points={ - "distutils.setup_keywords": [ - "foo_bar = setuptools.dist:assert_string_list", - ], - "egg_info.writers": [ - "foo_bar.txt = setuptools.command.egg_info:write_arg", - ], - }, - ) +project that uses the argument: + +.. code-block:: ini + + # setup.cfg + ... + [options.entry_points] + distutils.setup_keywords = + foo_bar = setuptools.dist:assert_string_list + egg_info.writers = + foo_bar.txt = setuptools.command.egg_info:write_arg This simple example makes use of two utility functions defined by setuptools for its own use: a routine to validate that a setup keyword is a sequence of @@ -179,11 +219,11 @@ write (e.g. ``foo_bar.txt``), and the actual full filename that should be written to. In general, writer functions should honor the command object's ``dry_run`` -setting when writing files, and use the ``distutils.log`` object to do any -console output. The easiest way to conform to this requirement is to use +setting when writing files, and use ``logging`` to do any console output. +The easiest way to conform to this requirement is to use the ``cmd`` object's ``write_file()``, ``delete_file()``, and -``write_or_delete_file()`` methods exclusively for your file operations. See -those methods' docstrings for more details. +``write_or_delete_file()`` methods exclusively for your file operations. +See those methods' docstrings for more details. .. _Adding Support for Revision Control Systems: @@ -194,8 +234,8 @@ Adding Support for Revision Control Systems If the files you want to include in the source distribution are tracked using Git, Mercurial or SVN, you can use the following packages to achieve that: -- Git and Mercurial: `setuptools_scm `_ -- SVN: `setuptools_svn `_ +- Git and Mercurial: :pypi:`setuptools_scm` +- SVN: :pypi:`setuptools_svn` If you would like to create a plugin for ``setuptools`` to find files tracked by another revision control system, you can do so by adding an entry point to @@ -212,13 +252,16 @@ called "foobar", you would write a function something like this: def find_files_for_foobar(dirname): ... # loop to yield paths that start with `dirname` -And you would register it in a setup script using something like this:: +And you would register it in a setup script using something like this: + +.. code-block:: ini + + # setup.cfg + ... 
-    entry_points={
-        "setuptools.file_finders": [
-            "foobar = my_foobar_module:find_files_for_foobar",
-        ]
-    }
+    [options.entry_points]
+    setuptools.file_finders =
+        foobar = my_foobar_module:find_files_for_foobar
 
 Then, anyone who wants to use your plugin can simply install it, and their
 local setuptools installation will be able to find the necessary files.
@@ -246,5 +289,20 @@ A few important points for writing revision control file finders:
 
 * Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
   with the absence of needed programs (i.e., ones belonging to the revision
-  control system itself.  It *may*, however, use ``distutils.log.warn()`` to
+  control system itself). It *may*, however, use ``logging.warning()`` to
   inform the user of the missing program(s).
+
+
+.. _distutils: https://docs.python.org/3.9/library/distutils.html
+
+
+Final Remarks
+-------------
+
+* To use a ``setuptools`` plugin, your users will need to add your package as a
+  build requirement to their build-system configuration. Please check out our
+  guides on :doc:`/userguide/dependency_management` for more information.
+
+* Directly calling ``python setup.py ...`` is considered a **deprecated** practice.
+  You should not add new commands to ``setuptools`` expecting them to be run
+  via this interface.
diff --git a/docs/userguide/functionalities_rewrite.rst b/docs/userguide/functionalities_rewrite.rst
deleted file mode 100644
index d0997ca67d..0000000000
--- a/docs/userguide/functionalities_rewrite.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-========================================================
-Using setuptools to package and distribute your project
-========================================================
-
-``setuptools`` offers a variety of functionalities that make it easy to
-build and distribute your python package. Here we provide an overview on
-the commonly used ones.
-
-
diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index eca5a85ad8..d631c5d8ac 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -2,36 +2,47 @@
 Building and Distributing Packages with Setuptools
 ==================================================
 
-``Setuptools`` is a collection of enhancements to the Python ``distutils``
-that allow developers to more easily build and
-distribute Python packages, especially ones that have dependencies on other
-packages.
+The first step towards sharing a Python library or program is to build a
+distribution package [#package-overload]_. This includes adding a set of
+additional files containing metadata and configuration to not only instruct
+``setuptools`` on how the distribution should be built but also
+to help installers (such as :pypi:`pip`) during the installation process.
 
-Packages built and distributed using ``setuptools`` look to the user like
-ordinary Python packages based on the ``distutils``.
+This document contains information to help Python developers through this
+process. Please check the :doc:`/userguide/quickstart` for an overview of
+the workflow.
 
-Transition to PEP517
-====================
+Also note that ``setuptools`` is what is known in the community as a :pep:`build
+backend <517#terminology-and-goals>`; user-facing interfaces are provided by tools
+such as :pypi:`pip` and :pypi:`build`. To use ``setuptools``, one must
+explicitly create a ``pyproject.toml`` file as described in :doc:`/build_meta`.
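+
+A minimal sketch of such a file could look like this (the ``[project]``
+values are placeholders, just for illustration):
+
+.. code-block:: toml
+
+    # pyproject.toml
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+
+    [project]
+    name = "mypkg"  # placeholder
+    version = "0.0.1"  # placeholder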
-Since setuptools no longer serves as the default build tool, one must explicitly
-opt in (by providing a :file:`pyproject.toml` file) to use this library. The user
-facing part is provided by tools such as pip and
-backend interface is described :doc:`in this document <../build_meta>`. The
-quickstart provides an overview of the new workflow.
+
+Contents
+========
 
 .. toctree::
     :maxdepth: 1
 
     quickstart
     package_discovery
-    entry_point
     dependency_management
-    datafiles
     development_mode
+    entry_point
+    datafiles
+    ext_modules
    distribution
+    miscellaneous
     extension
     declarative_config
-    keywords
-    commands
-    functionalities_rewrite
-    miscellaneous
+    pyproject_config
+
+----
+
+.. rubric:: Notes
+
+.. [#package-overload]
+    A :term:`Distribution Package` is also referred to in the Python community simply as "package".
+    Unfortunately, this jargon might be a bit confusing for new users because the term package
+    can also refer to any :term:`directory <package>` (or sub directory) used to organize
+    :term:`modules <module>` and auxiliary files.
diff --git a/docs/userguide/keywords.rst b/docs/userguide/keywords.rst
deleted file mode 100644
index 5388ffea7f..0000000000
--- a/docs/userguide/keywords.rst
+++ /dev/null
@@ -1,163 +0,0 @@
-New and Changed ``setup()`` Keywords
-====================================
-
-The following keyword arguments to ``setup()`` are added or changed by
-``setuptools``. All of them are optional; you do not have to supply them
-unless you need the associated ``setuptools`` feature.
-
-``include_package_data``
-    If set to ``True``, this tells ``setuptools`` to automatically include any
-    data files it finds inside your package directories that are specified by
-    your ``MANIFEST.in`` file. For more information, see the section on
-    :ref:`Including Data Files`.
-
-``exclude_package_data``
-    A dictionary mapping package names to lists of glob patterns that should
-    be *excluded* from your package directories. You can use this to trim back
-    any excess files included by ``include_package_data``. For a complete
-    description and examples, see the section on :ref:`Including Data Files`.
-
-``package_data``
-    A dictionary mapping package names to lists of glob patterns. For a
-    complete description and examples, see the section on :ref:`Including
-    Data Files`. You do not need to use this option if you are using
-    ``include_package_data``, unless you need to add e.g. files that are
-    generated by your setup script and build process. (And are therefore not
-    in source control or are files that you don't want to include in your
-    source distribution.)
-
-``zip_safe``
-    A boolean (True or False) flag specifying whether the project can be
-    safely installed and run from a zip file. If this argument is not
-    supplied, the ``bdist_egg`` command will have to analyze all of your
-    project's contents for possible problems each time it builds an egg.
-
-``install_requires``
-    A string or list of strings specifying what other distributions need to
-    be installed when this one is. See the section on :ref:`Declaring
-    Dependencies` for details and examples of the format of this argument.
-
-``entry_points``
-    A dictionary mapping entry point group names to strings or lists of strings
-    defining the entry points. Entry points are used to support dynamic
-    discovery of services or plugins provided by a project. See :ref:`Dynamic
-    Discovery of Services and Plugins` for details and examples of the format
-    of this argument. In addition, this keyword is used to support
-    :ref:`Automatic Script Creation `.
- -``extras_require`` - A dictionary mapping names of "extras" (optional features of your project) - to strings or lists of strings specifying what other distributions must be - installed to support those features. See the section on :ref:`Declaring - Dependencies` for details and examples of the format of this argument. - -``python_requires`` - A string corresponding to a version specifier (as defined in PEP 440) for - the Python version, used to specify the Requires-Python defined in PEP 345. - -``setup_requires`` - A string or list of strings specifying what other distributions need to - be present in order for the *setup script* to run. ``setuptools`` will - attempt to obtain these (using pip if available) before processing the - rest of the setup script or commands. This argument is needed if you - are using distutils extensions as part of your build process; for - example, extensions that process setup() arguments and turn them into - EGG-INFO metadata files. - - (Note: projects listed in ``setup_requires`` will NOT be automatically - installed on the system where the setup script is being run. They are - simply downloaded to the ./.eggs directory if they're not locally available - already. If you want them to be installed, as well as being available - when the setup script is run, you should add them to ``install_requires`` - **and** ``setup_requires``.) - -``dependency_links`` - A list of strings naming URLs to be searched when satisfying dependencies. - These links will be used if needed to install packages specified by - ``setup_requires`` or ``tests_require``. They will also be written into - the egg's metadata for use during install by tools that support them. - -``namespace_packages`` - A list of strings naming the project's "namespace packages". A namespace - package is a package that may be split across multiple project - distributions. For example, Zope 3's ``zope`` package is a namespace - package, because subpackages like ``zope.interface`` and ``zope.publisher`` - may be distributed separately. The egg runtime system can automatically - merge such subpackages into a single parent package at runtime, as long - as you declare them in each project that contains any subpackages of the - namespace package, and as long as the namespace package's ``__init__.py`` - does not contain any code other than a namespace declaration. See the - section below on :ref:`Namespace Packages` for more information. - -``test_suite`` - A string naming a ``unittest.TestCase`` subclass (or a package or module - containing one or more of them, or a method of such a subclass), or naming - a function that can be called with no arguments and returns a - ``unittest.TestSuite``. If the named suite is a module, and the module - has an ``additional_tests()`` function, it is called and the results are - added to the tests to be run. If the named suite is a package, any - submodules and subpackages are recursively added to the overall test suite. - - Specifying this argument enables use of the :ref:`test ` command to run the - specified test suite, e.g. via ``setup.py test``. See the section on the - :ref:`test ` command below for more details. - - New in 41.5.0: Deprecated the test command. - -``tests_require`` - If your project's tests need one or more additional packages besides those - needed to install it, you can use this option to specify them. It should - be a string or list of strings specifying what other distributions need to - be present for the package's tests to run. 
When you run the ``test`` - command, ``setuptools`` will attempt to obtain these (using pip if - available). Note that these required projects will *not* be installed on - the system where the tests are run, but only downloaded to the project's setup - directory if they're not already installed locally. - - New in 41.5.0: Deprecated the test command. - -.. _test_loader: - -``test_loader`` - If you would like to use a different way of finding tests to run than what - setuptools normally uses, you can specify a module name and class name in - this argument. The named class must be instantiable with no arguments, and - its instances must support the ``loadTestsFromNames()`` method as defined - in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will - pass only one test "name" in the ``names`` argument: the value supplied for - the ``test_suite`` argument. The loader you specify may interpret this - string in any way it likes, as there are no restrictions on what may be - contained in a ``test_suite`` string. - - The module name and class name must be separated by a ``:``. The default - value of this argument is ``"setuptools.command.test:ScanningLoader"``. If - you want to use the default ``unittest`` behavior, you can specify - ``"unittest:TestLoader"`` as your ``test_loader`` argument instead. This - will prevent automatic scanning of submodules and subpackages. - - The module and class you specify here may be contained in another package, - as long as you use the ``tests_require`` option to ensure that the package - containing the loader class is available when the ``test`` command is run. - - New in 41.5.0: Deprecated the test command. - -``eager_resources`` - A list of strings naming resources that should be extracted together, if - any of them is needed, or if any C extensions included in the project are - imported. This argument is only useful if the project will be installed as - a zipfile, and there is a need to have all of the listed resources be - extracted to the filesystem *as a unit*. Resources listed here - should be "/"-separated paths, relative to the source root, so to list a - resource ``foo.png`` in package ``bar.baz``, you would include the string - ``bar/baz/foo.png`` in this argument. - - If you only need to obtain resources one at a time, or you don't have any C - extensions that access other files in the project (such as data files or - shared libraries), you probably do NOT need this argument and shouldn't - mess with it. For more details on how this argument works, see the section - below on :ref:`Automatic Resource Extraction`. - -``project_urls`` - An arbitrary map of URL names to hyperlinks, allowing more extensible - documentation of where various resources can be found than the simple - ``url`` and ``download_url`` options provide. diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index 3df327d795..19908e05ad 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -1,96 +1,100 @@ -.. _Automatic Resource Extraction: - -Automatic Resource Extraction ------------------------------ - -If you are using tools that expect your resources to be "real" files, or your -project includes non-extension native libraries or other files that your C -extensions expect to be able to access, you may need to list those files in -the ``eager_resources`` argument to ``setup()``, so that the files will be -extracted together, whenever a C extension in the project is imported. 
- -This is especially important if your project includes shared libraries *other* -than distutils-built C extensions, and those shared libraries use file -extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the -extensions that setuptools 0.6a8 and higher automatically detects as shared -libraries and adds to the ``native_libs.txt`` file for you. Any shared -libraries whose names do not end with one of those extensions should be listed -as ``eager_resources``, because they need to be present in the filesystem when -he C extensions that link to them are used. - -The ``pkg_resources`` runtime for compressed packages will automatically -extract *all* C extensions and ``eager_resources`` at the same time, whenever -*any* C extension or eager resource is requested via the ``resource_filename()`` -API. (C extensions are imported using ``resource_filename()`` internally.) -This ensures that C extensions will see all of the "real" files that they -expect to see. - -Note also that you can list directory resource names in ``eager_resources`` as -well, in which case the directory's contents (including subdirectories) will be -extracted whenever any C extension or eager resource is requested. - -Please note that if you're not sure whether you need to use this argument, you -don't! It's really intended to support projects with lots of non-Python -dependencies and as a last resort for crufty projects that can't otherwise -handle being compressed. If your package is pure Python, Python plus data -files, or Python plus C, you really don't need this. You've got to be using -either C or an external program that needs "real" files in your project before -there's any possibility of ``eager_resources`` being relevant to your project. - -Defining Additional Metadata ----------------------------- - -Some extensible applications and frameworks may need to define their own kinds -of metadata to include in eggs, which they can then access using the -``pkg_resources`` metadata APIs. Ordinarily, this is done by having plugin -developers include additional files in their ``ProjectName.egg-info`` -directory. However, since it can be tedious to create such files by hand, you -may want to create a distutils extension that will create the necessary files -from arguments to ``setup()``, in much the same way that ``setuptools`` does -for many of the ``setup()`` arguments it adds. See the section below on -:ref:`Creating ``distutils\`\` Extensions` for more details, especially the -subsection on :ref:`Adding new EGG-INFO Files`. - -Setting the ``zip_safe`` flag ------------------------------ - -For some use cases (such as bundling as part of a larger application), Python -packages may be run directly from a zip file. -Not all packages, however, are capable of running in compressed form, because -they may expect to be able to access either source code or data files as -normal operating system files. So, ``setuptools`` can install your project -as a zipfile or a directory, and its default choice is determined by the -project's ``zip_safe`` flag. - -You can pass a True or False value for the ``zip_safe`` argument to the -``setup()`` function, or you can omit it. If you omit it, the ``bdist_egg`` -command will analyze your project's contents to see if it can detect any -conditions that would prevent it from working in a zipfile. It will output -notices to the console about any such conditions that it finds. 
-
-Currently, this analysis is extremely conservative: it will consider the
-project unsafe if it contains any C extensions or datafiles whatsoever. This
-does *not* mean that the project can't or won't work as a zipfile! It just
-means that the ``bdist_egg`` authors aren't yet comfortable asserting that
-the project *will* work. If the project contains no C or data files, and does
-no ``__file__`` or ``__path__`` introspection or source code manipulation, then
-there is an extremely solid chance the project will work when installed as a
-zipfile. (And if the project uses ``pkg_resources`` for all its data file
-access, then C extensions and other data files shouldn't be a problem at all.
-See the :ref:`Accessing Data Files at Runtime` section above for more information.)
-
-However, if ``bdist_egg`` can't be *sure* that your package will work, but
-you've checked over all the warnings it issued, and you are either satisfied it
-*will* work (or if you want to try it for yourself), then you should set
-``zip_safe`` to ``True`` in your ``setup()`` call. If it turns out that it
-doesn't work, you can always change it to ``False``, which will force
-``setuptools`` to install your project as a directory rather than as a zipfile.
-
-In the future, as we gain more experience with different packages and become
-more satisfied with the robustness of the ``pkg_resources`` runtime, the
-"zip safety" analysis may become less conservative. However, we strongly
-recommend that you determine for yourself whether your project functions
-correctly when installed as a zipfile, correct any problems if you can, and
-then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
-flag, so that it will not be necessary for ``bdist_egg`` to try to guess
-whether your project can work as a zipfile.
+.. _Controlling files in the distribution:
+
+Controlling files in the distribution
+=====================================
+
+For the most common use cases, ``setuptools`` will automatically find out which
+files are necessary for distributing the package.
+These include all :term:`pure Python modules <Pure Module>` in the
+``py_modules`` or ``packages`` configuration, and the C sources (but not C
+headers) listed as part of extensions when creating a :term:`source
+distribution (or "sdist")`.
+
+However, when building more complex packages (e.g. packages that include
+non-Python files, or that need to use custom C headers), you might find that
+not all files present in your project folder are included in the package
+:term:`distribution archive <Distribution Package>`.
+
+If you are using a :wiki:`Revision Control System`, such as git_ or mercurial_,
+and your source distributions only need to include files that you're
+tracking in revision control, you can use a ``setuptools`` :ref:`plugin
+<Adding Support for Revision Control Systems>`, such as :pypi:`setuptools-scm` or
+:pypi:`setuptools-svn` to automatically include all tracked files into the ``sdist``.
+
+.. _Using MANIFEST.in:
+
+Alternatively, if you need finer control over the files (e.g. you don't want to
+distribute :wiki:`CI/CD`-related files) or you need automatically generated files,
+you can add a ``MANIFEST.in`` file at the root of your project,
+to specify any files that the default file location algorithm doesn't catch.
+
+This file contains instructions that tell ``setuptools`` which files exactly
+should be part of the ``sdist`` (or not).
+A comprehensive guide to ``MANIFEST.in`` syntax is available at the
+:doc:`PyPA's Packaging User Guide <PyPUG:guides/using-manifest-in>`.
+
+.. attention::
+    Please note that ``setuptools`` supports the ``MANIFEST.in``,
+    and not ``MANIFEST`` (no extension). Any documentation, tutorial or example
+    that recommends using ``MANIFEST`` (no extension) is likely outdated.
+
+.. tip::
+    The ``MANIFEST.in`` file contains commands that allow you to discover and
+    manipulate lists of files. There are many commands that can be used with
+    different objectives, but you should try not to make your ``MANIFEST.in``
+    file too fine-grained.
+
+    A good idea is to start with a ``graft`` command (to add all
+    files inside a set of directories) and then fine tune the file selection
+    by removing the excess or adding isolated files.
+
+An example of ``MANIFEST.in`` for a simple project that is organized according to a
+:ref:`src-layout` is:
+
+.. code-block:: bash
+
+    # MANIFEST.in -- just for illustration
+    graft src
+    graft tests
+    graft docs
+    # `-> adds all files inside a directory
+
+    include tox.ini
+    # `-> matches file paths relative to the root of the project
+
+    global-exclude *~ *.py[cod] *.so
+    # `-> matches file names (regardless of directory)
+
+Once the correct files are present in the ``sdist``, they can then be used by
+binary extensions during the build process, or included in the final
+:term:`wheel <Wheel>` [#build-process]_ if you configure ``setuptools`` with
+``include_package_data=True``.
+
+.. important::
+    Please note that, when using ``include_package_data=True``, only files **inside
+    the package directory** are included in the final ``wheel``, by default.
+
+    So for example, if you create a :term:`Python project <Project>` that uses
+    :pypi:`setuptools-scm` and have a ``tests`` directory outside of the package
+    folder, the ``tests`` directory will be present in the ``sdist`` but not in the
+    ``wheel`` [#wheel-vs-sdist]_.
+
+    See :doc:`/userguide/datafiles` for more information.
+
+----
+
+.. [#build-process]
+    You can think about the build process as two stages: first the ``sdist``
+    will be created and then the ``wheel`` will be produced from that ``sdist``.
+
+.. [#wheel-vs-sdist]
+    This happens because the ``sdist`` can contain files that are useful during
+    development or the build process itself, but not at runtime (e.g. tests,
+    docs, examples, etc.).
+    The ``wheel``, on the other hand, is a file format that has been optimized
+    and is ready to be unpacked into a running installation of Python or
+    :term:`Virtual Environment`.
+    Therefore it only contains items that are required at runtime.
+
+.. _git: https://git-scm.com
+.. _mercurial: https://www.mercurial-scm.org
diff --git a/docs/userguide/package_discovery.rst b/docs/userguide/package_discovery.rst
index 61da2d662c..9577a53468 100644
--- a/docs/userguide/package_discovery.rst
+++ b/docs/userguide/package_discovery.rst
@@ -1,23 +1,23 @@
 .. _`package_discovery`:
 
 ========================================
-Package Discovery and Namespace Package
+Package Discovery and Namespace Packages
 ========================================
 
 .. note::
-    a full specification for the keyword supplied to ``setup.cfg`` or
-    ``setup.py`` can be found at :doc:`keywords reference `
+    a full specification for the keywords supplied to ``setup.cfg`` or
+    ``setup.py`` can be found at :doc:`keywords reference </references/keywords>`
 
-.. note::
-    the examples provided here are only to demonstrate the functionality
+.. important::
+    The examples provided here are only to demonstrate the functionality
     introduced. More metadata and options arguments need to be supplied if
     you want to replicate them on your system. If you are completely
-    new to setuptools, the :doc:`quickstart section ` is a good
-    place to start.
+    new to setuptools, the :doc:`quickstart` section is a good place to start.
+
+``Setuptools`` provides powerful tools to handle package discovery, including
+support for namespace packages.
 
-``Setuptools`` provide powerful tools to handle package discovery, including
-support for namespace package. Normally, you would specify the package to be
-included manually in the following manner:
+Normally, you would specify the packages to be included manually in the following manner:
 
 .. tab:: setup.cfg
 
@@ -26,8 +26,9 @@ included manually in the following manner:
     [options]
     #...
    packages =
-        mypkg1
-        mypkg2
+        mypkg
+        mypkg.subpkg1
+        mypkg.subpkg2
 
 .. tab:: setup.py
 
@@ -35,11 +36,223 @@ included manually in the following manner:
     setup(
         # ...
-        packages=['mypkg1', 'mypkg2']
+        packages=['mypkg', 'mypkg.subpkg1', 'mypkg.subpkg2']
     )
 
-This can get tiresome really quickly. To speed things up, we introduce two
-functions provided by setuptools:
+.. tab:: pyproject.toml (**BETA**) [#beta]_
+
+    .. code-block:: toml
+
+        # ...
+        [tool.setuptools]
+        packages = ["mypkg", "mypkg.subpkg1", "mypkg.subpkg2"]
+        # ...
+
+
+If your packages are not in the root of the repository or do not correspond
+exactly to the directory structure, you also need to configure ``package_dir``:
+
+.. tab:: setup.cfg
+
+    .. code-block:: ini
+
+        [options]
+        # ...
+        package_dir =
+            = src
+            # directory containing all the packages (e.g. src/mypkg, src/mypkg/subpkg1, ...)
+        # OR
+        package_dir =
+            mypkg = lib
+            # mypkg.module corresponds to lib/module.py
+            mypkg.subpkg1 = lib1
+            # mypkg.subpkg1.module1 corresponds to lib1/module1.py
+            mypkg.subpkg2 = lib2
+            # mypkg.subpkg2.module2 corresponds to lib2/module2.py
+        # ...
+
+.. tab:: setup.py
+
+    .. code-block:: python
+
+        setup(
+            # ...
+            package_dir = {"": "src"}
+            # directory containing all the packages (e.g. src/mypkg, src/mypkg/subpkg1, ...)
+        )
+
+        # OR
+
+        setup(
+            # ...
+            package_dir = {
+                "mypkg": "lib",  # mypkg.module corresponds to lib/module.py
+                "mypkg.subpkg1": "lib1",  # mypkg.subpkg1.module1 corresponds to lib1/module1.py
+                "mypkg.subpkg2": "lib2",  # mypkg.subpkg2.module2 corresponds to lib2/module2.py
+                # ...
+            },
+        )
+
+.. tab:: pyproject.toml (**BETA**) [#beta]_
+
+    .. code-block:: toml
+
+        [tool.setuptools]
+        # ...
+        package-dir = {"" = "src"}
+            # directory containing all the packages (e.g. src/mypkg, src/mypkg/subpkg1, ...)
+
+        # OR
+
+        [tool.setuptools.package-dir]
+        mypkg = "lib"
+            # mypkg.module corresponds to lib/module.py
+        "mypkg.subpkg1" = "lib1"
+            # mypkg.subpkg1.module1 corresponds to lib1/module1.py
+        "mypkg.subpkg2" = "lib2"
+            # mypkg.subpkg2.module2 corresponds to lib2/module2.py
+        # ...
+
+This can get tiresome really quickly. To speed things up, you can rely on
+setuptools' automatic discovery, or use the provided tools, as explained in
+the following sections.
+
+.. important::
+    Although ``setuptools`` allows developers to create a very complex mapping
+    between directory names and package names, it is better to *keep it simple*
+    and reflect the desired package hierarchy in the directory structure,
+    preserving the same names.
+
+.. _auto-discovery:
+
+Automatic discovery
+===================
+
+.. warning:: Automatic discovery is a **beta** feature and might change in the future.
+    See :ref:`custom-discovery` for other methods of discovery.
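+
+Concretely, automatic discovery means a configuration that contains only
+metadata and no package listing (a minimal sketch; the name and version are
+placeholders):
+
+.. code-block:: toml
+
+    # pyproject.toml
+    [project]
+    name = "mypkg"  # placeholder
+    version = "0.0.1"  # placeholder
+    # no `packages` or `py_modules` configuration: auto-discovery applies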
+
+By default ``setuptools`` will consider 2 popular project layouts, each one with
+its own set of advantages and disadvantages [#layout1]_ [#layout2]_ as
+discussed in the following sections.
+
+Setuptools will automatically scan your project directory looking for these
+layouts and try to guess the correct values for the :ref:`packages <declarative config>`
+and :doc:`py_modules </references/keywords>` configuration.
+
+.. important::
+    Automatic discovery will **only** be enabled if you **don't** provide any
+    configuration for ``packages`` and ``py_modules``.
+    If at least one of them is explicitly set, automatic discovery will not take place.
+
+    **Note**: specifying ``ext_modules`` might also prevent auto-discovery from
+    taking place, unless you opt into :doc:`pyproject_config` (which will
+    disable the backward compatible behaviour).
+
+.. _src-layout:
+
+src-layout
+----------
+The project should contain a ``src`` directory under the project root and
+all modules and packages meant for distribution are placed inside this
+directory::
+
+    project_root_directory
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    ├── ...
+    └── src/
+        └── mypkg/
+            ├── __init__.py
+            ├── ...
+            ├── module.py
+            ├── subpkg1/
+            │   ├── __init__.py
+            │   ├── ...
+            │   └── module1.py
+            └── subpkg2/
+                ├── __init__.py
+                ├── ...
+                └── module2.py
+
+This layout is very handy when you wish to use automatic discovery,
+since you don't have to worry about other Python files or folders in your
+project root being distributed by mistake. In some circumstances it can
+also be less error-prone for testing or when using :pep:`420`-style packages.
+On the other hand you cannot rely on the implicit ``PYTHONPATH=.`` to fire
+up the Python REPL and play with your package (you will need an
+`editable install`_ to be able to do that).
+
+.. _flat-layout:
+
+flat-layout
+-----------
+*(also known as "adhoc")*
+
+The package folder(s) are placed directly under the project root::
+
+    project_root_directory
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    ├── ...
+    └── mypkg/
+        ├── __init__.py
+        ├── ...
+        ├── module.py
+        ├── subpkg1/
+        │   ├── __init__.py
+        │   ├── ...
+        │   └── module1.py
+        └── subpkg2/
+            ├── __init__.py
+            ├── ...
+            └── module2.py
+
+This layout is very practical for using the REPL, but in some situations
+it can be more error-prone (e.g. during tests or if you have a bunch
+of folders or Python files hanging around your project root).
+
+To avoid confusion, file and folder names that are used by popular tools (or
+that correspond to well-known conventions, such as distributing documentation
+alongside the project code) are automatically filtered out in the case of
+*flat-layout*:
+
+.. autoattribute:: setuptools.discovery.FlatLayoutPackageFinder.DEFAULT_EXCLUDE
+
+.. autoattribute:: setuptools.discovery.FlatLayoutModuleFinder.DEFAULT_EXCLUDE
+
+.. warning::
+    If you are using auto-discovery with *flat-layout*, ``setuptools`` will
+    refuse to create :term:`distribution archives <Distribution Package>` with
+    multiple top-level packages or modules.
+
+    This is done to prevent common errors such as accidentally publishing code
+    not meant for distribution (e.g. maintenance-related scripts).
+
+    Users that purposefully want to create multi-package distributions are
+    advised to use :ref:`custom-discovery` or the ``src-layout``.
+
+There is also a handy variation of the *flat-layout* for utilities/libraries
+that can be implemented with a single Python file:
+
+single-module distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A standalone module is placed directly under the project root, instead of
+inside a package folder::
+
+    project_root_directory
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    ├── ...
+    └── single_file_lib.py
+
+
+.. _custom-discovery:
+
+Custom discovery
+================
+
+If the automatic discovery does not work for you
+(e.g., you want to *include* in the distribution top-level packages with
+reserved names such as ``tasks``, ``example`` or ``docs``, or you want to
+*exclude* nested packages that would be otherwise included), you can use
+the provided tools for package discovery:
 
 .. tab:: setup.cfg
 
@@ -55,30 +268,42 @@ functions provided by setuptools:
 
     .. code-block:: python
 
        from setuptools import find_packages
-        # or
        from setuptools import find_namespace_packages
 
+.. tab:: pyproject.toml (**BETA**) [#beta]_
 
-Using ``find:`` or ``find_packages``
-====================================
-Let's start with the first tool. ``find:`` (``find_packages``) takes a source
-directory and two lists of package name patterns to exclude and include, and
-then return a list of ``str`` representing the packages it could find. To use
-it, consider the following directory
-
-.. code-block:: bash
+    .. code-block:: toml
 
-    mypkg/
-        src/
-            pkg1/__init__.py
-            pkg2/__init__.py
-            additional/__init__.py
+        # ...
+        [tool.setuptools.packages]
+        find = {}  # Scanning implicit namespaces is active by default
+        # OR
+        find = {namespaces = false}  # Disable implicit namespaces
 
-        setup.cfg #or setup.py
 
-To have your setup.cfg or setup.py to automatically include packages found
-in ``src`` that starts with the name ``pkg`` and not ``additional``:
+Finding simple packages
+-----------------------
+Let's start with the first tool. ``find:`` (``find_packages()``) takes a source
+directory and two lists of package name patterns to exclude and include, and
+then returns a list of ``str`` representing the packages it could find. To use
+it, consider the following directory::
+
+    mypkg
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    └── src
+        ├── pkg1
+        │   └── __init__.py
+        ├── pkg2
+        │   └── __init__.py
+        ├── additional
+        │   └── __init__.py
+        └── pkg
+            └── namespace
+                └── __init__.py
+
+To have setuptools automatically include packages found
+in ``src`` that start with the name ``pkg`` and not ``additional``:
 
 .. tab:: setup.cfg
 
@@ -92,7 +317,11 @@ in ``src`` that starts with the name ``pkg`` and not ``additional``:
 
     [options.packages.find]
     where = src
     include = pkg*
-    exclude = additional
+    # alternatively: `exclude = additional*`
+
+    .. note::
+        ``pkg`` does not contain an ``__init__.py`` file, therefore
+        ``pkg.namespace`` is ignored by ``find:`` (see ``find_namespace:`` below).
 
 .. tab:: setup.py
 
@@ -102,24 +331,59 @@ in ``src`` that starts with the name ``pkg`` and not ``additional``:
     setup(
         # ...
         packages=find_packages(
             where='src',
-            include=['pkg*'],
-            exclude=['additional'],
+            include=['pkg*'],  # alternatively: `exclude=['additional*']`
         ),
         package_dir={"": "src"}
         # ...
     )
+
+    .. note::
+        ``pkg`` does not contain an ``__init__.py`` file, therefore
+        ``pkg.namespace`` is ignored by ``find_packages()``
+        (see ``find_namespace_packages()`` below).
+
+.. tab:: pyproject.toml (**BETA**) [#beta]_
+
+    .. code-block:: toml
+
+        [tool.setuptools.packages.find]
+        where = ["src"]
+        include = ["pkg*"]  # alternatively: `exclude = ["additional*"]`
+        namespaces = false
+
+    .. note::
+        When using ``tool.setuptools.packages.find`` in ``pyproject.toml``,
+        setuptools will consider :pep:`implicit namespaces <420>` by default when
+        scanning your project directory.
+        To prevent ``pkg.namespace`` from being added to your package list
+        you can set ``namespaces = false``. This will prevent any folder
+        without an ``__init__.py`` file from being scanned.
+
+.. important::
+    ``include`` and ``exclude`` accept strings representing :mod:`glob` patterns.
+    These patterns should match the **full** name of the Python module (as if it
+    was written in an ``import`` statement).
+
+    For example, if you have the ``util`` pattern, it will match
+    ``util/__init__.py`` but not ``util/files/__init__.py``.
+
+    The fact that the parent package is matched by the pattern will not dictate
+    whether the submodule will be included or excluded from the distribution.
+    You will need to explicitly add a wildcard (e.g. ``util*``)
+    if you want the pattern to also match submodules.
+
 .. _Namespace Packages:
 
-Using ``find_namespace:`` or ``find_namespace_packages``
-========================================================
-``setuptools`` provides the ``find_namespace:`` (``find_namespace_packages``)
-which behaves similarly to ``find:`` but works with namespace package. Before
-diving in, it is important to have a good understanding of what namespace
-packages are. Here is a quick recap:
+Finding namespace packages
+--------------------------
+``setuptools`` provides ``find_namespace:`` (``find_namespace_packages()``)
+which behaves similarly to ``find:`` but works with namespace packages.
+
+Before diving in, it is important to have a good understanding of what
+:pep:`namespace packages <420>` are. Here is a quick recap.
 
-Suppose you have two packages named as follows:
+When you have two packages organized as follows:
 
 .. code-block:: bash
 
@@ -128,7 +392,7 @@ Suppose you have two packages named as follows:
 
 If both ``Desktop`` and ``Library`` are on your ``PYTHONPATH``, then a
 namespace package called ``timmins`` will be created automatically for you when
-you invoke the import mechanism, allowing you to accomplish the following
+you invoke the import mechanism, allowing you to accomplish the following:
 
 .. code-block:: pycon
 
@@ -137,62 +401,121 @@ you invoke the import mechanism, allowing you to accomplish the following
 
 as if there is only one ``timmins`` on your system. The two packages can then
 be distributed separately and installed individually without affecting the
-other one. Suppose you are packaging the ``foo`` part:
+other one.
 
-.. code-block:: bash
+Now, suppose you decide to package the ``foo`` part for distribution and start
+by creating a project directory organized as follows::
+
+    foo
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    └── src
+        └── timmins
+            └── foo
+                └── __init__.py
+
+If you want the ``timmins.foo`` to be automatically included in the
+distribution, then you will need to specify:
+
+.. tab:: setup.cfg
+
+    .. code-block:: ini
+
+        [options]
+        package_dir =
+            =src
+        packages = find_namespace:
+
+        [options.packages.find]
+        where = src
+
+    ``find:`` won't work because ``timmins`` doesn't contain ``__init__.py``
+    directly; instead, you have to use ``find_namespace:``.
+
+.. _Namespace Packages:
+
-Using ``find_namespace:`` or ``find_namespace_packages``
-========================================================
-``setuptools`` provides the ``find_namespace:`` (``find_namespace_packages``)
-which behaves similarly to ``find:`` but works with namespace package. Before
-diving in, it is important to have a good understanding of what namespace
-packages are. Here is a quick recap:
+Finding namespace packages
+--------------------------
+``setuptools`` provides ``find_namespace:`` (``find_namespace_packages()``)
+which behaves similarly to ``find:`` but works with namespace packages.
+
+Before diving in, it is important to have a good understanding of what
+:pep:`namespace packages <420>` are. Here is a quick recap.

-Suppose you have two packages named as follows:
+When you have two packages organized as follows:

.. code-block:: bash

@@ -128,7 +392,7 @@ Suppose you have two packages named as follows:

If both ``Desktop`` and ``Library`` are on your ``PYTHONPATH``, then a
namespace package called ``timmins`` will be created automatically for you when
-you invoke the import mechanism, allowing you to accomplish the following
+you invoke the import mechanism, allowing you to accomplish the following:

.. code-block:: pycon

@@ -137,62 +401,121 @@ you invoke the import mechanism, allowing you to accomplish the following

as if there is only one ``timmins`` on your system. The two packages can then
be distributed separately and installed individually without affecting the
-other one. Suppose you are packaging the ``foo`` part:
+other one.

-.. code-block:: bash
+Now, suppose you decide to package the ``foo`` part for distribution and start
+by creating a project directory organized as follows::
+
+    foo
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    └── src
+        └── timmins
+            └── foo
+                └── __init__.py
+
+If you want ``timmins.foo`` to be automatically included in the
+distribution, then you will need to specify:
+
+.. tab:: setup.cfg
+
+   .. code-block:: ini
+
+      [options]
+      package_dir =
+          =src
+      packages = find_namespace:
+
+      [options.packages.find]
+      where = src
+
+   ``find:`` won't work because ``timmins`` doesn't contain ``__init__.py``
+   directly; instead, you have to use ``find_namespace:``.
+
+   You can think of ``find_namespace:`` as identical to ``find:`` except it
+   would count a directory as a package even if it doesn't contain an
+   ``__init__.py`` file directly.
+
+.. tab:: setup.py
+
+   .. code-block:: python
+
+      setup(
+          # ...
+          packages=find_namespace_packages(where='src'),
+          package_dir={"": "src"}
+          # ...
+      )

-    foo/
-        src/
-            timmins/foo/__init__.py
-        setup.cfg # or setup.py

+   When you use ``find_packages()``, all directories without an
+   ``__init__.py`` file will be disregarded.
+   On the other hand, ``find_namespace_packages()`` will scan all
+   directories.

-and you want the ``foo`` to be automatically included, ``find:`` won't work
-because timmins doesn't contain ``__init__.py`` directly, instead, you have
-to use ``find_namespace:``:

-.. code-block:: ini
+.. tab:: pyproject.toml (**BETA**) [#beta]_

-    [options]
-    package_dir =
-        =src
-    packages = find_namespace:
+   .. code-block:: toml

-    [options.packages.find]
-    where = src
+      [tool.setuptools.packages.find]
+      where = ["src"]

+   When using ``tool.setuptools.packages.find`` in ``pyproject.toml``,
+   setuptools will consider :pep:`implicit namespaces <420>` by default when
+   scanning your project directory.

-When you install the zipped distribution, ``timmins.foo`` would become
+After installing the package distribution, ``timmins.foo`` would become
available to your interpreter.

-You can think of ``find_namespace:`` as identical to ``find:`` except it
-would count a directory as a package even if it doesn't contain ``__init__.py``
-file directly. As a result, this creates an interesting side effect. If you
-organize your package like this:
+.. warning::
+   Please keep in mind that ``find_namespace:`` (setup.cfg),
+   ``find_namespace_packages()`` (setup.py) and ``find`` (pyproject.toml) will
+   scan **all** folders that you have in your project directory if you use a
+   :ref:`flat-layout`.

-.. code-block:: bash
+   If used naïvely, this might result in unwanted files being added to your
+   final wheel. For example, with a project directory organized as follows::

-    foo/
-        timmins/
-            foo/__init__.py
-        setup.cfg # or setup.py
-        tests/
-            test_foo/__init__.py
+       foo
+       ├── docs
+       │   └── conf.py
+       ├── timmins
+       │   └── foo
+       │       └── __init__.py
+       └── tests
+           └── tests_foo
+               └── __init__.py

-a naive ``find_namespace:`` would include tests as part of your package to
-be installed. A simple way to fix it is to adopt the aforementioned
-``src`` layout.
+   final users will end up installing not only ``timmins.foo``, but also
+   ``docs`` and ``tests.tests_foo``.
+
+   A simple way to fix this is to adopt the aforementioned :ref:`src-layout`,
+   or to properly configure the ``include`` and/or ``exclude`` patterns.
+
+.. tip::
+   After :ref:`building your package `, you can check whether all
+   the files are correct (nothing missing or extra) by running the following
+   commands:
+
+   .. code-block:: bash
+
+      tar tf dist/*.tar.gz
+      unzip -l dist/*.whl
+
+   This requires ``tar`` and ``unzip`` to be installed on your OS.
+   On Windows you can also use a GUI program such as 7zip_.
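+
+   If those command line tools are not available, the same check can be done
+   with Python's standard library alone (a minimal sketch):
+
+   .. code-block:: python
+
+      # List the files inside every built wheel (works on any OS).
+      import glob
+      import zipfile
+
+      for whl in glob.glob("dist/*.whl"):
+          print(whl, *zipfile.ZipFile(whl).namelist(), sep="\n  ")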

Legacy Namespace Packages
=========================
-The fact you can create namespace package so effortlessly above is credited
-to `PEP 420 `_. It use to be more
+The fact that you can create namespace packages so effortlessly above is credited
+to :pep:`420`. It used to be more
cumbersome to accomplish the same result. Historically, there were two methods
to create namespace packages. One is the ``pkg_resources`` style supported by
``setuptools`` and the other one being ``pkgutils`` style offered by
-``pkgutils`` module in Python. Both are now considered deprecated despite the
+``pkgutils`` module in Python. Both are now considered *deprecated* despite the
fact they still linger in many existing packages. These two differ in many
subtle yet significant aspects and you can find out more on `Python packaging
-user guide `_
+user guide <https://packaging.python.org/guides/packaging-namespace-packages/>`_.


``pkg_resource`` style namespace package
----------------------------------------

@@ -228,12 +551,13 @@ And your directory should look like this

.. code-block:: bash

-    /foo/
-        src/
-            timmins/
-                __init__.py
-                foo/__init__.py
-        setup.cfg #or setup.py
+    foo
+    ├── pyproject.toml  # AND/OR setup.cfg, setup.py
+    └── src
+        └── timmins
+            ├── __init__.py
+            └── foo
+                └── __init__.py

Repeat the same for other packages and you can achieve the same result as
the previous section.

@@ -248,4 +572,18 @@ file contains the following:

    __path__ = __import__('pkgutil').extend_path(__path__, __name__)

-The project layout remains the same and ``setup.cfg`` remains the same.
+The project layout and the ``pyproject.toml``/``setup.cfg`` configuration
+remain the same.
+
+
+----
+
+
+.. [#beta]
+   Support for adding build configuration options via the ``[tool.setuptools]``
+   table in the ``pyproject.toml`` file is still in **beta** stage.
+   See :doc:`/userguide/pyproject_config`.
+.. [#layout1] https://blog.ionelmc.ro/2014/05/25/python-packaging/#the-structure
+.. [#layout2] https://blog.ionelmc.ro/2017/09/25/rehashing-the-src-layout/
+
+.. _editable install: https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs
+.. _7zip: https://www.7-zip.org
diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
new file mode 100644
index 0000000000..c97984ba09
--- /dev/null
+++ b/docs/userguide/pyproject_config.rst
@@ -0,0 +1,260 @@
+.. _pyproject.toml config:
+
+-----------------------------------------------------
+Configuring setuptools using ``pyproject.toml`` files
+-----------------------------------------------------
+
+.. note:: New in 61.0.0
+
+.. important::
+   If compatibility with legacy builds or versions of tools that don't support
+   certain packaging standards (e.g. :pep:`517` or :pep:`660`) is required, a
+   simple ``setup.py`` script can be added to your project [#setupcfg-caveats]_
+   (while keeping the configuration in ``pyproject.toml``):
+
+   .. code-block:: python
+
+       from setuptools import setup
+
+       setup()
+
+Starting with :pep:`621`, the Python community selected ``pyproject.toml`` as
+a standard way of specifying *project metadata*.
+``Setuptools`` has adopted this standard and will use the information contained
+in this file as an input in the build process.
+
+The example below illustrates how to write a ``pyproject.toml`` file that can
+be used with ``setuptools``. It contains two TOML tables (identified by the
+``[table-header]`` syntax): ``build-system`` and ``project``.
+The ``build-system`` table is used to tell the build frontend (e.g.
+:pypi:`build` or :pypi:`pip`) to use ``setuptools`` and any other plugins (e.g.
+``setuptools-scm``) to build the package.
+The ``project`` table contains metadata fields as described by the
+:doc:`PyPUG:specifications/declaring-project-metadata` guide.
+
+.. _example-pyproject-config:
+
+.. code-block:: toml
+
+   [build-system]
+   requires = ["setuptools", "setuptools-scm"]
+   build-backend = "setuptools.build_meta"
+
+   [project]
+   name = "my_package"
+   authors = [
+       {name = "Josiah Carberry", email = "josiah_carberry@brown.edu"},
+   ]
+   description = "My package description"
+   readme = "README.rst"
+   requires-python = ">=3.7"
+   keywords = ["one", "two"]
+   license = {text = "BSD-3-Clause"}
+   classifiers = [
+       "Framework :: Django",
+       "Programming Language :: Python :: 3",
+   ]
+   dependencies = [
+       "requests",
+       'importlib-metadata; python_version<"3.8"',
+   ]
+   dynamic = ["version"]
+
+   [project.optional-dependencies]
+   pdf = ["ReportLab>=1.2", "RXP"]
+   rest = ["docutils>=0.3", "pack ==1.1, ==1.3"]
+
+   [project.scripts]
+   my-script = "my_package.module:function"
+
+   # ... other project metadata fields as specified in:
+   #     https://packaging.python.org/en/latest/specifications/declaring-project-metadata/
+
+
+.. _setuptools-table:
+
+Setuptools-specific configuration
+=================================
+
+.. warning::
+   Support for declaring configurations not standardized by :pep:`621`
+   (i.e. the ``[tool.setuptools]`` table)
+   is still in **beta** stage and might change in future releases.
+
+While the standard ``project`` table in the ``pyproject.toml`` file covers most
+of the metadata used during the packaging process, there are still some
+``setuptools``-specific configurations that can be set by users who require
+customization.
+These configurations are completely optional and can probably be skipped when
+creating simple packages.
+They are equivalent to the :doc:`/references/keywords` used by the ``setup.py``
+file, and can be set via the ``tool.setuptools`` table:
+
+========================= =========================== =========================
+Key                       Value Type (TOML)           Notes
+========================= =========================== =========================
+``platforms``             array
+``zip-safe``              boolean                     If not specified, ``setuptools`` will try to guess
+                                                      a reasonable default for the package
+``eager-resources``       array
+``py-modules``            array                       See tip below
+``packages``              array or ``find`` directive See tip below
+``package-dir``           table/inline-table          Used when explicitly listing ``packages``
+``namespace-packages``    array                       **Deprecated** - Use implicit namespaces instead (:pep:`420`)
+``package-data``          table/inline-table          See :doc:`/userguide/datafiles`
+``include-package-data``  boolean                     ``True`` by default
+``exclude-package-data``  table/inline-table
+``license-files``         array of glob patterns      **Provisional** - likely to change with :pep:`639`
+                                                      (by default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``)
+``data-files``            table/inline-table          **Discouraged** - check :doc:`/userguide/datafiles`
+``script-files``          array                       **Deprecated** - equivalent to the ``scripts`` keyword in ``setup.py``
+                                                      (should be avoided in favour of ``project.scripts``)
+``provides``              array                       **Ignored by pip**
+``obsoletes``             array                       **Ignored by pip**
+========================= =========================== =========================
+
+.. note::
+   The `TOML value types`_ ``array`` and ``table/inline-table`` are roughly
+   equivalent to Python's :obj:`list` and :obj:`dict` data types, respectively.
+
+Please note that some of these configurations are deprecated or at least
+discouraged, but they are made available to ensure portability.
+New packages should avoid relying on deprecated/discouraged fields, and
+existing packages should consider alternatives.
+
+.. tip::
+   When both ``py-modules`` and ``packages`` are left unspecified,
+   ``setuptools`` will attempt to perform :ref:`auto-discovery`, which should
+   cover most popular project directory organization techniques, such as the
+   :ref:`src-layout` and the :ref:`flat-layout`.
+
+   However, if your project does not follow these conventional layouts
+   (e.g. you want to use a ``flat-layout`` but at the same time have custom
+   directories at the root of your project), you might need to use the ``find``
+   directive [#directives]_ as shown below:
+
+   .. code-block:: toml
+
+      [tool.setuptools.packages.find]
+      where = ["src"]  # list of folders that contain the packages (["."] by default)
+      include = ["my_package*"]  # package names should match these glob patterns (["*"] by default)
+      exclude = ["my_package.tests*"]  # exclude packages matching these glob patterns (empty by default)
+      namespaces = false  # to disable scanning PEP 420 namespaces (true by default)
+
+   Note that the glob patterns in the example above need to be matched
+   by the **entire** package name. This means that if you specify ``exclude = ["tests"]``,
+   modules like ``tests.my_package.test1`` will still be included in the distribution
+   (to remove them, add a wildcard to the end of the pattern: ``"tests*"``).
+
+   Alternatively, you can explicitly list the packages:
+
+   .. code-block:: toml
+
+      [tool.setuptools]
+      packages = ["my_package"]
+
+
+.. _dynamic-pyproject-config:
+
+Dynamic Metadata
+================
+
+Note that in the first example of this page we use ``dynamic`` to identify
+which metadata fields are dynamically computed during the build by either
+``setuptools`` itself or the plugins installed via ``build-system.requires``
+(e.g. ``setuptools-scm`` is capable of deriving the current project version
+directly from the ``git`` :wiki:`version control` system).
+
+Currently the following fields can be listed as dynamic: ``version``,
+``classifiers``, ``description``, ``entry-points``, ``scripts``,
+``gui-scripts`` and ``readme``.
+When these fields are expected to be provided by ``setuptools``, a
+corresponding entry is required in the ``tool.setuptools.dynamic`` table
+[#entry-points]_. For example:
+
+.. code-block:: toml
+
+   # ...
+   [project]
+   name = "my_package"
+   dynamic = ["version", "readme"]
+   # ...
+   [tool.setuptools.dynamic]
+   version = {attr = "my_package.VERSION"}
+   readme = {file = ["README.rst", "USAGE.rst"]}
+
+In the ``dynamic`` table, the ``attr`` directive [#directives]_ will read an
+attribute from the given module [#attr]_, while ``file`` will read the contents
+of all given files and concatenate them in a single string.
+
+========================== =================== =================================================================================================
+Key                        Directive           Notes
+========================== =================== =================================================================================================
+``version``                ``attr``, ``file``
+``readme``                 ``file``            Here you can also set ``"content-type"``:
+
+                                               ``readme = {file = ["README", "USAGE"], content-type = "text/plain"}``
+
+                                               If ``content-type`` is not given, ``"text/x-rst"`` is used by default.
+``description``            ``file``            One-line text (no line breaks)
+``classifiers``            ``file``            Multi-line text with one classifier per line
+``entry-points``           ``file``            INI format following :doc:`PyPUG:specifications/entry-points`
+                                               (``console_scripts`` and ``gui_scripts`` can be included)
+``dependencies``           ``file``            *subset* of the ``requirements.txt`` format
+                                               (``#`` comments and blank lines excluded) **BETA**
+``optional-dependencies``  ``file``            *subset* of the ``requirements.txt`` format per group
+                                               (``#`` comments and blank lines excluded) **BETA**
+========================== =================== =================================================================================================
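+
+As a minimal sketch (reusing the hypothetical ``my_package`` name from the
+example above), the ``attr: my_package.VERSION`` directive could be backed by
+a statically defined attribute in ``my_package/__init__.py``:
+
+.. code-block:: python
+
+   # my_package/__init__.py (hypothetical layout)
+   # A plain assignment that can be parsed with ``ast.literal_eval``,
+   # so the version can be read at build time without importing the package:
+   VERSION = "0.1.0"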
+
+Supporting ``file`` for dependencies is meant as a convenience for packaging
+applications with possibly strictly versioned dependencies.
+
+Library packagers are discouraged from using overly strict (or "locked")
+dependency versions in their ``dependencies`` and ``optional-dependencies``.
+
+Currently, when specifying ``optional-dependencies`` dynamically, all of the groups
+must be specified dynamically; one cannot specify some of them statically and
+some of them dynamically.
+
+Also note that the file format for specifying dependencies resembles a ``requirements.txt`` file,
+however please keep in mind that all non-comment lines must conform with :pep:`508`
+(``pip``-specific syntaxes, e.g. ``-c/-r/-e`` flags, are not supported).
+
+
+.. note::
+   If you are using an old version of ``setuptools``, you might need to ensure
+   that all files referenced by the ``file`` directive are included in the ``sdist``
+   (you can do that via ``MANIFEST.in`` or using plugins such as ``setuptools-scm``,
+   please have a look at :doc:`/userguide/miscellaneous` for more information).
+
+   .. versionchanged:: 66.1.0
+      Newer versions of ``setuptools`` will automatically add these files to the ``sdist``.
+
+----
+
+.. rubric:: Notes
+
+.. [#setupcfg-caveats] ``pip`` may allow editable install only with ``pyproject.toml``
+   and ``setup.cfg``. However, this behavior may not be consistent over various ``pip``
+   versions and other packaging-related tools
+   (``setup.py`` is more reliable in those scenarios).
+
+.. [#entry-points] Dynamic ``scripts`` and ``gui-scripts`` are a special case.
+   When resolving these metadata keys, ``setuptools`` will look for
+   ``tool.setuptools.dynamic.entry-points``, and use the values of the
+   ``console_scripts`` and ``gui_scripts`` :doc:`entry-point groups
+   `.
+
+.. [#directives] In the context of this document, *directives* are special TOML
+   values that are interpreted differently by ``setuptools`` (usually triggering an
+   associated function). Most of the time they correspond to a special TOML table
+   (or inline-table) with a single top-level key.
+   For example, you can have the ``{find = {where = ["src"], exclude=["tests*"]}}``
+   directive for ``tool.setuptools.packages``, or ``{attr = "mymodule.attr"}``
+   directive for ``tool.setuptools.dynamic.version``.
+
+.. [#attr] ``attr`` is meant to be used when the module attribute is statically
+   specified (e.g. as a string, list or tuple). As a rule of thumb, the
+   attribute should be able to be parsed with :func:`ast.literal_eval`, and
+   should not be modified or re-assigned.
+.. _TOML value types: https://toml.io/en/v1.0.0
diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
index 203d620473..3f97e8ef9b 100644
--- a/docs/userguide/quickstart.rst
+++ b/docs/userguide/quickstart.rst
@@ -1,33 +1,51 @@
-==========================
-``setuptools`` Quickstart
-==========================
+==========
+Quickstart
+==========

Installation
============

-To install the latest version of setuptools, use::
+You can install the latest version of ``setuptools`` using :pypi:`pip`::

    pip install --upgrade setuptools

+Most of the time, however, you don't have to...

-Python packaging at a glance
-============================
-The landscape of Python packaging is shifting and ``Setuptools`` has evolved to
-only provide backend support, no longer being the de-facto packaging tool in
-the market. Every python package must provide a ``pyproject.toml`` and specify
+Instead, when creating new Python packages, it is recommended to use
+a command line tool called :pypi:`build`. This tool will automatically download
+``setuptools`` and any other build-time dependencies that your project might
+have. You just need to specify them in a ``pyproject.toml`` file at the root of
+your package, as indicated in the :ref:`following section <basic-use>`.
+
+.. _install-build:
+
+You can also :doc:`install build ` using :pypi:`pip`::
+
+    pip install --upgrade build
+
+This will allow you to run the command: ``python -m build``.
+
+.. important::
+   Please note that some operating systems might be equipped with
+   the ``python3`` and ``pip3`` commands instead of ``python`` and ``pip``
+   (but they should be equivalent).
+   If you don't have ``pip`` or ``pip3`` available in your system, please
+   check out :doc:`pip installation docs `.
+
+
+Every python package must provide a ``pyproject.toml`` and specify
the backend (build system) it wants to use. The distribution can then be
generated with whatever tool that provides a ``build sdist``-like
-functionality. While this may appear cumbersome, given the added pieces,
-it in fact tremendously enhances the portability of your package. The
-change is driven under :pep:`PEP 517 <517#build-requirements>`. To learn more about Python packaging in general,
-navigate to the :ref:`bottom ` of this page.
+functionality.
+
+.. _basic-use:

Basic Use
=========

-For basic use of setuptools, you will need a ``pyproject.toml`` with the
-exact following info, which declares you want to use ``setuptools`` to
-package your project:
+When creating a Python package, you must provide a ``pyproject.toml`` file
+containing a ``build-system`` section similar to the example below:

.. code-block:: toml

@@ -35,9 +53,42 @@ package your project:
    requires = ["setuptools"]
    build-backend = "setuptools.build_meta"

-Then, you will need a ``setup.cfg`` or ``setup.py`` to specify your package
-information, such as metadata, contents, dependencies, etc. Here we demonstrate
-the minimum
+This section declares your build system dependencies and which
+library will be used to actually do the packaging.
+
+.. note::
+
+   Historically this documentation has unnecessarily listed ``wheel``
+   in the ``requires`` list, and many projects still do that. This is
+   not recommended. The backend automatically adds the ``wheel`` dependency
+   when it is required, and listing it explicitly causes it to be
+   unnecessarily required for source distribution builds.
+   You should only include ``wheel`` in ``requires`` if you need to explicitly
+   access it during build time (e.g. if your project needs a ``setup.py``
+   script that imports ``wheel``).
+
+In addition to specifying a build system, you also will need to add
+some package information such as metadata, contents, dependencies, etc.
+This can be done in the same ``pyproject.toml`` [#beta]_ file,
+or in a separate one: ``setup.cfg`` or ``setup.py`` [#setup.py]_.
+
+The following example demonstrates a minimal configuration
+(which assumes the project depends on :pypi:`requests` and
+:pypi:`importlib-metadata` to be able to run):
+
+.. tab:: pyproject.toml
+
+   .. code-block:: toml
+
+      [project]
+      name = "mypackage"
+      version = "0.0.1"
+      dependencies = [
+          "requests",
+          'importlib-metadata; python_version<"3.8"',
+      ]
+
+   See :doc:`/userguide/pyproject_config` for more information.

.. tab:: setup.cfg

@@ -48,12 +99,13 @@ the minimum
      version = 0.0.1

      [options]
-    packages = mypackage
      install_requires =
          requests
-        importlib; python_version == "2.6"
+        importlib-metadata; python_version < "3.8"

-.. tab:: setup.py
+   See :doc:`/userguide/declarative_config` for more information.
+
+.. tab:: setup.py [#setup.py]_

    .. code-block:: python

@@ -62,136 +114,291 @@ the minimum
      setup(
          name='mypackage',
          version='0.0.1',
-          packages=['mypackage'],
          install_requires=[
              'requests',
-              'importlib; python_version == "2.6"',
+              'importlib-metadata; python_version < "3.8"',
          ],
      )

-This is what your project would look like::
+   See :doc:`/references/keywords` for more information.
+
+Finally, you will need to organize your Python code to make it ready for
+distributing into something that looks like the following
+(optional files marked with ``#``)::

-    ~/mypackage/
-        pyproject.toml
-        setup.cfg # or setup.py
-        mypackage/__init__.py
+    mypackage
+    ├── pyproject.toml  # and/or setup.cfg/setup.py (depending on the configuration method)
+    |   # README.rst or README.md (a nice description of your package)
+    |   # LICENCE (properly chosen license information, e.g. MIT, BSD-3, GPL-3, MPL-2, etc...)
+    └── mypackage
+        ├── __init__.py
+        └── ... (other Python files)

-Then, you need a builder, such as :std:doc:`PyPA build `
-which you can obtain via ``pip install build``. After downloading it, invoke
-the builder::
+With :ref:`build installed in your system <install-build>`, you can then run::

    python -m build

-You now have your distribution ready (e.g. a ``tar.gz`` file and a ``.whl``
-file in the ``dist`` directory), which you can upload to PyPI!
+You now have your distribution ready (e.g. a ``tar.gz`` file and a ``.whl`` file
+in the ``dist`` directory), which you can :doc:`upload ` to PyPI_!

-Of course, before you release your project to PyPI, you'll want to add a bit
-more information to your setup script to help people find or learn about your
-project. And maybe your project will have grown by then to include a few
+Of course, before you release your project to PyPI_, you'll want to add a bit
+more information to help people find or learn about your project.
+And maybe your project will have grown by then to include a few
dependencies, and perhaps some data files and scripts. In the next few
sections, we will walk through the additional but essential information you
need to specify to properly package your project.

-Automatic package discovery
-===========================
-For simple projects, it's usually easy enough to manually add packages to
-the ``packages`` keyword in ``setup.cfg``. However, for very large projects,
-it can be a big burden to keep the package list updated. ``setuptools``
-therefore provides two convenient tools to ease the burden: :literal:`find:\ ` and
-:literal:`find_namespace:\ `. To use it in your project:
+..
+   TODO: A previous generation of this document included a section called
+   "Python packaging at a glance". This is a nice title, but the content
+   was removed because it assumed the reader had familiarity with the history of
+   setuptools and PEP 517. We should take advantage of this nice title and add
+   this section back, but use it to explain important concepts of the
+   ecosystem, such as "sdist", "wheel", "index". It would also be nice if we
+   could have a diagram for that (explaining for example that "wheels" are
+   built from "sdists" not the source tree).
+
+.. _setuppy_discouraged:
+.. admonition:: Info: Using ``setup.py``
+   :class: seealso
+
+   Setuptools offers first class support for ``setup.py`` files as a configuration
+   mechanism.
+
+   It is important to remember, however, that running this file as a
+   script (e.g. ``python setup.py sdist``) is strongly **discouraged**, and
+   that the majority of the command line interfaces are (or will be) **deprecated**
+   (e.g. ``python setup.py install``, ``python setup.py bdist_wininst``, ...).
+
+   We also recommend that users expose as much configuration as possible in a
+   more *declarative* way via the :doc:`pyproject.toml ` or
+   :doc:`setup.cfg `, and keep the ``setup.py`` minimal
+   with only the dynamic parts (or even omit it completely if applicable).
+
+   See `Why you shouldn't invoke setup.py directly`_ for more background.
+
+.. _Why you shouldn't invoke setup.py directly: https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html
+
+
+Overview
+========
+
+Package discovery
+-----------------
+For projects that follow a simple directory structure, ``setuptools`` should be
+able to automatically detect all :term:`packages ` and
+:term:`namespaces `. However, complex projects might include
+additional folders and supporting files that should not necessarily be
+distributed (or that can confuse the ``setuptools`` auto-discovery algorithm).
+
+Therefore, ``setuptools`` provides a convenient way to customize
+which packages should be distributed and in which directory they should be
+found, as shown in the example below:

-.. code-block:: ini
+.. tab:: pyproject.toml (**BETA**) [#beta]_

-    [options]
-    packages = find:
+   .. code-block:: toml

-    [options.packages.find] #optional
-    include=pkg1, pkg2
-    exclude=pk3, pk4
+      # ...
+      [tool.setuptools.packages]
+      find = {}  # Scan the project directory with the default parameters
+
+      # OR
+      [tool.setuptools.packages.find]
+      # All the following settings are optional:
+      where = ["src"]  # ["."] by default
+      include = ["mypackage*"]  # ["*"] by default
+      exclude = ["mypackage.tests*"]  # empty by default
+      namespaces = false  # true by default
+
+.. tab:: setup.cfg
+
+   .. code-block:: ini
+
+      [options]
+      packages = find:  # OR `find_namespace:` if you want to use namespaces
+
+      [options.packages.find]  # (always `find` even if `find_namespace:` was used before)
+      # This section is optional as well as each of the following options:
+      where=src  # . by default
+      include=mypackage*  # * by default
+      exclude=mypackage.tests*  # empty by default
+
+.. tab:: setup.py [#setup.py]_
+
+   .. code-block:: python
+
+      from setuptools import find_packages  # or find_namespace_packages
+
+      setup(
+          # ...
+          packages=find_packages(
+              # All keyword arguments below are optional:
+              where='src',  # '.' by default
+              include=['mypackage*'],  # ['*'] by default
+              exclude=['mypackage.tests'],  # empty by default
+          ),
+          # ...
+      )

When you pass the above information, alongside other necessary information,
-``setuptools`` walks through the directory specified in ``where`` (omitted
-here as the package resides in the current directory) and filters the packages
-it can find following the ``include`` (defaults to none), then removes
-those that match the ``exclude`` and returns a list of Python packages. Note
-that each entry in the ``[options.packages.find]`` is optional. The above
-setup also allows you to adopt a ``src/`` layout. For more details and advanced
-use, go to :ref:`package_discovery`
+``setuptools`` walks through the directory specified in ``where`` (defaults to ``.``) and filters the packages
+it can find following the ``include`` patterns (defaults to ``*``), then it removes
+those that match the ``exclude`` patterns (defaults to empty) and returns a list of Python packages.
+
+For more details and advanced use, go to :ref:`package_discovery`.
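+
+As a quick sanity check of such settings, ``find_packages()`` can also be
+called directly from a Python session at the project root (a sketch; the
+result depends on your actual directory layout):
+
+.. code-block:: python
+
+   from setuptools import find_packages
+
+   # Should list the packages that will end up in the distribution,
+   # e.g. ['mypackage', 'mypackage.subpkg']
+   print(find_packages(where="src", include=["mypackage*"]))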
+
+.. tip::
+   Starting with version 61.0.0, setuptools' automatic discovery capabilities
+   have been improved to detect popular project layouts (such as the
+   :ref:`flat-layout` and :ref:`src-layout`) without requiring any
+   special configuration. Check out our :ref:`reference docs `
+   for more information, but please keep in mind that this functionality is
+   still considered **beta** and might change in future releases.

Entry points and automatic script creation
-===========================================
-Setuptools supports automatic creation of scripts upon installation, that runs
-code within your package if you specify them with the ``entry_points`` keyword.
-This is what allows you to run commands like ``pip install`` instead of having
-to type ``python -m pip install``. To accomplish this, add the entry_points
-keyword in your ``setup.cfg``:
+-------------------------------------------
+Setuptools supports automatic creation of scripts upon installation that run
+code within your package if you specify them as :doc:`entry points
+`.
+An example of how this feature can be used in ``pip``:
+it allows you to run commands like ``pip install`` instead of having
+to type ``python -m pip install``.
+
+The following configuration examples show how to accomplish this:
+
+
+.. tab:: pyproject.toml
+
+   .. code-block:: toml

-.. code-block:: ini
+      [project.scripts]
+      cli-name = "mypkg.mymodule:some_func"

-    [options.entry_points]
-    console_scripts =
-        main = mypkg:some_func
+.. tab:: setup.cfg
+
+   .. code-block:: ini

-When this project is installed, a ``main`` script will be installed and will
-invoke the ``some_func`` in the ``__init__.py`` file when called by the user.
-For detailed usage, including managing the additional or optional dependencies,
-go to :doc:`entry_point`.
+      [options.entry_points]
+      console_scripts =
+          cli-name = mypkg.mymodule:some_func
+
+.. tab:: setup.py [#setup.py]_
+
+   .. code-block:: python
+
+      setup(
+          # ...
+          entry_points={
+              'console_scripts': [
+                  'cli-name = mypkg.mymodule:some_func',
+              ]
+          }
+      )
+
+When this project is installed, a ``cli-name`` executable will be created.
+``cli-name`` will invoke the function ``some_func`` in the
+``mypkg/mymodule.py`` file when called by the user.
+Note that you can also use the ``entry-points`` mechanism to advertise
+components between installed packages and implement plugin systems.
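+
+For instance, a minimal sketch of the hypothetical ``mypkg/mymodule.py``
+assumed above could look like this:
+
+.. code-block:: python
+
+   # mypkg/mymodule.py (hypothetical module backing the ``cli-name`` script)
+   def some_func():
+       """Run when the user executes ``cli-name`` on the command line."""
+       print("Hello from mypkg!")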
+For detailed usage, go to :doc:`entry_point`.


Dependency management
-=====================
-``setuptools`` supports automatically installing dependencies when a package is
-installed. The simplest way to include requirement specifiers is to use the
-``install_requires`` argument to ``setup.cfg``. It takes a string or list of
-strings containing requirement specifiers (A version specifier is one of the
-operators <, >, <=, >=, == or !=, followed by a version identifier):
+---------------------
+Packages built with ``setuptools`` can specify dependencies to be automatically
+installed when the package itself is installed.
+The example below shows how to configure this kind of dependency:
+
+.. tab:: pyproject.toml

-.. code-block:: ini
+   .. code-block:: toml

-    [options]
-    install_requires =
-        docutils >= 0.3
-        requests <= 0.4
+      [project]
+      # ...
+      dependencies = [
+          "docutils",
+          "requests <= 0.4",
+      ]
+      # ...
+
+.. tab:: setup.cfg
+
+   .. code-block:: ini
+
+      [options]
+      install_requires =
+          docutils
+          requests <= 0.4
+
+.. tab:: setup.py [#setup.py]_
+
+   .. code-block:: python
+
+      setup(
+          # ...
+          install_requires=["docutils", "requests <= 0.4"],
+          # ...
+      )
+
+Each dependency is represented by a string that can optionally contain version requirements
+(e.g. one of the operators <, >, <=, >=, == or !=, followed by a version identifier),
+and/or conditional environment markers, e.g. ``sys_platform == "win32"``
+(see :doc:`PyPUG:specifications/version-specifiers` for more information).

When your project is installed, all of the dependencies not already installed
will be located (via PyPI), downloaded, built (if necessary), and installed.

-This, of course, is a simplified scenarios. ``setuptools`` also provides
-additional keywords such as ``setup_requires`` that allows you to install
-dependencies before running the script, and ``extras_require`` that take
-care of those needed by automatically generated scripts. It also provides
-mechanisms to handle dependencies that are not in PyPI. For more advanced use,
-see :doc:`dependency_management`
+This, of course, is a simplified scenario. You can also specify groups of
+extra dependencies that are not strictly required by your package to work, but
+that will provide additional functionalities.
+For more advanced use, see :doc:`dependency_management`.

.. _Including Data Files:

Including Data Files
-====================
-The distutils have traditionally allowed installation of "data files", which
-are placed in a platform-specific location. Setuptools offers three ways to
-specify data files to be included in your packages. For the simplest use, you
-can simply use the ``include_package_data`` keyword:
+--------------------
+Setuptools offers three ways to specify data files to be included in your packages.
+For the simplest use, you can use the ``include_package_data`` keyword:

-.. code-block:: ini
+.. tab:: pyproject.toml (**BETA**) [#beta]_

-    [options]
-    include_package_data = True
+   .. code-block:: toml

-This tells setuptools to install any data files it finds in your packages.
-The data files must be specified via the distutils' ``MANIFEST.in`` file.
-For more details, see :doc:`datafiles`
+      [tool.setuptools]
+      include-package-data = true
+      # This is already the default behaviour if you are using
+      # pyproject.toml to configure your build.
+      # You can deactivate that with `include-package-data = false`

+.. tab:: setup.cfg

-Development mode
-================
+   .. code-block:: ini

-.. tip::
-
-    Prior to :ref:`pip v21.1 `, a ``setup.py`` script was
-    required to be compatible with development mode. With late
-    versions of pip, any project may be installed in this mode.
+      [options]
+      include_package_data = True

+.. tab:: setup.py [#setup.py]_
+
+   .. code-block:: python
+
+      setup(
+          # ...
+          include_package_data=True,
+          # ...
+      )
+
+This tells setuptools to install any data files it finds in your packages.
+The data files must be specified via the |MANIFEST.in|_ file
+or automatically added by a :ref:`Revision Control System plugin
+`.
+For more details, see :doc:`datafiles`.
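+
+As a side note, once the package is installed, such data files can be read
+back at runtime with the standard library (a sketch, assuming ``mypackage``
+ships a ``data.json`` next to its ``__init__.py``):
+
+.. code-block:: python
+
+   from importlib.resources import files  # Python 3.9+
+
+   # Locate the file inside the installed package and read it:
+   text = files("mypackage").joinpath("data.json").read_text(encoding="utf-8")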
+
+
+Development mode
+----------------

``setuptools`` allows you to install a package without copying any files
to your interpreter directory (e.g. the ``site-packages`` directory).
@@ -201,25 +408,48 @@ Here's how to do it::

    pip install --editable .

-This creates a link file in your interpreter site package directory which
-associate with your source code. For more information, see :doc:`development_mode`.
+See :doc:`development_mode` for more information.
+
+.. tip::
+
+    Prior to :ref:`pip v21.1 `, a ``setup.py`` script was
+    required to be compatible with development mode. With later
+    versions of pip, projects without ``setup.py`` may be installed in this mode.
+
+    If you have a version of ``pip`` older than v21.1 or are using a different
+    packaging-related tool that does not support :pep:`660`, you might need to keep a
+    ``setup.py`` file in your repository if you want to use editable
+    installs.
+
+    A simple script will suffice, for example:
+
+    .. code-block:: python
+
+        from setuptools import setup
+
+        setup()
+
+    You can still keep all the configuration in
+    :doc:`pyproject.toml ` and/or
+    :doc:`setup.cfg `.

Uploading your package to PyPI
-==============================
+------------------------------
After generating the distribution files, the next step would be to upload your
distribution so others can use it. This functionality is provided by
-`twine `_ and we will only demonstrate the
-basic use here.
+:pypi:`twine` and is documented in the :doc:`Python packaging tutorial
+`.

Transitioning from ``setup.py`` to ``setup.cfg``
-================================================
+------------------------------------------------
To avoid executing arbitrary scripts and boilerplate code, we are transitioning
into a full-fledged ``setup.cfg`` to declare your package information instead of
running ``setup()``. This inevitably brings challenges due to a different
-syntax. Here we provide a quick guide to understanding how ``setup.cfg`` is
-parsed by ``setuptool`` to ease the pain of transition.
+syntax. :doc:`Here ` we provide a quick guide to
+understanding how ``setup.cfg`` is parsed by ``setuptools`` to ease the pain of
+transition.

.. _packaging-resources:

@@ -228,3 +458,30 @@ Resources on Python packaging
Packaging in Python can be hard and is constantly evolving.
`Python Packaging User Guide `_ has tutorials and
up-to-date references that can help you when it is time to distribute your work.
+
+
+.. |MANIFEST.in| replace:: ``MANIFEST.in``
+.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/
+
+
+----
+
+.. rubric:: Notes
+
+.. [#setup.py]
+   New projects are advised to avoid ``setup.py`` configurations (beyond the minimal stub)
+   when custom scripting during the build is not necessary.
+   Examples are kept in this document to help people interested in maintaining or
+   contributing to existing packages that use ``setup.py``.
+ Note that you can still keep most of configuration declarative in + :doc:`setup.cfg ` or :doc:`pyproject.toml + ` and use ``setup.py`` only for the parts not + supported in those files (e.g. C extensions). + See :ref:`note `. + +.. [#beta] + Support for adding build configuration options via the ``[tool.setuptools]`` + table in the ``pyproject.toml`` file is still in **beta** stage. + See :doc:`/userguide/pyproject_config`. + +.. _PyPI: https://pypi.org diff --git a/launcher.c b/launcher.c index 23ef3ac206..83b487898a 100644 --- a/launcher.c +++ b/launcher.c @@ -180,7 +180,7 @@ void pass_control_to_child(DWORD control_type) { } BOOL control_handler(DWORD control_type) { - /* + /* * distribute-issue207 * control event handler callback function */ @@ -204,12 +204,12 @@ int create_and_wait_for_subprocess(char* command) { ZeroMemory(&p_info, sizeof(p_info)); ZeroMemory(&s_info, sizeof(s_info)); s_info.cb = sizeof(STARTUPINFO); - // set-up control handler callback funciotn + // set-up control handler callback function SetConsoleCtrlHandler((PHANDLER_ROUTINE) control_handler, TRUE); if (!CreateProcessA(NULL, commandline, NULL, NULL, TRUE, 0, NULL, NULL, &s_info, &p_info)) { fprintf(stderr, "failed to create process.\n"); return 0; - } + } child_pid = p_info.dwProcessId; // wait for Python to exit WaitForSingleObject(p_info.hProcess, INFINITE); @@ -229,7 +229,7 @@ char* join_executable_and_args(char *executable, char **args, int argc) */ int len,counter; char* cmdline; - + len=strlen(executable)+2; for (counter=1; counter`_ +and +`importlib.metadata `_ +instead. """ import sys @@ -34,7 +40,6 @@ import errno import tempfile import textwrap -import itertools import inspect import ntpath import posixpath @@ -54,8 +59,10 @@ # capture these to bypass sandboxing from os import utime + try: from os import mkdir, rename, unlink + WRITE_SUPPORT = True except ImportError: # no write support, probably under GAE @@ -66,6 +73,7 @@ try: import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. 
    importlib_machinery.__name__
except ImportError:
@@ -77,12 +85,14 @@
    join_continuation,
)

-from pkg_resources.extern import appdirs
+from pkg_resources.extern import platformdirs
from pkg_resources.extern import packaging

+
__import__('pkg_resources.extern.packaging.version')
__import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.requirements')
__import__('pkg_resources.extern.packaging.markers')
+__import__('pkg_resources.extern.packaging.utils')

if sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")
@@ -108,6 +118,12 @@
    _namespace_packages = None


+warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning)
+
+
+_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
+
+
class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
@@ -115,16 +131,7 @@ class PEP440Warning(RuntimeWarning):
    """


-def parse_version(v):
-    try:
-        return packaging.version.Version(v)
-    except packaging.version.InvalidVersion:
-        warnings.warn(
-            f"{v} is an invalid version and will not be supported in "
-            "a future release",
-            PkgResourcesDeprecationWarning,
-        )
-        return packaging.version.LegacyVersion(v)
+parse_version = packaging.version.Version


_state_vars = {}
@@ -196,51 +203,87 @@ def get_supported_platform():

__all__ = [
    # Basic resource access and distribution/entry point discovery
-    'require', 'run_script', 'get_provider', 'get_distribution',
-    'load_entry_point', 'get_entry_map', 'get_entry_info',
+    'require',
+    'run_script',
+    'get_provider',
+    'get_distribution',
+    'load_entry_point',
+    'get_entry_map',
+    'get_entry_info',
    'iter_entry_points',
-    'resource_string', 'resource_stream', 'resource_filename',
-    'resource_listdir', 'resource_exists', 'resource_isdir',
-
+    'resource_string',
+    'resource_stream',
+    'resource_filename',
+    'resource_listdir',
+    'resource_exists',
+    'resource_isdir',
    # Environmental control
-    'declare_namespace', 'working_set', 'add_activation_listener',
-    'find_distributions', 'set_extraction_path', 'cleanup_resources',
+    'declare_namespace',
+    'working_set',
+    'add_activation_listener',
+    'find_distributions',
+    'set_extraction_path',
+    'cleanup_resources',
    'get_default_cache',
-
    # Primary implementation classes
-    'Environment', 'WorkingSet', 'ResourceManager',
-    'Distribution', 'Requirement', 'EntryPoint',
-
+    'Environment',
+    'WorkingSet',
+    'ResourceManager',
+    'Distribution',
+    'Requirement',
+    'EntryPoint',
    # Exceptions
-    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
-    'UnknownExtra', 'ExtractionError',
-
+    'ResolutionError',
+    'VersionConflict',
+    'DistributionNotFound',
+    'UnknownExtra',
+    'ExtractionError',
    # Warnings
    'PEP440Warning',
-
    # Parsing functions and string utilities
-    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
-    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
-    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
-
+    'parse_requirements',
+    'parse_version',
+    'safe_name',
+    'safe_version',
+    'get_platform',
+    'compatible_platforms',
+    'yield_lines',
+    'split_sections',
+    'safe_extra',
+    'to_filename',
+    'invalid_marker',
+    'evaluate_marker',
    # filesystem utilities
-    'ensure_directory', 'normalize_path',
-
+    'ensure_directory',
+    'normalize_path',
    # Distribution "precedence" constants
-    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
-
+    'EGG_DIST',
+    'BINARY_DIST',
+    'SOURCE_DIST',
+    
'CHECKOUT_DIST', + 'DEVELOP_DIST', # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - + 'IMetadataProvider', + 'IResourceProvider', + 'FileMetadata', + 'PathMetadata', + 'EggMetadata', + 'EmptyProvider', + 'empty_provider', + 'NullProvider', + 'EggProvider', + 'DefaultProvider', + 'ZipProvider', + 'register_finder', + 'register_namespace_handler', + 'register_loader_type', + 'fixup_namespace_packages', + 'get_importer', # Warnings 'PkgResourcesDeprecationWarning', - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', + 'run_main', + 'AvailableDistributions', ] @@ -299,8 +342,10 @@ def required_by(self): class DistributionNotFound(ResolutionError): """A requested distribution was not found""" - _template = ("The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}") + _template = ( + "The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}" + ) @property def req(self): @@ -394,7 +439,8 @@ def get_build_platform(): version = _macos_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( - int(version[0]), int(version[1]), + int(version[0]), + int(version[1]), _macos_arch(machine), ) except ValueError: @@ -435,15 +481,18 @@ def compatible_platforms(provided, required): if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": + if ( + dversion == 7 + and macosversion >= "10.3" + or dversion == 8 + and macosversion >= "10.4" + ): return True # egg isn't macOS or legacy darwin return False # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): + if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? @@ -505,8 +554,8 @@ def get_metadata(name): def get_metadata_lines(name): """Yield named metadata resource as list of non-blank non-comment lines - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" def metadata_isdir(name): """Is the named metadata a directory? (like ``os.path.isdir()``)""" @@ -554,6 +603,7 @@ def __init__(self, entries=None): self.entries = [] self.entry_keys = {} self.by_key = {} + self.normalized_to_canonical_keys = {} self.callbacks = [] if entries is None: @@ -634,6 +684,14 @@ def find(self, req): is returned. 
""" dist = self.by_key.get(req.key) + + if dist is None: + canonical_key = self.normalized_to_canonical_keys.get(req.key) + + if canonical_key is not None: + req.key = canonical_key + dist = self.by_key.get(canonical_key) + if dist is not None and dist not in req: # XXX add more info raise VersionConflict(dist, req) @@ -702,15 +760,22 @@ def add(self, dist, entry=None, insert=True, replace=False): return self.by_key[dist.key] = dist + normalized_name = packaging.utils.canonicalize_name(dist.key) + self.normalized_to_canonical_keys[normalized_name] = dist.key if dist.key not in keys: keys.append(dist.key) if dist.key not in keys2: keys2.append(dist.key) self._added_new(dist) - # FIXME: 'WorkingSet.resolve' is too complex (11) - def resolve(self, requirements, env=None, installer=None, # noqa: C901 - replace_conflicting=False, extras=None): + def resolve( + self, + requirements, + env=None, + installer=None, + replace_conflicting=False, + extras=None, + ): """List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. `env`, @@ -759,33 +824,9 @@ def resolve(self, requirements, env=None, installer=None, # noqa: C901 if not req_extras.markers_pass(req, extras): continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match( - req, ws, installer, - replace_conflicting=replace_conflicting - ) - if dist is None: - requirers = required_by.get(req, None) - raise DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) + dist = self._resolve_dist( + req, best, replace_conflicting, env, installer, required_by, to_activate + ) # push the new requirements onto the stack new_requirements = dist.requires(req.extras)[::-1] @@ -801,8 +842,38 @@ def resolve(self, requirements, env=None, installer=None, # noqa: C901 # return list of distros to activate return to_activate - def find_plugins( - self, plugin_env, full_env=None, installer=None, fallback=True): + def _resolve_dist( + self, req, best, replace_conflicting, env, installer, required_by, to_activate + ): + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + return dist + + def 
find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` Example usage:: @@ -855,9 +926,7 @@ def find_plugins( list(map(shadow_set.add, self)) for project_name in plugin_projects: - for dist in plugin_env[project_name]: - req = [dist.as_requirement()] try: @@ -921,15 +990,19 @@ def _added_new(self, dist): def __getstate__(self): return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] + self.entries[:], + self.entry_keys.copy(), + self.by_key.copy(), + self.normalized_to_canonical_keys.copy(), + self.callbacks[:], ) - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c + def __setstate__(self, e_k_b_n_c): + entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c self.entries = entries[:] self.entry_keys = keys.copy() self.by_key = by_key.copy() + self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy() self.callbacks = callbacks[:] @@ -957,8 +1030,8 @@ class Environment: """Searchable snapshot of distributions on a search path""" def __init__( - self, search_path=None, platform=get_supported_platform(), - python=PY_MAJOR): + self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR + ): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. @@ -1025,16 +1098,14 @@ def __getitem__(self, project_name): return self._distmap.get(distribution_key, []) def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added - """ + """Add `dist` if we ``can_add()`` it and it has not already been added""" if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - def best_match( - self, req, working_set, installer=None, replace_conflicting=False): + def best_match(self, req, working_set, installer=None, replace_conflicting=False): """Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a @@ -1121,6 +1192,7 @@ class ExtractionError(RuntimeError): class ResourceManager: """Manage resource extraction and packages""" + extraction_path = None def __init__(self): @@ -1132,9 +1204,7 @@ def resource_exists(self, package_or_requirement, resource_name): def resource_isdir(self, package_or_requirement, resource_name): """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) + return get_provider(package_or_requirement).resource_isdir(resource_name) def resource_filename(self, package_or_requirement, resource_name): """Return a true filesystem path for specified resource""" @@ -1156,9 +1226,7 @@ def resource_string(self, package_or_requirement, resource_name): def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) + return get_provider(package_or_requirement).resource_listdir(resource_name) def extraction_error(self): """Give an error message for problems extracting file(s)""" @@ -1166,7 +1234,8 @@ def extraction_error(self): old_exc = sys.exc_info()[1] cache_path = self.extraction_path or get_default_cache() - tmpl = textwrap.dedent(""" + tmpl = textwrap.dedent( + """ Can't extract file(s) to egg 
cache

                The following error occurred while trying to extract file(s)
@@ -1181,7 +1250,8 @@ def extraction_error(self):
        Perhaps your account does not have write access to this directory?  You can
        change the cache directory by setting the PYTHON_EGG_CACHE environment
        variable to point to an accessible directory.
-        """).lstrip()
+        """
+        ).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
@@ -1280,9 +1350,7 @@ def set_extraction_path(self, path):
        ``cleanup_resources()``.)
        """
        if self.cached_files:
-            raise ValueError(
-                "Can't change extraction path, files already extracted"
-            )
+            raise ValueError("Can't change extraction path, files already extracted")

        self.extraction_path = path

@@ -1306,9 +1374,8 @@ def get_default_cache():
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
-    return (
-        os.environ.get('PYTHON_EGG_CACHE')
-        or appdirs.user_cache_dir(appname='Python-Eggs')
+    return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
+        appname='Python-Eggs'
    )


@@ -1332,6 +1399,38 @@ def safe_version(version):
        return re.sub('[^A-Za-z0-9.]+', '-', version)


+def _forgiving_version(version):
+    """Fallback when ``safe_version`` is not safe enough
+    >>> parse_version(_forgiving_version('0.23ubuntu1'))
+    <Version('0.23.dev0+sanitized.ubuntu1')>
+    >>> parse_version(_forgiving_version('0.23-'))
+    <Version('0.23.dev0+sanitized')>
+    >>> parse_version(_forgiving_version('0.-_'))
+    <Version('0.dev0+sanitized')>
+    >>> parse_version(_forgiving_version('42.+?1'))
+    <Version('42.dev0+sanitized.1')>
+    >>> parse_version(_forgiving_version('hello world'))
+    <Version('0.dev0+sanitized.hello.world')>
+    """
+    version = version.replace(' ', '.')
+    match = _PEP440_FALLBACK.search(version)
+    if match:
+        safe = match["safe"]
+        rest = version[len(safe):]
+    else:
+        safe = "0"
+        rest = version
+    local = f"sanitized.{_safe_segment(rest)}".strip(".")
+    return f"{safe}.dev0+{local}"
+
+
+def _safe_segment(segment):
+    """Convert an arbitrary string into a safe segment"""
+    segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
+    segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
+    return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
+
+
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

@@ -1445,8 +1544,9 @@ def run_script(self, script_name, namespace):
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
-                "Script {script!r} not found in metadata at {self.egg_info!r}"
-                .format(**locals()),
+                "Script {script!r} not found in metadata at {self.egg_info!r}".format(
+                    **locals()
+                ),
            )
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
@@ -1459,8 +1559,12 @@ def run_script(self, script_name, namespace):
            exec(code, namespace, namespace)
        else:
            from linecache import cache
+
            cache[script_filename] = (
-                len(script_text), 0, script_text.split('\n'), script_filename
+                len(script_text),
+                0,
+                script_text.split('\n'),
+                script_filename,
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)
@@ -1540,9 +1644,9 @@ def _validate_resource_path(path):
        AttributeError: ...
""" invalid = ( - os.path.pardir in path.split(posixpath.sep) or - posixpath.isabs(path) or - ntpath.isabs(path) + os.path.pardir in path.split(posixpath.sep) + or posixpath.isabs(path) + or ntpath.isabs(path) ) if not invalid: return @@ -1624,7 +1728,10 @@ def _get(self, path): @classmethod def _register(cls): - loader_names = 'SourceFileLoader', 'SourcelessFileLoader', + loader_names = ( + 'SourceFileLoader', + 'SourcelessFileLoader', + ) for name in loader_names: loader_cls = getattr(importlib_machinery, name, type(None)) register_loader_type(loader_cls, cls) @@ -1684,6 +1791,7 @@ class MemoizedZipManifests(ZipManifests): """ Memoized zipfile manifests. """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') def load(self, path): @@ -1717,20 +1825,16 @@ def _zipinfo_name(self, fspath): if fspath == self.loader.archive: return '' if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.zip_pre) - ) + return fspath[len(self.zip_pre) :] + raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre)) def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. # pseudo-fs path fspath = self.zip_pre + zip_path if fspath.startswith(self.egg_root + os.sep): - return fspath[len(self.egg_root) + 1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.egg_root) - ) + return fspath[len(self.egg_root) + 1 :].split(os.sep) + raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root)) @property def zipinfo(self): @@ -1760,25 +1864,20 @@ def _get_date_and_size(zip_stat): # FIXME: 'ZipProvider._extract_resource' is too complex (12) def _extract_resource(self, manager, zip_path): # noqa: C901 - if zip_path in self._index(): for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) + last = self._extract_resource(manager, os.path.join(zip_path, name)) # return the extracted directory name return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) + raise IOError( + '"os.rename" and "os.unlink" are not supported ' 'on this platform' ) + try: + real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path)) if self._is_current(real_path, zip_path): return real_path @@ -2014,70 +2113,21 @@ def find_nothing(importer, path_item, only=False): register_finder(object, find_nothing) -def _by_version_descending(names): - """ - Given a list of filenames, return them in descending order - by version number. - - >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' - >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] - """ - def try_parse(name): - """ - Attempt to parse as a version or return a null version. 
- """ - try: - return packaging.version.Version(name) - except Exception: - return packaging.version.Version('0') - - def _by_version(name): - """ - Parse each component of the filename - """ - name, ext = os.path.splitext(name) - parts = itertools.chain(name.split('-'), [ext]) - return [try_parse(part) for part in parts] - - return sorted(names, key=_by_version, reverse=True) - - def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if _is_unpacked_egg(path_item): yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item, 'EGG-INFO') - ) + path_item, + metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')), ) return - entries = ( - os.path.join(path_item, child) - for child in safe_listdir(path_item) - ) - - # for performance, before sorting by version, - # screen entries for only those that will yield - # distributions - filtered = ( - entry - for entry in entries - if dist_factory(path_item, entry, only) - ) + entries = (os.path.join(path_item, child) for child in safe_listdir(path_item)) # scan for .egg and .egg-info in directory - path_item_entries = _by_version_descending(filtered) - for entry in path_item_entries: + for entry in sorted(entries): fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) for dist in factory(fullpath): @@ -2088,19 +2138,18 @@ def dist_factory(path_item, entry, only): """Return a dist_factory for the given entry.""" lower = entry.lower() is_egg_info = lower.endswith('.egg-info') - is_dist_info = ( - lower.endswith('.dist-info') and - os.path.isdir(os.path.join(path_item, entry)) + is_dist_info = lower.endswith('.dist-info') and os.path.isdir( + os.path.join(path_item, entry) ) is_meta = is_egg_info or is_dist_info return ( distributions_from_metadata - if is_meta else - find_distributions - if not only and _is_egg_path(entry) else - resolve_egg_link - if not only and lower.endswith('.egg-link') else - NoDists() + if is_meta + else find_distributions + if not only and _is_egg_path(entry) + else resolve_egg_link + if not only and lower.endswith('.egg-link') + else NoDists() ) @@ -2112,6 +2161,7 @@ class NoDists: >>> list(NoDists()('anything')) [] """ + def __bool__(self): return False @@ -2146,7 +2196,10 @@ def distributions_from_metadata(path): metadata = FileMetadata(path) entry = os.path.basename(path) yield Distribution.from_location( - root, entry, metadata, precedence=DEVELOP_DIST, + root, + entry, + metadata, + precedence=DEVELOP_DIST, ) @@ -2168,17 +2221,16 @@ def resolve_egg_link(path): """ referenced_paths = non_empty_lines(path) resolved_paths = ( - os.path.join(os.path.dirname(path), ref) - for ref in referenced_paths + os.path.join(os.path.dirname(path), ref) for ref in referenced_paths ) dist_groups = map(find_distributions, resolved_paths) return next(dist_groups, ()) -register_finder(pkgutil.ImpImporter, find_on_path) +if hasattr(pkgutil, 'ImpImporter'): + register_finder(pkgutil.ImpImporter, find_on_path) -if hasattr(importlib_machinery, 'FileFinder'): - register_finder(importlib_machinery.FileFinder, find_on_path) +register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2276,6 +2328,15 @@ def position_in_sys_path(path): def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" + msg = ( + f"Deprecated 
@@ -2276,6 +2328,15 @@ def position_in_sys_path(path):
 
 def declare_namespace(packageName):
     """Declare that package 'packageName' is a namespace package"""
+    msg = (
+        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
+        "Implementing implicit namespace packages (as specified in PEP 420) "
+        "is preferred to `pkg_resources.declare_namespace`. "
+        "See https://setuptools.pypa.io/en/latest/references/"
+        "keywords.html#keyword-namespace-packages"
+    )
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
     _imp.acquire_lock()
     try:
         if packageName in _namespace_packages:
@@ -2332,11 +2393,11 @@ def file_ns_handler(importer, path_item, packageName, module):
     return subpath
 
 
-register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
-register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+if hasattr(pkgutil, 'ImpImporter'):
+    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
 
-if hasattr(importlib_machinery, 'FileFinder'):
-    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
 
 
 def null_ns_handler(importer, path_item, packageName, module):
@@ -2348,8 +2409,7 @@ def null_ns_handler(importer, path_item, packageName, module):
 
 def normalize_path(filename):
     """Normalize a file/dir name for comparison purposes"""
-    return os.path.normcase(os.path.realpath(os.path.normpath(
-        _cygwin_patch(filename))))
+    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
 
 
 def _cygwin_patch(filename):  # pragma: nocover
@@ -2380,9 +2440,9 @@ def _is_egg_path(path):
 
 def _is_zip_egg(path):
     return (
-        path.lower().endswith('.egg') and
-        os.path.isfile(path) and
-        zipfile.is_zipfile(path)
+        path.lower().endswith('.egg')
+        and os.path.isfile(path)
+        and zipfile.is_zipfile(path)
     )
 
 
@@ -2390,9 +2450,8 @@ def _is_unpacked_egg(path):
     """
     Determine if given path appears to be an unpacked egg.
    """
-    return (
-        path.lower().endswith('.egg') and
-        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
+    return path.lower().endswith('.egg') and os.path.isfile(
+        os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    )
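With the `declare_namespace` hunk above applied, every caller now receives a `DeprecationWarning` steering it toward PEP 420 implicit namespace packages; `stacklevel=2` attributes the warning to the caller rather than to `pkg_resources` itself. A quick way to observe it (a sketch: `'mypkg'` is a hypothetical name, and the call is harmless for a package that does not exist on `sys.path`):

    import warnings
    import pkg_resources

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        pkg_resources.declare_namespace('mypkg')  # hypothetical package name

    print(caught[0].category.__name__)              # DeprecationWarning
    print(str(caught[0].message).splitlines()[0])   # first line of the new message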
""" + def is_version_line(line): return line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) line = next(iter(version_lines), '') _, _, value = line.partition(':') @@ -2566,12 +2627,19 @@ def is_version_line(line): class Distribution: """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' def __init__( - self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): + self, + location=None, + metadata=None, + project_name=None, + version=None, + py_version=PY_MAJOR, + platform=None, + precedence=EGG_DIST, + ): self.project_name = safe_name(project_name or 'Unknown') if version is not None: self._version = safe_version(version) @@ -2594,8 +2662,13 @@ def from_location(cls, location, basename, metadata=None, **kw): 'name', 'ver', 'pyver', 'plat' ) return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw + location, + metadata, + project_name=project_name, + version=version, + py_version=py_version, + platform=platform, + **kw, )._reload_version() def _reload_version(self): @@ -2604,7 +2677,7 @@ def _reload_version(self): @property def hashcmp(self): return ( - self.parsed_version, + self._forgiving_parsed_version, self.precedence, self.key, self.location, @@ -2651,35 +2724,42 @@ def key(self): @property def parsed_version(self): if not hasattr(self, "_parsed_version"): - self._parsed_version = parse_version(self.version) + try: + self._parsed_version = parse_version(self.version) + except packaging.version.InvalidVersion as ex: + info = f"(package: {self.project_name})" + if hasattr(ex, "add_note"): + ex.add_note(info) # PEP 678 + raise + raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None return self._parsed_version - def _warn_legacy_version(self): - LV = packaging.version.LegacyVersion - is_legacy = isinstance(self._parsed_version, LV) - if not is_legacy: - return + @property + def _forgiving_parsed_version(self): + try: + return self.parsed_version + except packaging.version.InvalidVersion as ex: + self._parsed_version = parse_version(_forgiving_version(self.version)) - # While an empty version is technically a legacy version and - # is not a valid PEP 440 version, it's also unlikely to - # actually come from someone and instead it is more likely that - # it comes from setuptools attempting to parse a filename and - # including it in the list. So for that we'll gate this warning - # on if the version is anything at all or not. - if not self.version: - return + notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678 + msg = f"""!!\n\n + ************************************************************************* + {str(ex)}\n{notes} + + This is a long overdue deprecation. + For the time being, `pkg_resources` will use `{self._parsed_version}` + as a replacement to avoid breaking existing environments, + but no future compatibility is guaranteed. - tmpl = textwrap.dedent(""" - '{project_name} ({version})' is being parsed as a legacy, - non PEP 440, - version. You may find odd behavior and sort order. - In particular it will be sorted as less than 0.0. It - is recommended to migrate to PEP 440 compatible - versions. - """).strip().replace('\n', ' ') + If you maintain package {self.project_name} you should implement + the relevant changes to adequate the project to PEP 440 immediately. + ************************************************************************* + \n\n!! 
+ """ + warnings.warn(msg, DeprecationWarning) - warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + return self._parsed_version @property def version(self): @@ -2689,9 +2769,9 @@ def version(self): version = self._get_version() if version is None: path = self._get_metadata_path_for_display(self.PKG_INFO) - msg = ( - "Missing 'Version:' header and/or {} file at path: {}" - ).format(self.PKG_INFO, path) + msg = ("Missing 'Version:' header and/or {} file at path: {}").format( + self.PKG_INFO, path + ) raise ValueError(msg, self) from e return version @@ -2720,8 +2800,7 @@ def _filter_extras(dm): reqs = dm.pop(extra) new_extra, _, marker = extra.partition(':') fails_marker = marker and ( - invalid_marker(marker) - or not evaluate_marker(marker) + invalid_marker(marker) or not evaluate_marker(marker) ) if fails_marker: reqs = [] @@ -2793,8 +2872,9 @@ def activate(self, path=None, replace=False): def egg_name(self): """Return what this distribution's standard .egg filename should be""" filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR + to_filename(self.project_name), + to_filename(self.version), + self.py_version or PY_MAJOR, ) if self.platform: @@ -2824,17 +2904,13 @@ def __getattr__(self, attr): def __dir__(self): return list( set(super(Distribution, self).__dir__()) - | set( - attr for attr in self._provider.__dir__() - if not attr.startswith('_') - ) + | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) ) @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw + _normalize_cached(filename), os.path.basename(filename), metadata, **kw ) def as_requirement(self): @@ -2946,14 +3022,18 @@ def check_version_conflict(self): nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): + if ( + modname not in sys.modules + or modname in nsp + or modname in _namespace_packages + ): continue if modname in ('pkg_resources', 'setuptools', 'site'): continue fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): + if fn and ( + normalize_path(fn).startswith(loc) or fn.startswith(self.location) + ): continue issue_warning( "Module %s was already imported from %s, but %s is being added" @@ -2966,6 +3046,9 @@ def has_version(self): except ValueError: issue_warning("Unbuilt egg for " + repr(self)) return False + except SystemError: + # TODO: remove this except clause when python/cpython#103632 is fixed. + return False return True def clone(self, **kw): @@ -3005,6 +3088,7 @@ class DistInfoDistribution(Distribution): Wrap an actual or potential sys.path entry w/metadata, .dist-info style. 
""" + PKG_INFO = 'METADATA' EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") @@ -3090,8 +3174,7 @@ def __init__(self, requirement_string): self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() - self.specs = [ - (spec.operator, spec.version) for spec in self.specifier] + self.specs = [(spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, @@ -3103,10 +3186,7 @@ def __init__(self, requirement_string): self.__hash = hash(self.hashCmp) def __eq__(self, other): - return ( - isinstance(other, Requirement) and - self.hashCmp == other.hashCmp - ) + return isinstance(other, Requirement) and self.hashCmp == other.hashCmp def __ne__(self, other): return not self == other @@ -3131,7 +3211,7 @@ def __repr__(self): @staticmethod def parse(s): - req, = parse_requirements(s) + (req,) = parse_requirements(s) return req @@ -3269,10 +3349,7 @@ def _initialize_master_working_set(): # ensure that all distributions added to the working set in the future # (e.g. by calling ``require()``) will get activated as well, # with higher priority (replace=True). - tuple( - dist.activate(replace=False) - for dist in working_set - ) + tuple(dist.activate(replace=False) for dist in working_set) add_activation_listener( lambda dist: dist.activate(replace=True), existing=False, diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/DESCRIPTION.rst b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/DESCRIPTION.rst deleted file mode 100644 index c605ec2634..0000000000 --- a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,227 +0,0 @@ - -.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png - :target: http://travis-ci.org/ActiveState/appdirs - -the problem -=========== - -What directory should your app use for storing user data? If running on Mac OS X, you -should use:: - - ~/Library/Application Support/ - -If on Windows (at least English Win XP) that should be:: - - C:\Documents and Settings\\Application Data\Local Settings\\ - -or possibly:: - - C:\Documents and Settings\\Application Data\\ - -for `roaming profiles `_ but that is another story. - -On Linux (and other Unices) the dir, according to the `XDG -spec `_, is:: - - ~/.local/share/ - - -``appdirs`` to the rescue -========================= - -This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will -help you choose an appropriate: - -- user data dir (``user_data_dir``) -- user config dir (``user_config_dir``) -- user cache dir (``user_cache_dir``) -- site data dir (``site_data_dir``) -- site config dir (``site_config_dir``) -- user log dir (``user_log_dir``) - -and also: - -- is a single module so other Python packages can include their own private copy -- is slightly opinionated on the directory names used. Look for "OPINION" in - documentation and code for when an opinion is being applied. 
- - -some example output -=================== - -On Mac OS X:: - - >>> from appdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/Users/trentm/Library/Application Support/SuperApp' - >>> site_data_dir(appname, appauthor) - '/Library/Application Support/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/Users/trentm/Library/Caches/SuperApp' - >>> user_log_dir(appname, appauthor) - '/Users/trentm/Library/Logs/SuperApp' - -On Windows 7:: - - >>> from appdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' - >>> user_data_dir(appname, appauthor, roaming=True) - 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' - >>> user_cache_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' - >>> user_log_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' - -On Linux:: - - >>> from appdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/home/trentm/.local/share/SuperApp - >>> site_data_dir(appname, appauthor) - '/usr/local/share/SuperApp' - >>> site_data_dir(appname, appauthor, multipath=True) - '/usr/local/share/SuperApp:/usr/share/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/home/trentm/.cache/SuperApp' - >>> user_log_dir(appname, appauthor) - '/home/trentm/.cache/SuperApp/log' - >>> user_config_dir(appname) - '/home/trentm/.config/SuperApp' - >>> site_config_dir(appname) - '/etc/xdg/SuperApp' - >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc' - >>> site_config_dir(appname, multipath=True) - '/etc/SuperApp:/usr/local/etc/SuperApp' - - -``AppDirs`` for convenience -=========================== - -:: - - >>> from appdirs import AppDirs - >>> dirs = AppDirs("SuperApp", "Acme") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp' - >>> dirs.site_data_dir - '/Library/Application Support/SuperApp' - >>> dirs.user_cache_dir - '/Users/trentm/Library/Caches/SuperApp' - >>> dirs.user_log_dir - '/Users/trentm/Library/Logs/SuperApp' - - - -Per-version isolation -===================== - -If you have multiple versions of your app in use that you want to be -able to run side-by-side, then you may want version-isolation for these -dirs:: - - >>> from appdirs import AppDirs - >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp/1.0' - >>> dirs.site_data_dir - '/Library/Application Support/SuperApp/1.0' - >>> dirs.user_cache_dir - '/Users/trentm/Library/Caches/SuperApp/1.0' - >>> dirs.user_log_dir - '/Users/trentm/Library/Logs/SuperApp/1.0' - - - -appdirs Changelog -================= - -appdirs 1.4.3 -------------- -- [PR #76] Python 3.6 invalid escape sequence deprecation fixes -- Fix for Python 3.6 support - -appdirs 1.4.2 -------------- -- [PR #84] Allow installing without setuptools -- [PR #86] Fix string delimiters in setup.py description -- Add Python 3.6 support - -appdirs 1.4.1 -------------- -- [issue #38] Fix _winreg import on Windows Py3 -- [issue #55] Make appname optional - -appdirs 1.4.0 -------------- -- [PR #42] AppAuthor is now optional on Windows -- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows - support requires `JNA `_. 
-- [PR #44] Fix incorrect behaviour of the site_config_dir method - -appdirs 1.3.0 -------------- -- [Unix, issue 16] Conform to XDG standard, instead of breaking it for - everybody -- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are - usually case sensitive, so mangling is not wise -- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result - based on XDG_DATA_DIRS and make room for respecting the standard which - specifies XDG_DATA_DIRS is a multiple-value variable -- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to - XDG specs; on Windows and Mac return the corresponding ``*_data_dir`` - -appdirs 1.2.0 -------------- - -- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more - typical. -- [issue 9] Make ``unicode`` work on py3k. - -appdirs 1.1.0 -------------- - -- [issue 4] Add ``AppDirs.user_log_dir``. -- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec - `_. -- [Mac, issue 5] Fix ``site_data_dir()`` on Mac. -- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports - Python3 now. -- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use - ``opinion=False`` option to disable this. -- Add ``appdirs.AppDirs`` convenience class. Usage: - - >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp/1.0' - -- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short - paths if there are high bit chars. -- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g. - "~/.superapp/cache". -- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only) - and change the default ``user_data_dir`` behaviour to use a *non*-roaming - profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because - a large roaming profile can cause login speed issues. The "only syncs on - logout" behaviour can cause surprises in appdata info. - - -appdirs 1.0.1 (never released) ------------------------------- - -Started this changelog 27 July 2010. Before that this module originated in the -`Komodo `_ product as ``applib.py`` and then -as `applib/location.py -`_ (used by -`PyPM `_ in `ActivePython -`_). This is basically a fork of -applib.py 1.0.1 and applib/location.py 1.0.1. - - - diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/METADATA b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/METADATA deleted file mode 100644 index 69ddf93459..0000000000 --- a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/METADATA +++ /dev/null @@ -1,254 +0,0 @@ -Metadata-Version: 2.0 -Name: appdirs -Version: 1.4.3 -Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". 
-Home-page: http://github.com/ActiveState/appdirs -Author: Trent Mick; Sridhar Ratnakumar; Jeff Rouse -Author-email: trentm@gmail.com; github@srid.name; jr@its.to -License: MIT -Keywords: application directory log cache user -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Topic :: Software Development :: Libraries :: Python Modules - - -.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png - :target: http://travis-ci.org/ActiveState/appdirs - -the problem -=========== - -What directory should your app use for storing user data? If running on Mac OS X, you -should use:: - - ~/Library/Application Support/ - -If on Windows (at least English Win XP) that should be:: - - C:\Documents and Settings\\Application Data\Local Settings\\ - -or possibly:: - - C:\Documents and Settings\\Application Data\\ - -for `roaming profiles `_ but that is another story. - -On Linux (and other Unices) the dir, according to the `XDG -spec `_, is:: - - ~/.local/share/ - - -``appdirs`` to the rescue -========================= - -This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will -help you choose an appropriate: - -- user data dir (``user_data_dir``) -- user config dir (``user_config_dir``) -- user cache dir (``user_cache_dir``) -- site data dir (``site_data_dir``) -- site config dir (``site_config_dir``) -- user log dir (``user_log_dir``) - -and also: - -- is a single module so other Python packages can include their own private copy -- is slightly opinionated on the directory names used. Look for "OPINION" in - documentation and code for when an opinion is being applied. 
- - -some example output -=================== - -On Mac OS X:: - - >>> from appdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/Users/trentm/Library/Application Support/SuperApp' - >>> site_data_dir(appname, appauthor) - '/Library/Application Support/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/Users/trentm/Library/Caches/SuperApp' - >>> user_log_dir(appname, appauthor) - '/Users/trentm/Library/Logs/SuperApp' - -On Windows 7:: - - >>> from appdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' - >>> user_data_dir(appname, appauthor, roaming=True) - 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' - >>> user_cache_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' - >>> user_log_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' - -On Linux:: - - >>> from appdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/home/trentm/.local/share/SuperApp - >>> site_data_dir(appname, appauthor) - '/usr/local/share/SuperApp' - >>> site_data_dir(appname, appauthor, multipath=True) - '/usr/local/share/SuperApp:/usr/share/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/home/trentm/.cache/SuperApp' - >>> user_log_dir(appname, appauthor) - '/home/trentm/.cache/SuperApp/log' - >>> user_config_dir(appname) - '/home/trentm/.config/SuperApp' - >>> site_config_dir(appname) - '/etc/xdg/SuperApp' - >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc' - >>> site_config_dir(appname, multipath=True) - '/etc/SuperApp:/usr/local/etc/SuperApp' - - -``AppDirs`` for convenience -=========================== - -:: - - >>> from appdirs import AppDirs - >>> dirs = AppDirs("SuperApp", "Acme") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp' - >>> dirs.site_data_dir - '/Library/Application Support/SuperApp' - >>> dirs.user_cache_dir - '/Users/trentm/Library/Caches/SuperApp' - >>> dirs.user_log_dir - '/Users/trentm/Library/Logs/SuperApp' - - - -Per-version isolation -===================== - -If you have multiple versions of your app in use that you want to be -able to run side-by-side, then you may want version-isolation for these -dirs:: - - >>> from appdirs import AppDirs - >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp/1.0' - >>> dirs.site_data_dir - '/Library/Application Support/SuperApp/1.0' - >>> dirs.user_cache_dir - '/Users/trentm/Library/Caches/SuperApp/1.0' - >>> dirs.user_log_dir - '/Users/trentm/Library/Logs/SuperApp/1.0' - - - -appdirs Changelog -================= - -appdirs 1.4.3 -------------- -- [PR #76] Python 3.6 invalid escape sequence deprecation fixes -- Fix for Python 3.6 support - -appdirs 1.4.2 -------------- -- [PR #84] Allow installing without setuptools -- [PR #86] Fix string delimiters in setup.py description -- Add Python 3.6 support - -appdirs 1.4.1 -------------- -- [issue #38] Fix _winreg import on Windows Py3 -- [issue #55] Make appname optional - -appdirs 1.4.0 -------------- -- [PR #42] AppAuthor is now optional on Windows -- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows - support requires `JNA `_. 
-- [PR #44] Fix incorrect behaviour of the site_config_dir method - -appdirs 1.3.0 -------------- -- [Unix, issue 16] Conform to XDG standard, instead of breaking it for - everybody -- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are - usually case sensitive, so mangling is not wise -- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result - based on XDG_DATA_DIRS and make room for respecting the standard which - specifies XDG_DATA_DIRS is a multiple-value variable -- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to - XDG specs; on Windows and Mac return the corresponding ``*_data_dir`` - -appdirs 1.2.0 -------------- - -- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more - typical. -- [issue 9] Make ``unicode`` work on py3k. - -appdirs 1.1.0 -------------- - -- [issue 4] Add ``AppDirs.user_log_dir``. -- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec - `_. -- [Mac, issue 5] Fix ``site_data_dir()`` on Mac. -- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports - Python3 now. -- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use - ``opinion=False`` option to disable this. -- Add ``appdirs.AppDirs`` convenience class. Usage: - - >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp/1.0' - -- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short - paths if there are high bit chars. -- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g. - "~/.superapp/cache". -- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only) - and change the default ``user_data_dir`` behaviour to use a *non*-roaming - profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because - a large roaming profile can cause login speed issues. The "only syncs on - logout" behaviour can cause surprises in appdata info. - - -appdirs 1.0.1 (never released) ------------------------------- - -Started this changelog 27 July 2010. Before that this module originated in the -`Komodo `_ product as ``applib.py`` and then -as `applib/location.py -`_ (used by -`PyPM `_ in `ActivePython -`_). This is basically a fork of -applib.py 1.0.1 and applib/location.py 1.0.1. 
- - - diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/RECORD b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/RECORD deleted file mode 100644 index 3f45ff59b9..0000000000 --- a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -__pycache__/appdirs.cpython-310.pyc,, -appdirs-1.4.3.dist-info/DESCRIPTION.rst,sha256=77Fe8OIOLSjDSNdLiL5xywMKO-AGE42rdXkqKo4Ee-k,7531 -appdirs-1.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -appdirs-1.4.3.dist-info/METADATA,sha256=3IFw6jTfImdOqsCb2GYvVR157tL7KEzfRAszn382csk,8773 -appdirs-1.4.3.dist-info/RECORD,, -appdirs-1.4.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -appdirs-1.4.3.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -appdirs-1.4.3.dist-info/metadata.json,sha256=fL_Q-GuFJu3PJxMrwU7SdsI8RGqjIfi2AvouCSF5DSA,1359 -appdirs-1.4.3.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8 -appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701 diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/WHEEL b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/WHEEL deleted file mode 100644 index 8b6dd1b5a8..0000000000 --- a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/metadata.json b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/metadata.json deleted file mode 100644 index da1e5f3a7e..0000000000 --- a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"contacts": [{"email": "trentm@gmail.com; github@srid.name; jr@its.to", "name": "Trent Mick; Sridhar Ratnakumar; Jeff Rouse", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/ActiveState/appdirs"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["application", "directory", "log", "cache", "user"], "license": "MIT", "metadata_version": "2.0", "name": "appdirs", "summary": "A small Python module for determining appropriate platform-specific dirs, e.g. 
a \"user data dir\".", "test_requires": [{"requires": []}], "version": "1.4.3"} \ No newline at end of file diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt deleted file mode 100644 index d64bc321a1..0000000000 --- a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -appdirs diff --git a/pkg_resources/_vendor/appdirs.py b/pkg_resources/_vendor/appdirs.py deleted file mode 100644 index ae67001af8..0000000000 --- a/pkg_resources/_vendor/appdirs.py +++ /dev/null @@ -1,608 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2005-2010 ActiveState Software Inc. -# Copyright (c) 2013 Eddy Petrișor - -"""Utilities for determining application-specific dirs. - -See for details and usage. -""" -# Dev Notes: -# - MSDN on where to store app data files: -# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 -# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html -# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html - -__version_info__ = (1, 4, 3) -__version__ = '.'.join(map(str, __version_info__)) - - -import sys -import os - -PY3 = sys.version_info[0] == 3 - -if PY3: - unicode = str - -if sys.platform.startswith('java'): - import platform - os_name = platform.java_ver()[3][0] - if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. - system = 'win32' - elif os_name.startswith('Mac'): # "Mac OS X", etc. - system = 'darwin' - else: # "Linux", "SunOS", "FreeBSD", etc. - # Setting this to "linux2" is not ideal, but only Windows or Mac - # are actually checked for and the rest of the module expects - # *sys.platform* style strings. - system = 'linux2' -else: - system = sys.platform - - - -def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - Mac OS X: ~/Library/Application Support/ - Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\\Application Data\\ - Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ - Win 7 (not roaming): C:\Users\\AppData\Local\\ - Win 7 (roaming): C:\Users\\AppData\Roaming\\ - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/". 
- """ - if system == "win32": - if appauthor is None: - appauthor = appname - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.normpath(_get_win_folder(const)) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('~/Library/Application Support/') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): - r"""Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of data dirs should be - returned. By default, the first item from XDG_DATA_DIRS is - returned, or '/usr/local/share/', - if XDG_DATA_DIRS is not set - - Typical site data directories are: - Mac OS X: /Library/Application Support/ - Unix: /usr/local/share/ or /usr/share/ - Win XP: C:\Documents and Settings\All Users\Application Data\\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. - - For Unix, this is using the $XDG_DATA_DIRS[0] default. - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. - """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('/Library/Application Support') - if appname: - path = os.path.join(path, appname) - else: - # XDG default for $XDG_DATA_DIRS - # only first, if multipath is False - path = os.getenv('XDG_DATA_DIRS', - os.pathsep.join(['/usr/local/share', '/usr/share'])) - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - if appname and version: - path = os.path.join(path, version) - return path - - -def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. 
You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user config directories are: - Mac OS X: same as user_data_dir - Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/". - """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): - r"""Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of config dirs should be - returned. By default, the first item from XDG_CONFIG_DIRS is - returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set - - Typical site config directories are: - Mac OS X: same as site_data_dir - Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in - $XDG_CONFIG_DIRS - Win *: same as site_data_dir - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - - For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. - """ - if system in ["win32", "darwin"]: - path = site_data_dir(appname, appauthor) - if appname and version: - path = os.path.join(path, version) - else: - # XDG default for $XDG_CONFIG_DIRS - # only first, if multipath is False - path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - -def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. 
You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Cache" to the base app data dir for Windows. See - discussion below. - - Typical user cache directories are: - Mac OS X: ~/Library/Caches/ - Unix: ~/.cache/ (XDG default) - Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache - Vista: C:\Users\\AppData\Local\\\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go in - the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming - app data dir (the default returned by `user_data_dir` above). Apps typically - put cache data somewhere *under* the given dir here. Some examples: - ...\Mozilla\Firefox\Profiles\\Cache - ...\Acme\SuperApp\Cache\1.0 - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - This can be disabled with the `opinion=False` option. - """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - if opinion: - path = os.path.join(path, "Cache") - elif system == 'darwin': - path = os.path.expanduser('~/Library/Caches') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific state dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user state directories are: - Mac OS X: same as user_data_dir - Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow this Debian proposal - to extend the XDG spec and support $XDG_STATE_HOME. - - That means, by default "~/.local/state/". 
- """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific log dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Logs" to the base app data dir for Windows, and "log" to the - base cache dir for Unix. See discussion below. - - Typical user log directories are: - Mac OS X: ~/Library/Logs/ - Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined - Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs - Vista: C:\Users\\AppData\Local\\\Logs - - On Windows the only suggestion in the MSDN docs is that local settings - go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in - examples of what some windows apps use for a logs dir.) - - OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` - value for Windows and appends "log" to the user cache dir for Unix. - This can be disabled with the `opinion=False` option. 
- """ - if system == "darwin": - path = os.path.join( - os.path.expanduser('~/Library/Logs'), - appname) - elif system == "win32": - path = user_data_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "Logs") - else: - path = user_cache_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "log") - if appname and version: - path = os.path.join(path, version) - return path - - -class AppDirs(object): - """Convenience wrapper for getting application dirs.""" - def __init__(self, appname=None, appauthor=None, version=None, - roaming=False, multipath=False): - self.appname = appname - self.appauthor = appauthor - self.version = version - self.roaming = roaming - self.multipath = multipath - - @property - def user_data_dir(self): - return user_data_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_data_dir(self): - return site_data_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_config_dir(self): - return user_config_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_config_dir(self): - return site_config_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_cache_dir(self): - return user_cache_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_state_dir(self): - return user_state_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_log_dir(self): - return user_log_dir(self.appname, self.appauthor, - version=self.version) - - -#---- internal support stuff - -def _get_win_folder_from_registry(csidl_name): - """This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - if PY3: - import winreg as _winreg - else: - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - dir, type = _winreg.QueryValueEx(key, shell_folder_name) - return dir - - -def _get_win_folder_with_pywin32(csidl_name): - from win32com.shell import shellcon, shell - dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) - # Try to make this a unicode path because SHGetFolderPath does - # not return unicode strings when there is unicode data in the - # path. - try: - dir = unicode(dir) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - try: - import win32api - dir = win32api.GetShortPathName(dir) - except ImportError: - pass - except UnicodeError: - pass - return dir - - -def _get_win_folder_with_ctypes(csidl_name): - import ctypes - - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . 
- has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - -def _get_win_folder_with_jna(csidl_name): - import array - from com.sun import jna - from com.sun.jna.platform import win32 - - buf_size = win32.WinDef.MAX_PATH * 2 - buf = array.zeros('c', buf_size) - shell = win32.Shell32.INSTANCE - shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf = array.zeros('c', buf_size) - kernel = win32.Kernel32.INSTANCE - if kernel.GetShortPathName(dir, buf, buf_size): - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - return dir - -if system == "win32": - try: - import win32com.shell - _get_win_folder = _get_win_folder_with_pywin32 - except ImportError: - try: - from ctypes import windll - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - try: - import com.sun.jna - _get_win_folder = _get_win_folder_with_jna - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -#---- self test code - -if __name__ == "__main__": - appname = "MyApp" - appauthor = "MyCompany" - - props = ("user_data_dir", - "user_config_dir", - "user_cache_dir", - "user_state_dir", - "user_log_dir", - "site_data_dir", - "site_config_dir") - - print("-- app dirs %s --" % __version__) - - print("-- app dirs (with optional 'version')") - dirs = AppDirs(appname, appauthor, version="1.0") - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'version')") - dirs = AppDirs(appname, appauthor) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'appauthor')") - dirs = AppDirs(appname) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (with disabled 'appauthor')") - dirs = AppDirs(appname, appauthor=False) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/INSTALLER b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER similarity index 100% rename from pkg_resources/_vendor/appdirs-1.4.3.dist-info/INSTALLER rename to pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/LICENSE b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
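A note on the METADATA churn that follows: several test dependencies are now gated behind environment markers (for example, pytest-flake8 only applies when python_version < "3.12"). If it helps while reviewing, such markers can be evaluated with the packaging library; a minimal sketch, with the marker string taken from the diff below and illustrative environment values:

    from packaging.markers import Marker

    marker = Marker('(python_version < "3.12") and extra == "testing"')
    # evaluate() overlays the given values on the defaults for the running
    # interpreter; 'extra' has no default and must be supplied explicitly.
    print(marker.evaluate({'python_version': '3.11', 'extra': 'testing'}))  # True
    print(marker.evaluate({'python_version': '3.12', 'extra': 'testing'}))  # False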
diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/METADATA b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/METADATA similarity index 62% rename from pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/METADATA rename to pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/METADATA index cdb1e78399..a9995f09a3 100644 --- a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/METADATA +++ b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/METADATA @@ -1,41 +1,40 @@ Metadata-Version: 2.1 Name: importlib-resources -Version: 5.4.0 +Version: 5.10.2 Summary: Read resources from Python packages Home-page: https://github.com/python/importlib_resources Author: Barry Warsaw Author-email: barry@python.org -License: UNKNOWN Project-URL: Documentation, https://importlib-resources.readthedocs.io/ -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only -Requires-Python: >=3.6 +Requires-Python: >=3.7 License-File: LICENSE Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10" Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/importlib_resources.svg - :target: `PyPI link`_ + :target: https://pypi.org/project/importlib_resources .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/importlib_resources .. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 @@ -48,9 +47,12 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2022-informational :target: https://blog.jaraco.com/skeleton +.. 
image:: https://tidelift.com/badges/package/pypi/importlib-resources + :target: https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=readme + ``importlib_resources`` is a backport of Python standard library `importlib.resources `_ @@ -74,7 +76,9 @@ were contributed to different versions in the standard library: * - importlib_resources - stdlib - * - 5.2 + * - 5.9 + - 3.12 + * - 5.7 - 3.11 * - 5.0 - 3.10 @@ -83,4 +87,18 @@ were contributed to different versions in the standard library: * - 0.5 (?) - 3.7 +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD new file mode 100644 index 0000000000..50fe7fc5c2 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD @@ -0,0 +1,77 @@ +importlib_resources-5.10.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_resources-5.10.2.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +importlib_resources-5.10.2.dist-info/METADATA,sha256=Xo5ntATvDYUxdmW8tr8kxtfdiOC9889mOk-LE1LtZfI,4111 +importlib_resources-5.10.2.dist-info/RECORD,, +importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 +importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 +importlib_resources/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/__pycache__/_adapters.cpython-38.pyc,, +importlib_resources/__pycache__/_common.cpython-38.pyc,, +importlib_resources/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/__pycache__/_itertools.cpython-38.pyc,, +importlib_resources/__pycache__/_legacy.cpython-38.pyc,, +importlib_resources/__pycache__/abc.cpython-38.pyc,, +importlib_resources/__pycache__/readers.cpython-38.pyc,, +importlib_resources/__pycache__/simple.cpython-38.pyc,, +importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 +importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457 +importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923 +importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 +importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481 +importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140 +importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581 +importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576 +importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/__pycache__/__init__.cpython-38.pyc,, 
+importlib_resources/tests/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/tests/__pycache__/_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_open.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_read.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc,, +importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc,, +importlib_resources/tests/__pycache__/util.cpython-38.pyc,, +importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708 +importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039 +importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 +importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 +importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260 +importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968 +importlib_resources/tests/test_files.py,sha256=1Y8da-g0xOQLzuREDYUiRc_qhWlvFNeydW_mUH7l15w,3251 +importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565 +importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103 +importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408 +importlib_resources/tests/test_reader.py,sha256=nPhldbYPq3fXoQs0ZAub4atjhp2lgNyLNv2G1pg6Agw,4480 
+importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4I_LpN8DYpHtT0,8478 +importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417 +importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873 +importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 +importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/REQUESTED b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED similarity index 100% rename from pkg_resources/_vendor/appdirs-1.4.3.dist-info/REQUESTED rename to pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/WHEEL b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/WHEEL similarity index 65% rename from pkg_resources/_vendor/more_itertools-8.12.0.dist-info/WHEEL rename to pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/WHEEL index 5bad85fdc1..57e3d840d5 100644 --- a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/WHEEL +++ b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) +Generator: bdist_wheel (0.38.4) Root-Is-Purelib: true Tag: py3-none-any diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt similarity index 100% rename from pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt rename to pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/LICENSE b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/LICENSE deleted file mode 100644 index 378b991a4d..0000000000 --- a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2017-2019 Brett Cannon, Barry Warsaw - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
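For reference while reviewing the RECORD changes above and below: each entry has the form path,sha256=<digest>,size, where the digest is an unpadded urlsafe-base64 SHA-256 of the file contents (the RECORD file lists itself with both fields empty). A minimal verifier sketch, assuming entries resolve relative to the directory containing the dist-info and ignoring the CSV quoting that paths with commas would need:

    import base64
    import hashlib
    import pathlib

    def check_record_line(line: str, root: pathlib.Path) -> bool:
        """Verify one RECORD entry of the form path,sha256=<digest>,size."""
        path, hash_spec, size = line.rsplit(',', 2)
        if not hash_spec:          # e.g. the RECORD file's own entry
            return True
        algo, _, expected = hash_spec.partition('=')
        data = (root / path).read_bytes()
        actual = base64.urlsafe_b64encode(
            hashlib.new(algo, data).digest()
        ).rstrip(b'=').decode()
        return actual == expected and len(data) == int(size)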
diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/RECORD deleted file mode 100644 index 7a68a2f26c..0000000000 --- a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/RECORD +++ /dev/null @@ -1,75 +0,0 @@ -importlib_resources-5.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -importlib_resources-5.4.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568 -importlib_resources-5.4.0.dist-info/METADATA,sha256=i5jH25IbM0Ls6u6UzSSCOa0c8hpDvePxqgnQwh2T5Io,3135 -importlib_resources-5.4.0.dist-info/RECORD,, -importlib_resources-5.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources-5.4.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -importlib_resources-5.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 -importlib_resources/__init__.py,sha256=zuA0lbRgtVVCcAztM0z5LuBiOCV9L_3qtI6mW2p5xAg,525 -importlib_resources/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/__pycache__/_adapters.cpython-310.pyc,, -importlib_resources/__pycache__/_common.cpython-310.pyc,, -importlib_resources/__pycache__/_compat.cpython-310.pyc,, -importlib_resources/__pycache__/_itertools.cpython-310.pyc,, -importlib_resources/__pycache__/_legacy.cpython-310.pyc,, -importlib_resources/__pycache__/abc.cpython-310.pyc,, -importlib_resources/__pycache__/readers.cpython-310.pyc,, -importlib_resources/__pycache__/simple.cpython-310.pyc,, -importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 -importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741 -importlib_resources/_compat.py,sha256=3LpkIfeN9x4oXjRea5TxZP5VYhPlzuVRhGe-hEv-S0s,2704 -importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 -importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494 -importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886 -importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566 -importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836 -importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/__pycache__/_compat.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_contents.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_files.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_open.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_path.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_read.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_reader.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_resource.cpython-310.pyc,, -importlib_resources/tests/__pycache__/update-zips.cpython-310.pyc,, -importlib_resources/tests/__pycache__/util.cpython-310.pyc,, -importlib_resources/tests/_compat.py,sha256=QGI_4p0DXybypoYvw0kr3jfQqvls3p8u4wy4Wvf0Z_o,435 -importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/__pycache__/__init__.cpython-310.pyc,, 
-importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 -importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 -importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/one/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 -importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/two/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 -importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 -importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 -importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260 -importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968 -importlib_resources/tests/test_files.py,sha256=1Nqv6VM_MjfwrmtXYL1a1CMT0QhCxi3hNMqwXlfMQTg,1184 -importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565 -importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103 -importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408 -importlib_resources/tests/test_reader.py,sha256=hgXHquqAEnioemv20ZZcDlVaiOrcZKADO37_FkiQ00Y,4286 -importlib_resources/tests/test_resource.py,sha256=DqfLNc9kaN5obqxU8kn0sRUWMf9MygagrpfMV5-QfWg,8145 -importlib_resources/tests/update-zips.py,sha256=x3iJVqWnMM5qp4Oob2Pl3o6Yi03sUjEv_5Wf-UCg3ps,1415 -importlib_resources/tests/util.py,sha256=X1j-0C96pu3_tmtJuLhzfBfcfMenOphDLkxtCt5j7t4,5309 -importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 -importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/pkg_resources/_vendor/importlib_resources/_common.py b/pkg_resources/_vendor/importlib_resources/_common.py index a12e2c75d1..3c6de1cfb2 100644 --- a/pkg_resources/_vendor/importlib_resources/_common.py +++ b/pkg_resources/_vendor/importlib_resources/_common.py @@ -5,25 +5,58 @@ import contextlib import types 
import importlib +import inspect +import warnings +import itertools -from typing import Union, Optional +from typing import Union, Optional, cast from .abc import ResourceReader, Traversable from ._compat import wrap_spec Package = Union[types.ModuleType, str] +Anchor = Package -def files(package): - # type: (Package) -> Traversable +def package_to_anchor(func): """ - Get a Traversable resource from a package + Replace 'package' parameter as 'anchor' and warn about the change. + + Other errors should fall through. + + >>> files('a', 'b') + Traceback (most recent call last): + TypeError: files() takes from 0 to 1 positional arguments but 2 were given + """ + undefined = object() + + @functools.wraps(func) + def wrapper(anchor=undefined, package=undefined): + if package is not undefined: + if anchor is not undefined: + return func(anchor, package) + warnings.warn( + "First parameter to files is renamed to 'anchor'", + DeprecationWarning, + stacklevel=2, + ) + return func(package) + elif anchor is undefined: + return func() + return func(anchor) + + return wrapper + + +@package_to_anchor +def files(anchor: Optional[Anchor] = None) -> Traversable: + """ + Get a Traversable resource for an anchor. """ - return from_package(get_package(package)) + return from_package(resolve(anchor)) -def get_resource_reader(package): - # type: (types.ModuleType) -> Optional[ResourceReader] +def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]: """ Return the package's loader if it's a ResourceReader. """ @@ -39,24 +72,39 @@ def get_resource_reader(package): return reader(spec.name) # type: ignore -def resolve(cand): - # type: (Package) -> types.ModuleType - return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) +@functools.singledispatch +def resolve(cand: Optional[Anchor]) -> types.ModuleType: + return cast(types.ModuleType, cand) + + +@resolve.register +def _(cand: str) -> types.ModuleType: + return importlib.import_module(cand) + +@resolve.register +def _(cand: None) -> types.ModuleType: + return resolve(_infer_caller().f_globals['__name__']) -def get_package(package): - # type: (Package) -> types.ModuleType - """Take a package name or module object and return the module. - Raise an exception if the resolved module is not a package. +def _infer_caller(): """ - resolved = resolve(package) - if wrap_spec(resolved).submodule_search_locations is None: - raise TypeError(f'{package!r} is not a package') - return resolved + Walk the stack and find the frame of the first caller not in this module. + """ + + def is_this_file(frame_info): + return frame_info.filename == __file__ + + def is_wrapper(frame_info): + return frame_info.function == 'wrapper' + + not_this_file = itertools.filterfalse(is_this_file, inspect.stack()) + # also exclude 'wrapper' due to singledispatch in the call stack + callers = itertools.filterfalse(is_wrapper, not_this_file) + return next(callers).frame -def from_package(package): +def from_package(package: types.ModuleType): """ Return a Traversable object for the given package. @@ -67,7 +115,14 @@ def from_package(package): @contextlib.contextmanager -def _tempfile(reader, suffix=''): +def _tempfile( + reader, + suffix='', + # gh-93353: Keep a reference to call os.remove() in late Python + # finalization. + *, + _os_remove=os.remove, +): # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' # blocks due to the need to close the temporary file to work on Windows # properly. 
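The _common.py hunks above replace the old 'package' parameter with the more general 'anchor': files() now accepts a module object, a dotted module name, or nothing at all (in which case _infer_caller() walks the stack to locate the calling module), while the package_to_anchor decorator keeps the old keyword working behind a DeprecationWarning. A usage sketch, where 'mypkg' is a hypothetical importable package:

    import mypkg
    import importlib_resources as resources

    resources.files(mypkg)            # anchor by module object
    resources.files('mypkg')          # anchor by name, resolved via singledispatch
    resources.files()                 # anchor inferred from the caller's module
    resources.files(package='mypkg')  # legacy spelling; emits DeprecationWarning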
@@ -81,18 +136,35 @@ def _tempfile(reader, suffix=''): yield pathlib.Path(raw_path) finally: try: - os.remove(raw_path) + _os_remove(raw_path) except FileNotFoundError: pass +def _temp_file(path): + return _tempfile(path.read_bytes, suffix=path.name) + + +def _is_present_dir(path: Traversable) -> bool: + """ + Some Traversables implement ``is_dir()`` to raise an + exception (i.e. ``FileNotFoundError``) when the + directory doesn't exist. This function wraps that call + to always return a boolean and only return True + if there's a dir and it exists. + """ + with contextlib.suppress(FileNotFoundError): + return path.is_dir() + return False + + @functools.singledispatch def as_file(path): """ Given a Traversable object, return that object as a path on the local file system in a context manager. """ - return _tempfile(path.read_bytes, suffix=path.name) + return _temp_dir(path) if _is_present_dir(path) else _temp_file(path) @as_file.register(pathlib.Path) @@ -102,3 +174,34 @@ def _(path): Degenerate behavior for pathlib.Path objects. """ yield path + + +@contextlib.contextmanager +def _temp_path(dir: tempfile.TemporaryDirectory): + """ + Wrap tempfile.TemporyDirectory to return a pathlib object. + """ + with dir as result: + yield pathlib.Path(result) + + +@contextlib.contextmanager +def _temp_dir(path): + """ + Given a traversable dir, recursively replicate the whole tree + to the file system in a context manager. + """ + assert path.is_dir() + with _temp_path(tempfile.TemporaryDirectory()) as temp_dir: + yield _write_contents(temp_dir, path) + + +def _write_contents(target, source): + child = target.joinpath(source.name) + if source.is_dir(): + child.mkdir() + for item in source.iterdir(): + _write_contents(child, item) + else: + child.write_bytes(source.read_bytes()) + return child diff --git a/pkg_resources/_vendor/importlib_resources/_compat.py b/pkg_resources/_vendor/importlib_resources/_compat.py index cb9fc820cb..8b5b1d280f 100644 --- a/pkg_resources/_vendor/importlib_resources/_compat.py +++ b/pkg_resources/_vendor/importlib_resources/_compat.py @@ -1,9 +1,12 @@ # flake8: noqa import abc +import os import sys import pathlib from contextlib import suppress +from typing import Union + if sys.version_info >= (3, 10): from zipfile import Path as ZipPath # type: ignore @@ -96,3 +99,10 @@ def wrap_spec(package): from . import _adapters return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) + + +if sys.version_info >= (3, 9): + StrPath = Union[str, os.PathLike[str]] +else: + # PathLike is only subscriptable at runtime in 3.9+ + StrPath = Union[str, "os.PathLike[str]"] diff --git a/pkg_resources/_vendor/importlib_resources/_legacy.py b/pkg_resources/_vendor/importlib_resources/_legacy.py index 1d5d3f1fbb..b1ea8105da 100644 --- a/pkg_resources/_vendor/importlib_resources/_legacy.py +++ b/pkg_resources/_vendor/importlib_resources/_legacy.py @@ -27,8 +27,7 @@ def wrapper(*args, **kwargs): return wrapper -def normalize_path(path): - # type: (Any) -> str +def normalize_path(path: Any) -> str: """Normalize a path by ensuring it is a string. If the resulting string contains path separators, an exception is raised. 
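Two behavioral changes in the hunks above are easy to miss: _tempfile now keeps a default-argument reference to os.remove (gh-93353) so cleanup still works during late interpreter finalization, and as_file() gains directory support, recursively materializing a directory tree (e.g. one backed by a zip) into a TemporaryDirectory via _temp_dir/_write_contents. Roughly, again with a hypothetical 'mypkg' anchor:

    from importlib_resources import files, as_file

    with as_file(files('mypkg') / 'data') as path:
        # 'path' is a real on-disk directory (a copy, when 'mypkg' lives
        # in a zip); it is removed when the context exits.
        for entry in path.iterdir():
            print(entry.name)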
diff --git a/pkg_resources/_vendor/importlib_resources/abc.py b/pkg_resources/_vendor/importlib_resources/abc.py index d39dc1adba..23b6aeafe4 100644 --- a/pkg_resources/_vendor/importlib_resources/abc.py +++ b/pkg_resources/_vendor/importlib_resources/abc.py @@ -1,7 +1,13 @@ import abc -from typing import BinaryIO, Iterable, Text +import io +import itertools +import pathlib +from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional -from ._compat import runtime_checkable, Protocol +from ._compat import runtime_checkable, Protocol, StrPath + + +__all__ = ["ResourceReader", "Traversable", "TraversableResources"] class ResourceReader(metaclass=abc.ABCMeta): @@ -46,27 +52,34 @@ def contents(self) -> Iterable[str]: raise FileNotFoundError +class TraversalError(Exception): + pass + + @runtime_checkable class Traversable(Protocol): """ An object with a subset of pathlib.Path methods suitable for traversing directories and opening files. + + Any exceptions that occur when accessing the backing resource + may propagate unaltered. """ @abc.abstractmethod - def iterdir(self): + def iterdir(self) -> Iterator["Traversable"]: """ Yield Traversable objects in self """ - def read_bytes(self): + def read_bytes(self) -> bytes: """ Read contents of self as bytes """ with self.open('rb') as strm: return strm.read() - def read_text(self, encoding=None): + def read_text(self, encoding: Optional[str] = None) -> str: """ Read contents of self as text """ @@ -85,13 +98,32 @@ def is_file(self) -> bool: Return True if self is a file """ - @abc.abstractmethod - def joinpath(self, child): + def joinpath(self, *descendants: StrPath) -> "Traversable": """ - Return Traversable child in self + Return Traversable resolved with any descendants applied. + + Each descendant should be a path segment relative to self + and each may contain multiple levels separated by + ``posixpath.sep`` (``/``). """ + if not descendants: + return self + names = itertools.chain.from_iterable( + path.parts for path in map(pathlib.PurePosixPath, descendants) + ) + target = next(names) + matches = ( + traversable for traversable in self.iterdir() if traversable.name == target + ) + try: + match = next(matches) + except StopIteration: + raise TraversalError( + "Target not found during traversal.", target, list(names) + ) + return match.joinpath(*names) - def __truediv__(self, child): + def __truediv__(self, child: StrPath) -> "Traversable": """ Return Traversable child in self """ @@ -107,7 +139,8 @@ def open(self, mode='r', *args, **kwargs): accepted by io.TextIOWrapper. """ - @abc.abstractproperty + @property + @abc.abstractmethod def name(self) -> str: """ The base name of this object without any parent references. 
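The reworked Traversable.joinpath above is no longer abstract: it accepts any number of descendants, each of which may itself contain '/'-separated segments, and raises the new TraversalError when a segment cannot be matched during iterdir(). A sketch of the accepted forms, again with a hypothetical 'mypkg' anchor:

    from importlib_resources import files
    from importlib_resources.abc import TraversalError

    root = files('mypkg')
    root.joinpath('data', 'one')       # multiple arguments
    root.joinpath('data/one/res.txt')  # one compound, '/'-separated segment
    root / 'data' / 'one'              # __truediv__ still takes a single child
    try:
        root.joinpath('no/such/thing')
    except TraversalError:
        pass                           # unmatched segment

Two related observations on the hunks further down: MultiplexedPath.joinpath in readers.py now catches this TraversalError and falls back to a plainly non-existent path rather than raising, and the relocated ResourceHandle.open in simple.py still constructs io.TextIOWrapper(*args, **kwargs) without passing the binary stream, which looks like it discards the underlying data in text mode; the presumable intent is io.TextIOWrapper(stream, *args, **kwargs).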
@@ -121,17 +154,17 @@ class TraversableResources(ResourceReader): """ @abc.abstractmethod - def files(self): + def files(self) -> "Traversable": """Return a Traversable object for the loaded package.""" - def open_resource(self, resource): + def open_resource(self, resource: StrPath) -> io.BufferedReader: return self.files().joinpath(resource).open('rb') - def resource_path(self, resource): + def resource_path(self, resource: Any) -> NoReturn: raise FileNotFoundError(resource) - def is_resource(self, path): + def is_resource(self, path: StrPath) -> bool: return self.files().joinpath(path).is_file() - def contents(self): + def contents(self) -> Iterator[str]: return (item.name for item in self.files().iterdir()) diff --git a/pkg_resources/_vendor/importlib_resources/readers.py b/pkg_resources/_vendor/importlib_resources/readers.py index f1190ca452..ab34db7409 100644 --- a/pkg_resources/_vendor/importlib_resources/readers.py +++ b/pkg_resources/_vendor/importlib_resources/readers.py @@ -82,15 +82,13 @@ def is_dir(self): def is_file(self): return False - def joinpath(self, child): - # first try to find child in current paths - for file in self.iterdir(): - if file.name == child: - return file - # if it does not exist, construct it with the first path - return self._paths[0] / child - - __truediv__ = joinpath + def joinpath(self, *descendants): + try: + return super().joinpath(*descendants) + except abc.TraversalError: + # One of the paths did not resolve (a directory does not exist). + # Just return something that will not exist. + return self._paths[0].joinpath(*descendants) def open(self, *args, **kwargs): raise FileNotFoundError(f'{self} is not a file') diff --git a/pkg_resources/_vendor/importlib_resources/simple.py b/pkg_resources/_vendor/importlib_resources/simple.py index da073cbdb1..7770c922c8 100644 --- a/pkg_resources/_vendor/importlib_resources/simple.py +++ b/pkg_resources/_vendor/importlib_resources/simple.py @@ -16,31 +16,28 @@ class SimpleReader(abc.ABC): provider. """ - @abc.abstractproperty - def package(self): - # type: () -> str + @property + @abc.abstractmethod + def package(self) -> str: """ The name of the package for which this reader loads resources. """ @abc.abstractmethod - def children(self): - # type: () -> List['SimpleReader'] + def children(self) -> List['SimpleReader']: """ Obtain an iterable of SimpleReader for available child containers (e.g. directories). """ @abc.abstractmethod - def resources(self): - # type: () -> List[str] + def resources(self) -> List[str]: """ Obtain available named resources for this virtual package. """ @abc.abstractmethod - def open_binary(self, resource): - # type: (str) -> BinaryIO + def open_binary(self, resource: str) -> BinaryIO: """ Obtain a File-like for a named resource. """ @@ -50,39 +47,12 @@ def name(self): return self.package.split('.')[-1] -class ResourceHandle(Traversable): - """ - Handle to a named resource in a ResourceReader. - """ - - def __init__(self, parent, name): - # type: (ResourceContainer, str) -> None - self.parent = parent - self.name = name # type: ignore - - def is_file(self): - return True - - def is_dir(self): - return False - - def open(self, mode='r', *args, **kwargs): - stream = self.parent.reader.open_binary(self.name) - if 'b' not in mode: - stream = io.TextIOWrapper(*args, **kwargs) - return stream - - def joinpath(self, name): - raise RuntimeError("Cannot traverse into a resource") - - class ResourceContainer(Traversable): """ Traversable container for a package's resources via its reader. 
""" - def __init__(self, reader): - # type: (SimpleReader) -> None + def __init__(self, reader: SimpleReader): self.reader = reader def is_dir(self): @@ -99,10 +69,30 @@ def iterdir(self): def open(self, *args, **kwargs): raise IsADirectoryError() + +class ResourceHandle(Traversable): + """ + Handle to a named resource in a ResourceReader. + """ + + def __init__(self, parent: ResourceContainer, name: str): + self.parent = parent + self.name = name # type: ignore + + def is_file(self): + return True + + def is_dir(self): + return False + + def open(self, mode='r', *args, **kwargs): + stream = self.parent.reader.open_binary(self.name) + if 'b' not in mode: + stream = io.TextIOWrapper(*args, **kwargs) + return stream + def joinpath(self, name): - return next( - traversable for traversable in self.iterdir() if traversable.name == name - ) + raise RuntimeError("Cannot traverse into a resource") class TraversableReader(TraversableResources, SimpleReader): diff --git a/pkg_resources/_vendor/importlib_resources/tests/_compat.py b/pkg_resources/_vendor/importlib_resources/tests/_compat.py index 4c99cffd02..e7bf06dd4e 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/_compat.py +++ b/pkg_resources/_vendor/importlib_resources/tests/_compat.py @@ -6,7 +6,20 @@ except ImportError: # Python 3.9 and earlier class import_helper: # type: ignore - from test.support import modules_setup, modules_cleanup + from test.support import ( + modules_setup, + modules_cleanup, + DirsOnSysPath, + CleanImport, + ) + + +try: + from test.support import os_helper # type: ignore +except ImportError: + # Python 3.9 compat + class os_helper: # type:ignore + from test.support import temp_dir try: diff --git a/pkg_resources/_vendor/importlib_resources/tests/_path.py b/pkg_resources/_vendor/importlib_resources/tests/_path.py new file mode 100644 index 0000000000..c630e4d3d3 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/_path.py @@ -0,0 +1,50 @@ +import pathlib +import functools + + +#### +# from jaraco.path 3.4 + + +def build(spec, prefix=pathlib.Path()): + """ + Build a set of files/directories, as described by the spec. + + Each key represents a pathname, and the value represents + the content. Content may be a nested directory. + + >>> spec = { + ... 'README.txt': "A README file", + ... "foo": { + ... "__init__.py": "", + ... "bar": { + ... "__init__.py": "", + ... }, + ... "baz.py": "# Some code", + ... } + ... } + >>> tmpdir = getfixture('tmpdir') + >>> build(spec, tmpdir) + """ + for name, contents in spec.items(): + create(contents, pathlib.Path(prefix) / name) + + +@functools.singledispatch +def create(content, path): + path.mkdir(exist_ok=True) + build(content, prefix=path) # type: ignore + + +@create.register +def _(content: bytes, path): + path.write_bytes(content) + + +@create.register +def _(content: str, path): + path.write_text(content) + + +# end from jaraco.path +#### diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_files.py index 2676b49e7d..d258fb5f0f 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_files.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_files.py @@ -1,10 +1,23 @@ import typing +import textwrap import unittest +import warnings +import importlib +import contextlib import importlib_resources as resources -from importlib_resources.abc import Traversable +from ..abc import Traversable from . import data01 from . import util +from . 
import _path +from ._compat import os_helper, import_helper + + +@contextlib.contextmanager +def suppress_known_deprecation(): + with warnings.catch_warnings(record=True) as ctx: + warnings.simplefilter('default', category=DeprecationWarning) + yield ctx class FilesTests: @@ -25,6 +38,14 @@ def test_read_text(self): def test_traversable(self): assert isinstance(resources.files(self.data), Traversable) + def test_old_parameter(self): + """ + Files used to take a 'package' parameter. Make sure anyone + passing by name is still supported. + """ + with suppress_known_deprecation(): + resources.files(package=self.data) + class OpenDiskTests(FilesTests, unittest.TestCase): def setUp(self): @@ -42,5 +63,50 @@ def setUp(self): self.data = namespacedata01 +class SiteDir: + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + self.site_dir = self.fixtures.enter_context(os_helper.temp_dir()) + self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir)) + self.fixtures.enter_context(import_helper.CleanImport()) + + +class ModulesFilesTests(SiteDir, unittest.TestCase): + def test_module_resources(self): + """ + A module can have resources found adjacent to the module. + """ + spec = { + 'mod.py': '', + 'res.txt': 'resources are the best', + } + _path.build(spec, self.site_dir) + import mod + + actual = resources.files(mod).joinpath('res.txt').read_text() + assert actual == spec['res.txt'] + + +class ImplicitContextFilesTests(SiteDir, unittest.TestCase): + def test_implicit_files(self): + """ + Without any parameter, files() will infer the location as the caller. + """ + spec = { + 'somepkg': { + '__init__.py': textwrap.dedent( + """ + import importlib_resources as res + val = res.files().joinpath('res.txt').read_text() + """ + ), + 'res.txt': 'resources are the best', + }, + } + _path.build(spec, self.site_dir) + assert importlib.import_module('somepkg').val == 'resources are the best' + + if __name__ == '__main__': unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py index 16841a508a..1c8ebeeb13 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py @@ -75,6 +75,11 @@ def test_join_path(self): str(path.joinpath('imaginary'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'imaginary'), ) + self.assertEqual(path.joinpath(), path) + + def test_join_path_compound(self): + path = MultiplexedPath(self.folder) + assert not path.joinpath('imaginary/foo.py').exists() def test_repr(self): self.assertEqual( diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py index 5affd8b0de..8239027167 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py @@ -111,6 +111,14 @@ def test_submodule_contents_by_name(self): {'__init__.py', 'binary.file'}, ) + def test_as_file_directory(self): + with resources.as_file(resources.files('ziptestdata')) as data: + assert data.name == 'ziptestdata' + assert data.is_dir() + assert data.joinpath('subdirectory').is_dir() + assert len(list(data.iterdir())) + assert not data.parent.exists() + class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): ZIP_MODULE = zipdata02 # type: ignore diff --git 
a/pkg_resources/_vendor/importlib_resources/tests/update-zips.py b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py index 9ef0224ca6..231334aa7e 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/update-zips.py +++ b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py @@ -42,7 +42,7 @@ def generate(suffix): def walk(datapath): for dirpath, dirnames, filenames in os.walk(datapath): - with contextlib.suppress(KeyError): + with contextlib.suppress(ValueError): dirnames.remove('__pycache__') for filename in filenames: res = pathlib.Path(dirpath) / filename diff --git a/pkg_resources/_vendor/importlib_resources/tests/util.py b/pkg_resources/_vendor/importlib_resources/tests/util.py index c6d83e4bd3..b596c0ce4f 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/util.py +++ b/pkg_resources/_vendor/importlib_resources/tests/util.py @@ -3,7 +3,7 @@ import io import sys import types -from pathlib import Path, PurePath +import pathlib from . import data01 from . import zipdata01 @@ -94,7 +94,7 @@ def test_string_path(self): def test_pathlib_path(self): # Passing in a pathlib.PurePath object for the path should succeed. - path = PurePath('utf-8.file') + path = pathlib.PurePath('utf-8.file') self.execute(data01, path) def test_importing_module_as_side_effect(self): @@ -102,17 +102,6 @@ def test_importing_module_as_side_effect(self): del sys.modules[data01.__name__] self.execute(data01.__name__, 'utf-8.file') - def test_non_package_by_name(self): - # The anchor package cannot be a module. - with self.assertRaises(TypeError): - self.execute(__name__, 'utf-8.file') - - def test_non_package_by_package(self): - # The anchor package cannot be a module. - with self.assertRaises(TypeError): - module = sys.modules['importlib_resources.tests.util'] - self.execute(module, 'utf-8.file') - def test_missing_path(self): # Attempting to open or read or request the path for a # non-existent path should succeed if open_resource @@ -144,7 +133,7 @@ class ZipSetupBase: @classmethod def setUpClass(cls): - data_path = Path(cls.ZIP_MODULE.__file__) + data_path = pathlib.Path(cls.ZIP_MODULE.__file__) data_dir = data_path.parent cls._zip_path = str(data_dir / 'ziptestdata.zip') sys.path.append(cls._zip_path) diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/RECORD deleted file mode 100644 index f40d48c7ee..0000000000 --- a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -jaraco.context-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jaraco.context-4.1.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -jaraco.context-4.1.1.dist-info/METADATA,sha256=bvqDGCk6Z7TkohUqr5XZm19SbF9mVxrtXjN6uF_BAMQ,2031 -jaraco.context-4.1.1.dist-info/RECORD,, -jaraco.context-4.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -jaraco.context-4.1.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 -jaraco/__pycache__/context.cpython-310.pyc,, -jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420 diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER similarity index 100% rename from pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER rename to pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER diff --git 
a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/LICENSE similarity index 100% rename from pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/LICENSE rename to pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/LICENSE diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA similarity index 50% rename from setuptools/_vendor/jaraco.context-4.1.1.dist-info/METADATA rename to pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA index 908711b7ca..281137a035 100644 --- a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/METADATA +++ b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA @@ -1,39 +1,38 @@ Metadata-Version: 2.1 Name: jaraco.context -Version: 4.1.1 +Version: 4.3.0 Summary: Context managers by jaraco Home-page: https://github.com/jaraco/jaraco.context Author: Jason R. Coombs Author-email: jaraco@jaraco.com -License: UNKNOWN -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only -Requires-Python: >=3.6 +Requires-Python: >=3.7 License-File: LICENSE Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/jaraco.context.svg - :target: `PyPI link`_ + :target: https://pypi.org/project/jaraco.context .. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/jaraco.context .. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22 @@ -46,7 +45,24 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr .. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2023-informational :target: https://blog.jaraco.com/skeleton +.. 
image:: https://tidelift.com/badges/package/pypi/jaraco.context + :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD new file mode 100644 index 0000000000..561d3a1bdf --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.context-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jaraco.context-4.3.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +jaraco.context-4.3.0.dist-info/METADATA,sha256=GqMykAm33E7Tt_t_MHc5O7GJN62Qwp6MEHX9WD-LPow,2958 +jaraco.context-4.3.0.dist-info/RECORD,, +jaraco.context-4.3.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +jaraco.context-4.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 +jaraco/__pycache__/context.cpython-38.pyc,, +jaraco/context.py,sha256=vlyDzb_PvZ9H7R9bbTr_CMRnveW5Dc56eC7eyd_GfoA,7460 diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/WHEEL similarity index 65% rename from pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/WHEEL rename to pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/WHEEL index 5bad85fdc1..57e3d840d5 100644 --- a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/WHEEL +++ b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) +Generator: bdist_wheel (0.38.4) Root-Is-Purelib: true Tag: py3-none-any diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt similarity index 100% rename from pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt rename to pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/RECORD deleted file mode 100644 index fbda3d1f03..0000000000 --- a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -jaraco.functools-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jaraco.functools-3.5.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -jaraco.functools-3.5.0.dist-info/METADATA,sha256=cE9C7u9bo_GjLAuw4nML67a25kUaPDiHn4j03lG4jd0,2276 -jaraco.functools-3.5.0.dist-info/RECORD,, -jaraco.functools-3.5.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -jaraco.functools-3.5.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 -jaraco/__pycache__/functools.cpython-310.pyc,, -jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494 diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER 
b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/INSTALLER similarity index 100% rename from pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER rename to pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/INSTALLER diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/LICENSE similarity index 100% rename from pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE rename to pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/LICENSE diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/METADATA similarity index 60% rename from setuptools/_vendor/jaraco.functools-3.5.0.dist-info/METADATA rename to pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/METADATA index 12dfbdd00c..fa8f0211b9 100644 --- a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/METADATA +++ b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/METADATA @@ -1,12 +1,10 @@ Metadata-Version: 2.1 Name: jaraco.functools -Version: 3.5.0 +Version: 3.5.2 Summary: Functools like those found in stdlib Home-page: https://github.com/jaraco/jaraco.functools Author: Jason R. Coombs Author-email: jaraco@jaraco.com -License: UNKNOWN -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License @@ -16,18 +14,20 @@ Requires-Python: >=3.7 License-File: LICENSE Requires-Dist: more-itertools Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: jaraco.classes ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg :target: `PyPI link`_ @@ -50,9 +50,26 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr .. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2022-informational :target: https://blog.jaraco.com/skeleton +.. image:: https://tidelift.com/badges/package/pypi/jaraco.functools + :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme + Additional functools in the spirit of stdlib's functools. +For Enterprise +============== + +Available as part of the Tidelift Subscription. 
+ +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/RECORD new file mode 100644 index 0000000000..127b826762 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.functools-3.5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jaraco.functools-3.5.2.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +jaraco.functools-3.5.2.dist-info/METADATA,sha256=ZIViwS4ZOmaWwA5ArwZ_xXQGR9WDnUSzx-0MO5kGPi8,3154 +jaraco.functools-3.5.2.dist-info/RECORD,, +jaraco.functools-3.5.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +jaraco.functools-3.5.2.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 +jaraco/__pycache__/functools.cpython-311.pyc,, +jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494 diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/WHEEL similarity index 100% rename from setuptools/_vendor/importlib_metadata-4.11.1.dist-info/WHEEL rename to pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/WHEEL diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/top_level.txt similarity index 100% rename from pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt rename to pkg_resources/_vendor/jaraco.functools-3.5.2.dist-info/top_level.txt diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD index 916ad7d3f8..dd471b0708 100644 --- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD @@ -7,4 +7,4 @@ jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FG jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335 jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538 -jaraco/text/__pycache__/__init__.cpython-310.pyc,, +jaraco/text/__pycache__/__init__.cpython-311.pyc,, diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py index 87a4e3dca2..b0d1ef37cb 100644 --- a/pkg_resources/_vendor/jaraco/context.py +++ b/pkg_resources/_vendor/jaraco/context.py @@ -5,10 +5,18 @@ import tempfile import shutil import operator +import warnings @contextlib.contextmanager def pushd(dir): + """ + >>> tmp_path = getfixture('tmp_path') + >>> with pushd(tmp_path): + ... 
assert os.getcwd() == os.fspath(tmp_path) + >>> assert os.getcwd() != os.fspath(tmp_path) + """ + orig = os.getcwd() os.chdir(dir) try: @@ -29,6 +37,8 @@ def tarball_context(url, target_dir=None, runner=None, pushd=pushd): target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') if runner is None: runner = functools.partial(subprocess.check_call, shell=True) + else: + warnings.warn("runner parameter is deprecated", DeprecationWarning) # In the tar command, use --strip-components=1 to strip the first path and # then # use -C to cause the files to be extracted to {target_dir}. This ensures @@ -48,6 +58,15 @@ def tarball_context(url, target_dir=None, runner=None, pushd=pushd): def infer_compression(url): """ Given a URL or filename, infer the compression code for tar. + + >>> infer_compression('http://foo/bar.tar.gz') + 'z' + >>> infer_compression('http://foo/bar.tgz') + 'z' + >>> infer_compression('file.bz') + 'j' + >>> infer_compression('file.xz') + 'J' """ # cheat and just assume it's the last two characters compression_indicator = url[-2:] @@ -61,6 +80,12 @@ def temp_dir(remover=shutil.rmtree): """ Create a temporary directory context. Pass a custom remover to override the removal behavior. + + >>> import pathlib + >>> with temp_dir() as the_dir: + ... assert os.path.isdir(the_dir) + ... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents') + >>> assert not os.path.exists(the_dir) """ temp_dir = tempfile.mkdtemp() try: @@ -90,6 +115,12 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): @contextlib.contextmanager def null(): + """ + A null context suitable to stand in for a meaningful context. + + >>> with null() as value: + ... assert value is None + """ yield @@ -112,6 +143,10 @@ class ExceptionTrap: ... raise ValueError("1 + 1 is not 3") >>> bool(trap) True + >>> trap.value + ValueError('1 + 1 is not 3') + >>> trap.tb + >>> with ExceptionTrap(ValueError) as trap: ... raise Exception() @@ -211,3 +246,43 @@ class suppress(contextlib.suppress, contextlib.ContextDecorator): ... {}[''] >>> key_error() """ + + +class on_interrupt(contextlib.ContextDecorator): + """ + Replace a KeyboardInterrupt with SystemExit(1) + + >>> def do_interrupt(): + ... raise KeyboardInterrupt() + >>> on_interrupt('error')(do_interrupt)() + Traceback (most recent call last): + ... + SystemExit: 1 + >>> on_interrupt('error', code=255)(do_interrupt)() + Traceback (most recent call last): + ... + SystemExit: 255 + >>> on_interrupt('suppress')(do_interrupt)() + >>> with __import__('pytest').raises(KeyboardInterrupt): + ... 
on_interrupt('ignore')(do_interrupt)() + """ + + def __init__( + self, + action='error', + # py3.7 compat + # /, + code=1, + ): + self.action = action + self.code = code + + def __enter__(self): + return self + + def __exit__(self, exctype, excinst, exctb): + if exctype is not KeyboardInterrupt or self.action == 'ignore': + return + elif self.action == 'error': + raise SystemExit(self.code) from excinst + return self.action == 'suppress' diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/RECORD deleted file mode 100644 index 44847291ed..0000000000 --- a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/RECORD +++ /dev/null @@ -1,16 +0,0 @@ -more_itertools-8.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -more_itertools-8.12.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053 -more_itertools-8.12.0.dist-info/METADATA,sha256=QCCEcisEPr7iSfBIKCukhP-FbG9ehMK8tDIliZ3FBDc,39405 -more_itertools-8.12.0.dist-info/RECORD,, -more_itertools-8.12.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -more_itertools-8.12.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15 -more_itertools/__init__.py,sha256=ZQYu_9H6stSG7viUgT32TFqslqcZwq82kWRZooKiI8Y,83 -more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 -more_itertools/__pycache__/__init__.cpython-310.pyc,, -more_itertools/__pycache__/more.cpython-310.pyc,, -more_itertools/__pycache__/recipes.cpython-310.pyc,, -more_itertools/more.py,sha256=jSrvV9BK-XKa4x7MPPp9yWYRDtRgR5h7yryEqHMU4mg,132578 -more_itertools/more.pyi,sha256=kWOkRKx0V8ZwC1D2j0c0DUfy56dazzpmRcm5ZuY_aqo,20006 -more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -more_itertools/recipes.py,sha256=N6aCDwoIPvE-aiqpGU-nbFwqiM3X8MKRcxBM84naW88,18410 -more_itertools/recipes.pyi,sha256=Lx3vb0p_vY7rF8MQuguvOcVaS9qd1WRL8JO_qVo7hiY,3925 diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt deleted file mode 100644 index a5035befb3..0000000000 --- a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -more_itertools diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/INSTALLER similarity index 100% rename from pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER rename to pkg_resources/_vendor/more_itertools-9.0.0.dist-info/INSTALLER diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/LICENSE b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/LICENSE similarity index 100% rename from pkg_resources/_vendor/more_itertools-8.12.0.dist-info/LICENSE rename to pkg_resources/_vendor/more_itertools-9.0.0.dist-info/LICENSE diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/METADATA b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/METADATA similarity index 72% rename from pkg_resources/_vendor/more_itertools-8.12.0.dist-info/METADATA rename to pkg_resources/_vendor/more_itertools-9.0.0.dist-info/METADATA index 9efacdd745..bd988923aa 100644 --- a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/METADATA +++ b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/METADATA @@ -1,29 +1,26 @@ Metadata-Version: 2.1 Name: more-itertools -Version: 8.12.0 +Version: 9.0.0 Summary: More routines for operating on 
iterables, beyond itertools -Home-page: https://github.com/more-itertools/more-itertools -Author: Erik Rose -Author-email: erikrose@grinchcentral.com -License: MIT -Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked -Platform: UNKNOWN +Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked +Author-email: Erik Rose +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Natural Language :: English Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Software Development :: Libraries -Requires-Python: >=3.5 -Description-Content-Type: text/x-rst -License-File: LICENSE +Project-URL: Homepage, https://github.com/more-itertools/more-itertools ============== More Itertools @@ -40,7 +37,9 @@ Python iterables. +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Grouping | `chunked `_, | | | `ichunked `_, | +| | `chunked_even `_, | | | `sliced `_, | +| | `constrained_batches `_, | | | `distribute `_, | | | `divide `_, | | | `split_at `_, | @@ -50,6 +49,7 @@ Python iterables. | | `split_when `_, | | | `bucket `_, | | | `unzip `_, | +| | `batched `_, | | | `grouper `_, | | | `partition `_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ @@ -64,11 +64,13 @@ Python iterables. | | `windowed_complete `_, | | | `pairwise `_, | | | `triplewise `_, | -| | `sliding_window `_ | +| | `sliding_window `_, | +| | `subslices `_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Augmenting | `count_cycle `_, | | | `intersperse `_, | | | `padded `_, | +| | `repeat_each `_, | | | `mark_ends `_, | | | `repeat_last `_, | | | `adjacent `_, | @@ -103,7 +105,8 @@ Python iterables. | | `all_unique `_, | | | `minmax `_, | | | `first_true `_, | -| | `quantify `_ | +| | `quantify `_, | +| | `iequals `_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Selecting | `islice_extended `_, | | | `first `_, | @@ -125,7 +128,8 @@ Python iterables. 
| | `unique_everseen `_, | | | `unique_justseen `_, | | | `duplicates_everseen `_, | -| | `duplicates_justseen `_ | +| | `duplicates_justseen `_, | +| | `longest_common_prefix `_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Combinatorics | `distinct_permutations `_, | | | `distinct_combinations `_, | @@ -161,9 +165,12 @@ Python iterables. | | `make_decorator `_, | | | `SequenceView `_, | | | `time_limited `_, | +| | `map_if `_, | | | `consume `_, | | | `tabulate `_, | | | `repeatfunc `_ | +| | `polynomial_from_roots `_ | +| | `sieve `_ | +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ @@ -230,292 +237,5 @@ repository. Thanks for contributing! Version History =============== - - :noindex: - -8.12.0 ------- - -* Bug fixes - * Some documentation issues were fixed (thanks to Masynchin, spookylukey, astrojuanlu, and stephengmatthews) - * Python 3.5 support was temporarily restored (thanks to mattbonnell) - -8.11.0 ------- - -* New functions - * The before_and_after, sliding_window, and triplewise recipes from the Python 3.10 docs were added - * duplicates_everseen and duplicates_justseen (thanks to OrBin and DavidPratt512) - * minmax (thanks to Ricocotam, MSeifert04, and ruancomelli) - * strictly_n (thanks to hwalinga and NotWearingPants) - * unique_in_window - -* Changes to existing functions - * groupby_transform had its type stub improved (thanks to mjk4 and ruancomelli) - * is_sorted now accepts a ``strict`` parameter (thanks to Dutcho and ruancomelli) - * zip_broadcast was updated to fix a bug (thanks to kalekundert) - -8.10.0 ------- - -* Changes to existing functions - * The type stub for iter_except was improved (thanks to MarcinKonowalczyk) - -* Other changes: - * Type stubs now ship with the source release (thanks to saaketp) - * The Sphinx docs were improved (thanks to MarcinKonowalczyk) - -8.9.0 ------ - -* New functions - * interleave_evenly (thanks to mbugert) - * repeat_each (thanks to FinalSh4re) - * chunked_even (thanks to valtron) - * map_if (thanks to sassbalint) - * zip_broadcast (thanks to kalekundert) - -* Changes to existing functions - * The type stub for chunked was improved (thanks to PhilMacKay) - * The type stubs for zip_equal and `zip_offset` were improved (thanks to maffoo) - * Building Sphinx docs locally was improved (thanks to MarcinKonowalczyk) - -8.8.0 ------ - -* New functions - * countable (thanks to krzysieq) - -* Changes to existing functions - * split_before was updated to handle empy collections (thanks to TiunovNN) - * unique_everseen got a performance boost (thanks to Numerlor) - * The type hint for value_chain was corrected (thanks to vr2262) - -8.7.0 ------ - -* New functions - * convolve (from the Python itertools docs) - * product_index, combination_index, and permutation_index (thanks to N8Brooks) - * value_chain (thanks to jenstroeger) - -* Changes to existing functions - * distinct_combinations now uses a non-recursive algorithm (thanks to knutdrand) - * pad_none is now the preferred name for padnone, though the latter remains available. 
- * pairwise will now use the Python standard library implementation on Python 3.10+ - * sort_together now accepts a ``key`` argument (thanks to brianmaissy) - * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis) - * time_limited can now indicate whether its iterator has expired (thanks to roysmith) - * The implementation of unique_everseen was improved (thanks to plammens) - -* Other changes: - * Various documentation updates (thanks to cthoyt, Evantm, and cyphase) - -8.6.0 ------ - -* New itertools - * all_unique (thanks to brianmaissy) - * nth_product and nth_permutation (thanks to N8Brooks) - -* Changes to existing itertools - * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool) - -* Other changes - * Python 3.5 has reached its end of life and is no longer supported. - * Python 3.9 is officially supported. - * Various documentation fixes (thanks to timgates42) - -8.5.0 ------ - -* New itertools - * windowed_complete (thanks to MarcinKonowalczyk) - -* Changes to existing itertools: - * The is_sorted implementation was improved (thanks to cool-RR) - * The groupby_transform now accepts a ``reducefunc`` parameter. - * The last implementation was improved (thanks to brianmaissy) - -* Other changes - * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0) - * The tests for distinct_combinations were improved (thanks to Minabsapi) - * Automated tests now run on GitHub Actions. All commits now check: - * That unit tests pass - * That the examples in docstrings work - * That test coverage remains high (using `coverage`) - * For linting errors (using `flake8`) - * For consistent style (using `black`) - * That the type stubs work (using `mypy`) - * That the docs build correctly (using `sphinx`) - * That packages build correctly (using `twine`) - -8.4.0 ------ - -* New itertools - * mark_ends (thanks to kalekundert) - * is_sorted - -* Changes to existing itertools: - * islice_extended can now be used with real slices (thanks to cool-RR) - * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky) - -* Other changes - * Automated tests now enforce code style (using `black `__) - * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf) - * The test configuration for mypy was updated (thanks to blueyed) - - -8.3.0 ------ - -* New itertools - * zip_equal (thanks to frankier and alexmojaki) - -* Changes to existing itertools: - * split_at, split_before, split_after, and split_when all got a ``maxsplit`` paramter (thanks to jferard and ilai-deutel) - * split_at now accepts a ``keep_separator`` parameter (thanks to jferard) - * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel) - * The windowed implementation was improved (thanks to SergBobrovsky) - * The spy implementation was improved (thanks to has2k1) - -* Other changes - * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel) - * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne) - -8.2.0 ------ - -* Bug fixes - * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel) - -* Changes to existing itertools: - * numeric_range now behaves more like the built-in range. (thanks to jferard) - * bucket now allows for enumerating keys. (thanks to alexchandel) - * sliced now should now work for numpy arrays. 
(thanks to sswingle) - * seekable now has a ``maxlen`` parameter. - -8.1.0 ------ - -* Bug fixes - * partition works with ``pred=None`` again. (thanks to MSeifert04) - -* New itertools - * sample (thanks to tommyod) - * nth_or_last (thanks to d-ryzhikov) - -* Changes to existing itertools: - * The implementation for divide was improved. (thanks to jferard) - -8.0.2 ------ - -* Bug fixes - * The type stub files are now part of the wheel distribution (thanks to keisheiled) - -8.0.1 ------ - -* Bug fixes - * The type stub files now work for functions imported from the - root package (thanks to keisheiled) - -8.0.0 ------ - -* New itertools and other additions - * This library now ships type hints for use with mypy. - (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance) - * split_when (thanks to jferard) - * repeat_last (thanks to d-ryzhikov) - -* Changes to existing itertools: - * The implementation for set_partitions was improved. (thanks to jferard) - * partition was optimized for expensive predicates. (thanks to stevecj) - * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky) - * The implementation for difference was improved. (thanks to Jabbey92) - -* Other changes - * Python 3.4 has reached its end of life and is no longer supported. - * Python 3.8 is officially supported. (thanks to jdufresne) - * The ``collate`` function has been deprecated. - It raises a ``DeprecationWarning`` if used, and will be removed in a future release. - * one and only now provide more informative error messages. (thanks to gabbard) - * Unit tests were moved outside of the main package (thanks to jdufresne) - * Various documentation fixes (thanks to kriomant, gabbard, jdufresne) - - -7.2.0 ------ - -* New itertools - * distinct_combinations - * set_partitions (thanks to kbarrett) - * filter_except - * map_except - -7.1.0 ------ - -* New itertools - * ichunked (thanks davebelais and youtux) - * only (thanks jaraco) - -* Changes to existing itertools: - * numeric_range now supports ranges specified by - ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests). - * difference now supports an *initial* keyword argument. - - -* Other changes - * Various documentation fixes (thanks raimon49, pylang) - -7.0.0 ------ - -* New itertools: - * time_limited - * partitions (thanks to rominf and Saluev) - * substrings_indexes (thanks to rominf) - -* Changes to existing itertools: - * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet) - -The major version update is due to the change in the default behavior of -collapse. It now treats ``bytes`` objects the same as ``str`` objects. -This aligns its behavior with always_iterable. - -.. code-block:: python - - >>> from more_itertools import collapse - >>> iterable = [[1, 2], b'345', [6]] - >>> print(list(collapse(iterable))) - [1, 2, b'345', 6] - -6.0.0 ------ - -* Major changes: - * Python 2.7 is no longer supported. The 5.0.0 release will be the last - version targeting Python 2.7. - * All future releases will target the active versions of Python 3. - As of 2019, those are Python 3.4 and above. - * The ``six`` library is no longer a dependency. - * The accumulate function is no longer part of this library. You - may import a better version from the standard ``itertools`` module. - -* Changes to existing itertools: - * The order of the parameters in grouper have changed to match - the latest recipe in the itertools documentation. 
Use of the old order - will be supported in this release, but emit a ``DeprecationWarning``. - The legacy behavior will be dropped in a future release. (thanks to jaraco) - * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values `_ at StackOverflow.) - * An unused parameter was removed from substrings. (thanks to pylang) - -* Other changes: - * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04) - * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk) - +The version history can be found in `documentation `_. diff --git a/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/RECORD new file mode 100644 index 0000000000..4ae7d82121 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/RECORD @@ -0,0 +1,15 @@ +more_itertools-9.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +more_itertools-9.0.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053 +more_itertools-9.0.0.dist-info/METADATA,sha256=o71Ks93mbZ7709yvfFg09Vxgj-8t2ZZaYIyUprS8GHw,31266 +more_itertools-9.0.0.dist-info/RECORD,, +more_itertools-9.0.0.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 +more_itertools/__init__.py,sha256=5PNQMpy400s5GB3jcWwzje0RCw8k0bvU9W_C49V0fd0,148 +more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 +more_itertools/__pycache__/__init__.cpython-311.pyc,, +more_itertools/__pycache__/more.cpython-311.pyc,, +more_itertools/__pycache__/recipes.cpython-311.pyc,, +more_itertools/more.py,sha256=FmmtkT-j69qILkxEELk5ZRoZK8St1Vg_fOGW0sTFd7g,133336 +more_itertools/more.pyi,sha256=hbf2oqg56wctXwf6yM1B0QLWqsNyDrqtX2znvQRAe3Q,20297 +more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +more_itertools/recipes.py,sha256=ZX4-2IfbZKlPIVaDITH2buX_fPuMDe1EVc6e2XSsCz8,22975 +more_itertools/recipes.pyi,sha256=NA6qqcKMbQ2fly9hCyCzMcx46Tn9TLl-9mFnZsRytZM,3851 diff --git a/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/WHEEL b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/WHEEL new file mode 100644 index 0000000000..668ba4d015 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-9.0.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.7.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/pkg_resources/_vendor/more_itertools/__init__.py b/pkg_resources/_vendor/more_itertools/__init__.py index ea38bef1f6..557bfc206f 100644 --- a/pkg_resources/_vendor/more_itertools/__init__.py +++ b/pkg_resources/_vendor/more_itertools/__init__.py @@ -1,4 +1,6 @@ +"""More routines for operating on iterables, beyond itertools""" + from .more import * # noqa from .recipes import * # noqa -__version__ = '8.12.0' +__version__ = '9.0.0' diff --git a/pkg_resources/_vendor/more_itertools/more.py b/pkg_resources/_vendor/more_itertools/more.py old mode 100644 new mode 100755 index 6b6a5cab25..0b29fca02b --- a/pkg_resources/_vendor/more_itertools/more.py +++ b/pkg_resources/_vendor/more_itertools/more.py @@ -3,7 +3,7 @@ from collections import Counter, defaultdict, deque, abc from collections.abc import Sequence from functools import partial, reduce, wraps -from heapq import merge, heapify, heapreplace, heappop +from heapq import heapify, heapreplace, heappop from itertools import ( chain, compress, @@ -26,12 +26,16 @@ from time import monotonic from .recipes import ( + _marker, + _zip_equal, + UnequalIterablesError, 
consume, flatten, pairwise, powerset, take, unique_everseen, + all_equal, ) __all__ = [ @@ -48,9 +52,9 @@ 'chunked_even', 'circular_shifts', 'collapse', - 'collate', 'combination_index', 'consecutive_groups', + 'constrained_batches', 'consumer', 'count_cycle', 'countable', @@ -66,6 +70,7 @@ 'first', 'groupby_transform', 'ichunked', + 'iequals', 'ilen', 'interleave', 'interleave_evenly', @@ -76,6 +81,7 @@ 'iterate', 'last', 'locate', + 'longest_common_prefix', 'lstrip', 'make_decorator', 'map_except', @@ -132,9 +138,6 @@ ] -_marker = object() - - def chunked(iterable, n, strict=False): """Break *iterable* into lists of length *n*: @@ -409,44 +412,6 @@ def __getitem__(self, index): return self._cache[index] -def collate(*iterables, **kwargs): - """Return a sorted merge of the items from each of several already-sorted - *iterables*. - - >>> list(collate('ACDZ', 'AZ', 'JKL')) - ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z'] - - Works lazily, keeping only the next value from each iterable in memory. Use - :func:`collate` to, for example, perform a n-way mergesort of items that - don't fit in memory. - - If a *key* function is specified, the iterables will be sorted according - to its result: - - >>> key = lambda s: int(s) # Sort by numeric value, not by string - >>> list(collate(['1', '10'], ['2', '11'], key=key)) - ['1', '2', '10', '11'] - - - If the *iterables* are sorted in descending order, set *reverse* to - ``True``: - - >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True)) - [5, 4, 3, 2, 1, 0] - - If the elements of the passed-in iterables are out of order, you might get - unexpected results. - - On Python 3.5+, this function is an alias for :func:`heapq.merge`. - - """ - warnings.warn( - "collate is no longer part of more_itertools, use heapq.merge", - DeprecationWarning, - ) - return merge(*iterables, **kwargs) - - def consumer(func): """Decorator that automatically advances a PEP-342-style "reverse iterator" to its first yield point so you don't have to call ``next()`` on it @@ -872,7 +837,9 @@ def windowed(seq, n, fillvalue=None, step=1): yield tuple(window) size = len(window) - if size < n: + if size == 0: + return + elif size < n: yield tuple(chain(window, repeat(fillvalue, n - size))) elif 0 < i < min(step, n): window += (fillvalue,) * i @@ -1645,45 +1612,6 @@ def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): ) -class UnequalIterablesError(ValueError): - def __init__(self, details=None): - msg = 'Iterables have different lengths' - if details is not None: - msg += (': index 0 has length {}; index {} has length {}').format( - *details - ) - - super().__init__(msg) - - -def _zip_equal_generator(iterables): - for combo in zip_longest(*iterables, fillvalue=_marker): - for val in combo: - if val is _marker: - raise UnequalIterablesError() - yield combo - - -def _zip_equal(*iterables): - # Check whether the iterables are all the same size. - try: - first_size = len(iterables[0]) - for i, it in enumerate(iterables[1:], 1): - size = len(it) - if size != first_size: - break - else: - # If we didn't break out, we can use the built-in zip. - return zip(*iterables) - - # If we did break out, there was a mismatch. - raise UnequalIterablesError(details=(first_size, i, size)) - # If any one of the iterables didn't have a length, start reading - # them until one runs out. 
- except TypeError: - return _zip_equal_generator(iterables) - - def zip_equal(*iterables): """``zip`` the input *iterables* together, but raise ``UnequalIterablesError`` if they aren't all the same length. @@ -1825,7 +1753,7 @@ def unzip(iterable): of the zipped *iterable*. The ``i``-th iterable contains the ``i``-th element from each element - of the zipped iterable. The first element is used to to determine the + of the zipped iterable. The first element is used to determine the length of the remaining elements. >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] @@ -2375,6 +2303,16 @@ def locate(iterable, pred=bool, window_size=None): return compress(count(), starmap(pred, it)) +def longest_common_prefix(iterables): + """Yield elements of the longest common prefix amongst given *iterables*. + + >>> ''.join(longest_common_prefix(['abcd', 'abc', 'abf'])) + 'ab' + + """ + return (c[0] for c in takewhile(all_equal, zip(*iterables))) + + def lstrip(iterable, pred): """Yield the items from *iterable*, but strip any from the beginning for which *pred* returns ``True``. @@ -2683,7 +2621,7 @@ def difference(iterable, func=sub, *, initial=None): if initial is not None: first = [] - return chain(first, starmap(func, zip(b, a))) + return chain(first, map(func, b, a)) class SequenceView(Sequence): @@ -3326,6 +3264,27 @@ def only(iterable, default=None, too_long=None): return first_value +class _IChunk: + def __init__(self, iterable, n): + self._it = islice(iterable, n) + self._cache = deque() + + def fill_cache(self): + self._cache.extend(self._it) + + def __iter__(self): + return self + + def __next__(self): + try: + return next(self._it) + except StopIteration: + if self._cache: + return self._cache.popleft() + else: + raise + + def ichunked(iterable, n): """Break *iterable* into sub-iterables with *n* elements each. :func:`ichunked` is like :func:`chunked`, but it yields iterables @@ -3347,20 +3306,39 @@ def ichunked(iterable, n): [8, 9, 10, 11] """ - source = iter(iterable) - + source = peekable(iter(iterable)) + ichunk_marker = object() while True: # Check to see whether we're at the end of the source iterable - item = next(source, _marker) - if item is _marker: + item = source.peek(ichunk_marker) + if item is ichunk_marker: return - # Clone the source and yield an n-length slice - source, it = tee(chain([item], source)) - yield islice(it, n) + chunk = _IChunk(source, n) + yield chunk + + # Advance the source iterable and fill previous chunk's cache + chunk.fill_cache() + + +def iequals(*iterables): + """Return ``True`` if all given *iterables* are equal to each other, + which means that they contain the same elements in the same order. - # Advance the source iterable - consume(source, n) + The function is useful for comparing iterables of different data types + or iterables that do not support equality checks. + + >>> iequals("abc", ['a', 'b', 'c'], ('a', 'b', 'c'), iter("abc")) + True + + >>> iequals("abc", "acb") + False + + Not to be confused with :func:`all_equals`, which checks whether all + elements of iterable are equal to each other. 
+ + """ + return all(map(all_equal, zip_longest(*iterables, fillvalue=object()))) def distinct_combinations(iterable, r): @@ -3655,7 +3633,9 @@ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1): self._aborted = False self._future = None self._wait_seconds = wait_seconds - self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) + # Lazily import concurrent.future + self._executor = __import__( + ).futures.__import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) self._iterator = self._reader() def __enter__(self): @@ -3960,7 +3940,7 @@ def combination_index(element, iterable): n, _ = last(pool, default=(n, None)) - # Python versiosn below 3.8 don't have math.comb + # Python versions below 3.8 don't have math.comb index = 1 for i, j in enumerate(reversed(indexes), start=1): j = n - j @@ -4113,7 +4093,7 @@ def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False): If the *strict* keyword argument is ``True``, then ``UnequalIterablesError`` will be raised if any of the iterables have - different lengthss. + different lengths. """ def is_scalar(obj): @@ -4314,3 +4294,53 @@ def minmax(iterable_or_value, *others, key=None, default=_marker): hi, hi_key = y, y_key return lo, hi + + +def constrained_batches( + iterable, max_size, max_count=None, get_len=len, strict=True +): + """Yield batches of items from *iterable* with a combined size limited by + *max_size*. + + >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1'] + >>> list(constrained_batches(iterable, 10)) + [(b'12345', b'123'), (b'12345678', b'1', b'1'), (b'12', b'1')] + + If a *max_count* is supplied, the number of items per batch is also + limited: + + >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1'] + >>> list(constrained_batches(iterable, 10, max_count = 2)) + [(b'12345', b'123'), (b'12345678', b'1'), (b'1', b'12'), (b'1',)] + + If a *get_len* function is supplied, use that instead of :func:`len` to + determine item size. + + If *strict* is ``True``, raise ``ValueError`` if any single item is bigger + than *max_size*. Otherwise, allow single items to exceed *max_size*. + """ + if max_size <= 0: + raise ValueError('maximum size must be greater than zero') + + batch = [] + batch_size = 0 + batch_count = 0 + for item in iterable: + item_len = get_len(item) + if strict and item_len > max_size: + raise ValueError('item size exceeds maximum size') + + reached_count = batch_count == max_count + reached_size = item_len + batch_size > max_size + if batch_count and (reached_size or reached_count): + yield tuple(batch) + batch.clear() + batch_size = 0 + batch_count = 0 + + batch.append(item) + batch_size += item_len + batch_count += 1 + + if batch: + yield tuple(batch) diff --git a/pkg_resources/_vendor/more_itertools/more.pyi b/pkg_resources/_vendor/more_itertools/more.pyi index fe7d4bdd7a..1413fae72f 100644 --- a/pkg_resources/_vendor/more_itertools/more.pyi +++ b/pkg_resources/_vendor/more_itertools/more.pyi @@ -72,7 +72,6 @@ class peekable(Generic[_T], Iterator[_T]): @overload def __getitem__(self, index: slice) -> List[_T]: ... -def collate(*iterables: Iterable[_T], **kwargs: Any) -> Iterable[_T]: ... def consumer(func: _GenFn) -> _GenFn: ... def ilen(iterable: Iterable[object]) -> int: ... def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ... @@ -179,7 +178,7 @@ def padded( iterable: Iterable[_T], *, n: Optional[int] = ..., - next_multiple: bool = ... 
+ next_multiple: bool = ..., ) -> Iterator[Optional[_T]]: ... @overload def padded( @@ -225,7 +224,7 @@ def zip_equal( __iter1: Iterable[_T], __iter2: Iterable[_T], __iter3: Iterable[_T], - *iterables: Iterable[_T] + *iterables: Iterable[_T], ) -> Iterator[Tuple[_T, ...]]: ... @overload def zip_offset( @@ -233,7 +232,7 @@ def zip_offset( *, offsets: _SizedIterable[int], longest: bool = ..., - fillvalue: None = None + fillvalue: None = None, ) -> Iterator[Tuple[Optional[_T1]]]: ... @overload def zip_offset( @@ -242,7 +241,7 @@ def zip_offset( *, offsets: _SizedIterable[int], longest: bool = ..., - fillvalue: None = None + fillvalue: None = None, ) -> Iterator[Tuple[Optional[_T1], Optional[_T2]]]: ... @overload def zip_offset( @@ -252,7 +251,7 @@ def zip_offset( *iterables: Iterable[_T], offsets: _SizedIterable[int], longest: bool = ..., - fillvalue: None = None + fillvalue: None = None, ) -> Iterator[Tuple[Optional[_T], ...]]: ... @overload def zip_offset( @@ -420,7 +419,7 @@ def difference( iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, - initial: None = ... + initial: None = ..., ) -> Iterator[Union[_T, _U]]: ... @overload def difference( @@ -529,12 +528,12 @@ def distinct_combinations( def filter_except( validator: Callable[[Any], object], iterable: Iterable[_T], - *exceptions: Type[BaseException] + *exceptions: Type[BaseException], ) -> Iterator[_T]: ... def map_except( function: Callable[[Any], _U], iterable: Iterable[_T], - *exceptions: Type[BaseException] + *exceptions: Type[BaseException], ) -> Iterator[_U]: ... def map_if( iterable: Iterable[Any], @@ -610,7 +609,7 @@ def zip_broadcast( scalar_types: Union[ type, Tuple[Union[type, Tuple[Any, ...]], ...], None ] = ..., - strict: bool = ... + strict: bool = ..., ) -> Iterable[Tuple[_T, ...]]: ... def unique_in_window( iterable: Iterable[_T], n: int, key: Optional[Callable[[_T], _U]] = ... @@ -640,7 +639,7 @@ def minmax( iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None, - default: _U + default: _U, ) -> Union[_U, Tuple[_SupportsLessThanT, _SupportsLessThanT]]: ... @overload def minmax( @@ -653,12 +652,23 @@ def minmax( def minmax( iterable_or_value: _SupportsLessThanT, __other: _SupportsLessThanT, - *others: _SupportsLessThanT + *others: _SupportsLessThanT, ) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ... @overload def minmax( iterable_or_value: _T, __other: _T, *others: _T, - key: Callable[[_T], _SupportsLessThan] + key: Callable[[_T], _SupportsLessThan], ) -> Tuple[_T, _T]: ... +def longest_common_prefix( + iterables: Iterable[Iterable[_T]], +) -> Iterator[_T]: ... +def iequals(*iterables: Iterable[object]) -> bool: ... +def constrained_batches( + iterable: Iterable[object], + max_size: int, + max_count: Optional[int] = ..., + get_len: Callable[[_T], object] = ..., + strict: bool = ..., +) -> Iterator[Tuple[_T]]: ... diff --git a/pkg_resources/_vendor/more_itertools/recipes.py b/pkg_resources/_vendor/more_itertools/recipes.py index a2596423a4..8579620785 100644 --- a/pkg_resources/_vendor/more_itertools/recipes.py +++ b/pkg_resources/_vendor/more_itertools/recipes.py @@ -7,11 +7,16 @@ .. 
[1] http://docs.python.org/library/itertools.html#recipes """ -import warnings +import math +import operator + from collections import deque +from collections.abc import Sized +from functools import reduce from itertools import ( chain, combinations, + compress, count, cycle, groupby, @@ -21,11 +26,11 @@ tee, zip_longest, ) -import operator from random import randrange, sample, choice __all__ = [ 'all_equal', + 'batched', 'before_and_after', 'consume', 'convolve', @@ -41,6 +46,7 @@ 'pad_none', 'pairwise', 'partition', + 'polynomial_from_roots', 'powerset', 'prepend', 'quantify', @@ -50,7 +56,9 @@ 'random_product', 'repeatfunc', 'roundrobin', + 'sieve', 'sliding_window', + 'subslices', 'tabulate', 'tail', 'take', @@ -59,6 +67,8 @@ 'unique_justseen', ] +_marker = object() + def take(n, iterable): """Return first *n* items of the iterable as a list. @@ -102,7 +112,14 @@ def tail(n, iterable): ['E', 'F', 'G'] """ - return iter(deque(iterable, maxlen=n)) + # If the given iterable has a length, then we can use islice to get its + # final elements. Note that if the iterable is not actually Iterable, + # either islice or deque will throw a TypeError. This is why we don't + # check if it is Iterable. + if isinstance(iterable, Sized): + yield from islice(iterable, max(0, len(iterable) - n), None) + else: + yield from iter(deque(iterable, maxlen=n)) def consume(iterator, n=None): @@ -284,20 +301,83 @@ def pairwise(iterable): pairwise.__doc__ = _pairwise.__doc__ -def grouper(iterable, n, fillvalue=None): - """Collect data into fixed-length chunks or blocks. +class UnequalIterablesError(ValueError): + def __init__(self, details=None): + msg = 'Iterables have different lengths' + if details is not None: + msg += (': index 0 has length {}; index {} has length {}').format( + *details + ) + + super().__init__(msg) + + +def _zip_equal_generator(iterables): + for combo in zip_longest(*iterables, fillvalue=_marker): + for val in combo: + if val is _marker: + raise UnequalIterablesError() + yield combo + + +def _zip_equal(*iterables): + # Check whether the iterables are all the same size. + try: + first_size = len(iterables[0]) + for i, it in enumerate(iterables[1:], 1): + size = len(it) + if size != first_size: + break + else: + # If we didn't break out, we can use the built-in zip. + return zip(*iterables) - >>> list(grouper('ABCDEFG', 3, 'x')) + # If we did break out, there was a mismatch. + raise UnequalIterablesError(details=(first_size, i, size)) + # If any one of the iterables didn't have a length, start reading + # them until one runs out. + except TypeError: + return _zip_equal_generator(iterables) + + +def grouper(iterable, n, incomplete='fill', fillvalue=None): + """Group elements from *iterable* into fixed-length groups of length *n*. + + >>> list(grouper('ABCDEF', 3)) + [('A', 'B', 'C'), ('D', 'E', 'F')] + + The keyword arguments *incomplete* and *fillvalue* control what happens for + iterables whose length is not a multiple of *n*. + + When *incomplete* is `'fill'`, the last group will contain instances of + *fillvalue*. + + >>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x')) [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] + When *incomplete* is `'ignore'`, the last group will not be emitted. + + >>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x')) + [('A', 'B', 'C'), ('D', 'E', 'F')] + + When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised. 
+ + >>> it = grouper('ABCDEFG', 3, incomplete='strict') + >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnequalIterablesError + """ - if isinstance(iterable, int): - warnings.warn( - "grouper expects iterable as first parameter", DeprecationWarning - ) - n, iterable = iterable, n args = [iter(iterable)] * n - return zip_longest(fillvalue=fillvalue, *args) + if incomplete == 'fill': + return zip_longest(*args, fillvalue=fillvalue) + if incomplete == 'strict': + return _zip_equal(*args) + if incomplete == 'ignore': + return zip(*args) + else: + raise ValueError('Expected fill, strict, or ignore') def roundrobin(*iterables): @@ -658,11 +738,12 @@ def true_iterator(): transition.append(elem) return - def remainder_iterator(): - yield from transition - yield from it + # Note: this is different from itertools recipes to allow nesting + # before_and_after remainders into before_and_after again. See tests + # for an example. + remainder_iterator = chain(transition, it) - return true_iterator(), remainder_iterator() + return true_iterator(), remainder_iterator def triplewise(iterable): @@ -696,3 +777,65 @@ def sliding_window(iterable, n): for x in it: window.append(x) yield tuple(window) + + +def subslices(iterable): + """Return all contiguous non-empty subslices of *iterable*. + + >>> list(subslices('ABC')) + [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']] + + This is similar to :func:`substrings`, but emits items in a different + order. + """ + seq = list(iterable) + slices = starmap(slice, combinations(range(len(seq) + 1), 2)) + return map(operator.getitem, repeat(seq), slices) + + +def polynomial_from_roots(roots): + """Compute a polynomial's coefficients from its roots. + + >>> roots = [5, -4, 3] # (x - 5) * (x + 4) * (x - 3) + >>> polynomial_from_roots(roots) # x^3 - 4 * x^2 - 17 * x + 60 + [1, -4, -17, 60] + """ + # Use math.prod for Python 3.8+, + prod = getattr(math, 'prod', lambda x: reduce(operator.mul, x, 1)) + roots = list(map(operator.neg, roots)) + return [ + sum(map(prod, combinations(roots, k))) for k in range(len(roots) + 1) + ] + + +def sieve(n): + """Yield the primes less than n. + + >>> list(sieve(30)) + [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] + """ + isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x))) + limit = isqrt(n) + 1 + data = bytearray([1]) * n + data[:2] = 0, 0 + for p in compress(range(limit), data): + data[p + p : n : p] = bytearray(len(range(p + p, n, p))) + + return compress(count(), data) + + +def batched(iterable, n): + """Batch data into lists of length *n*. The last batch may be shorter. + + >>> list(batched('ABCDEFG', 3)) + [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] + + This recipe is from the ``itertools`` docs. This library also provides + :func:`chunked`, which has a different implementation. + """ + it = iter(iterable) + while True: + batch = list(islice(it, n)) + if not batch: + break + yield batch diff --git a/pkg_resources/_vendor/more_itertools/recipes.pyi b/pkg_resources/_vendor/more_itertools/recipes.pyi index 4648a41b5e..29415c5a56 100644 --- a/pkg_resources/_vendor/more_itertools/recipes.pyi +++ b/pkg_resources/_vendor/more_itertools/recipes.pyi @@ -6,6 +6,7 @@ from typing import ( Iterator, List, Optional, + Sequence, Tuple, TypeVar, Union, @@ -39,21 +40,11 @@ def repeatfunc( func: Callable[..., _U], times: Optional[int] = ..., *args: Any ) -> Iterator[_U]: ... def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ... 
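The ``grouper`` rework and the new recipes above are easy to sanity-check. A minimal sketch, assuming the more-itertools 9.0.0 code vendored here is importable (later releases changed ``batched`` to yield tuples); expected values mirror the doctests in the diff:

.. code-block:: python

    # Exercise the reworked grouper and the recipes added in 9.0.0.
    from more_itertools import batched, grouper, polynomial_from_roots, sieve, subslices

    # grouper's `incomplete` modes replace the old fillvalue-only behavior.
    assert list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x')) == [
        ('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
    assert list(grouper('ABCDEFG', 3, incomplete='ignore')) == [
        ('A', 'B', 'C'), ('D', 'E', 'F')]

    assert list(batched('ABCDEFG', 3)) == [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
    assert list(subslices('AB')) == [['A'], ['A', 'B'], ['B']]
    assert list(sieve(30)) == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
    # (x - 5) * (x + 4) * (x - 3) == x^3 - 4x^2 - 17x + 60
    assert polynomial_from_roots([5, -4, 3]) == [1, -4, -17, 60]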
-@overload def grouper( - iterable: Iterable[_T], n: int -) -> Iterator[Tuple[Optional[_T], ...]]: ... -@overload -def grouper( - iterable: Iterable[_T], n: int, fillvalue: _U -) -> Iterator[Tuple[Union[_T, _U], ...]]: ... -@overload -def grouper( # Deprecated interface - iterable: int, n: Iterable[_T] -) -> Iterator[Tuple[Optional[_T], ...]]: ... -@overload -def grouper( # Deprecated interface - iterable: int, n: Iterable[_T], fillvalue: _U + iterable: Iterable[_T], + n: int, + incomplete: str = ..., + fillvalue: _U = ..., ) -> Iterator[Tuple[Union[_T, _U], ...]]: ... def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ... def partition( @@ -110,3 +101,10 @@ def triplewise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T, _T]]: ... def sliding_window( iterable: Iterable[_T], n: int ) -> Iterator[Tuple[_T, ...]]: ... +def subslices(iterable: Iterable[_T]) -> Iterator[List[_T]]: ... +def polynomial_from_roots(roots: Sequence[int]) -> List[int]: ... +def sieve(n: int) -> Iterator[int]: ... +def batched( + iterable: Iterable[_T], + n: int, +) -> Iterator[List[_T]]: ... diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/METADATA b/pkg_resources/_vendor/packaging-21.3.dist-info/METADATA deleted file mode 100644 index 358ace5362..0000000000 --- a/pkg_resources/_vendor/packaging-21.3.dist-info/METADATA +++ /dev/null @@ -1,453 +0,0 @@ -Metadata-Version: 2.1 -Name: packaging -Version: 21.3 -Summary: Core utilities for Python packages -Home-page: https://github.com/pypa/packaging -Author: Donald Stufft and individual contributors -Author-email: donald@stufft.io -License: BSD-2-Clause or Apache-2.0 -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE -License-File: LICENSE.APACHE -License-File: LICENSE.BSD -Requires-Dist: pyparsing (!=3.0.5,>=2.0.2) - -packaging -========= - -.. start-intro - -Reusable core utilities for various Python Packaging -`interoperability specifications `_. - -This library provides utilities that implement the interoperability -specifications which have clearly one correct behaviour (eg: :pep:`440`) -or benefit greatly from having a single shared implementation (eg: :pep:`425`). - -.. end-intro - -The ``packaging`` project includes the following: version handling, specifiers, -markers, requirements, tags, utilities. - -Documentation -------------- - -The `documentation`_ provides information and the API for the following: - -- Version Handling -- Specifiers -- Markers -- Requirements -- Tags -- Utilities - -Installation ------------- - -Use ``pip`` to install these utilities:: - - pip install packaging - -Discussion ----------- - -If you run into bugs, you can file them in our `issue tracker`_. 
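The core APIs this (now superseded) 21.3 METADATA describes are unchanged in current ``packaging`` releases. For instance, a dependency line such as the ``pyparsing (!=3.0.5,>=2.0.2)`` entry above parses like this — a minimal sketch, assuming a separately installed ``packaging``; the marker clause is added here purely for illustration:

.. code-block:: python

    # Parse a PEP 508 requirement string into name, specifier, and marker.
    from packaging.requirements import Requirement

    req = Requirement('pyparsing!=3.0.5,>=2.0.2; python_version >= "3.6"')
    assert req.name == 'pyparsing'
    assert str(req.specifier) == '!=3.0.5,>=2.0.2'
    assert req.marker.evaluate()  # True on any Python >= 3.6 interpreter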
- -You can also join ``#pypa`` on Freenode to ask questions or get involved. - - -.. _`documentation`: https://packaging.pypa.io/ -.. _`issue tracker`: https://github.com/pypa/packaging/issues - - -Code of Conduct ---------------- - -Everyone interacting in the packaging project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. - -.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - -Contributing ------------- - -The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as -well as how to report a potential security issue. The documentation for this -project also covers information about `project development`_ and `security`_. - -.. _`project development`: https://packaging.pypa.io/en/latest/development/ -.. _`security`: https://packaging.pypa.io/en/latest/security/ - -Project History ---------------- - -Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for -recent changes and project history. - -.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ - -Changelog ---------- - -21.3 - 2021-11-17 -~~~~~~~~~~~~~~~~~ - -* Add a ``pp3-none-any`` tag (`#311 `__) -* Replace the blank pyparsing 3 exclusion with a 3.0.5 exclusion (`#481 `__, `#486 `__) -* Fix a spelling mistake (`#479 `__) - -21.2 - 2021-10-29 -~~~~~~~~~~~~~~~~~ - -* Update documentation entry for 21.1. - -21.1 - 2021-10-29 -~~~~~~~~~~~~~~~~~ - -* Update pin to pyparsing to exclude 3.0.0. - -21.0 - 2021-07-03 -~~~~~~~~~~~~~~~~~ - -* PEP 656: musllinux support (`#411 `__) -* Drop support for Python 2.7, Python 3.4 and Python 3.5. -* Replace distutils usage with sysconfig (`#396 `__) -* Add support for zip files in ``parse_sdist_filename`` (`#429 `__) -* Use cached ``_hash`` attribute to short-circuit tag equality comparisons (`#417 `__) -* Specify the default value for the ``specifier`` argument to ``SpecifierSet`` (`#437 `__) -* Proper keyword-only "warn" argument in packaging.tags (`#403 `__) -* Correctly remove prerelease suffixes from ~= check (`#366 `__) -* Fix type hints for ``Version.post`` and ``Version.dev`` (`#393 `__) -* Use typing alias ``UnparsedVersion`` (`#398 `__) -* Improve type inference for ``packaging.specifiers.filter()`` (`#430 `__) -* Tighten the return type of ``canonicalize_version()`` (`#402 `__) - -20.9 - 2021-01-29 -~~~~~~~~~~~~~~~~~ - -* Run `isort `_ over the code base (`#377 `__) -* Add support for the ``macosx_10_*_universal2`` platform tags (`#379 `__) -* Introduce ``packaging.utils.parse_wheel_filename()`` and ``parse_sdist_filename()`` - (`#387 `__ and `#389 `__) - -20.8 - 2020-12-11 -~~~~~~~~~~~~~~~~~ - -* Revert back to setuptools for compatibility purposes for some Linux distros (`#363 `__) -* Do not insert an underscore in wheel tags when the interpreter version number - is more than 2 digits (`#372 `__) - -20.7 - 2020-11-28 -~~~~~~~~~~~~~~~~~ - -No unreleased changes. - -20.6 - 2020-11-28 -~~~~~~~~~~~~~~~~~ - -.. note:: This release was subsequently yanked, and these changes were included in 20.7. 
- -* Fix flit configuration, to include LICENSE files (`#357 `__) -* Make `intel` a recognized CPU architecture for the `universal` macOS platform tag (`#361 `__) -* Add some missing type hints to `packaging.requirements` (issue:`350`) - -20.5 - 2020-11-27 -~~~~~~~~~~~~~~~~~ - -* Officially support Python 3.9 (`#343 `__) -* Deprecate the ``LegacyVersion`` and ``LegacySpecifier`` classes (`#321 `__) -* Handle ``OSError`` on non-dynamic executables when attempting to resolve - the glibc version string. - -20.4 - 2020-05-19 -~~~~~~~~~~~~~~~~~ - -* Canonicalize version before comparing specifiers. (`#282 `__) -* Change type hint for ``canonicalize_name`` to return - ``packaging.utils.NormalizedName``. - This enables the use of static typing tools (like mypy) to detect mixing of - normalized and un-normalized names. - -20.3 - 2020-03-05 -~~~~~~~~~~~~~~~~~ - -* Fix changelog for 20.2. - -20.2 - 2020-03-05 -~~~~~~~~~~~~~~~~~ - -* Fix a bug that caused a 32-bit OS that runs on a 64-bit ARM CPU (e.g. ARM-v8, - aarch64), to report the wrong bitness. - -20.1 - 2020-01-24 -~~~~~~~~~~~~~~~~~~~ - -* Fix a bug caused by reuse of an exhausted iterator. (`#257 `__) - -20.0 - 2020-01-06 -~~~~~~~~~~~~~~~~~ - -* Add type hints (`#191 `__) - -* Add proper trove classifiers for PyPy support (`#198 `__) - -* Scale back depending on ``ctypes`` for manylinux support detection (`#171 `__) - -* Use ``sys.implementation.name`` where appropriate for ``packaging.tags`` (`#193 `__) - -* Expand upon the API provided by ``packaging.tags``: ``interpreter_name()``, ``mac_platforms()``, ``compatible_tags()``, ``cpython_tags()``, ``generic_tags()`` (`#187 `__) - -* Officially support Python 3.8 (`#232 `__) - -* Add ``major``, ``minor``, and ``micro`` aliases to ``packaging.version.Version`` (`#226 `__) - -* Properly mark ``packaging`` has being fully typed by adding a `py.typed` file (`#226 `__) - -19.2 - 2019-09-18 -~~~~~~~~~~~~~~~~~ - -* Remove dependency on ``attrs`` (`#178 `__, `#179 `__) - -* Use appropriate fallbacks for CPython ABI tag (`#181 `__, `#185 `__) - -* Add manylinux2014 support (`#186 `__) - -* Improve ABI detection (`#181 `__) - -* Properly handle debug wheels for Python 3.8 (`#172 `__) - -* Improve detection of debug builds on Windows (`#194 `__) - -19.1 - 2019-07-30 -~~~~~~~~~~~~~~~~~ - -* Add the ``packaging.tags`` module. (`#156 `__) - -* Correctly handle two-digit versions in ``python_version`` (`#119 `__) - - -19.0 - 2019-01-20 -~~~~~~~~~~~~~~~~~ - -* Fix string representation of PEP 508 direct URL requirements with markers. - -* Better handling of file URLs - - This allows for using ``file:///absolute/path``, which was previously - prevented due to the missing ``netloc``. - - This allows for all file URLs that ``urlunparse`` turns back into the - original URL to be valid. - - -18.0 - 2018-09-26 -~~~~~~~~~~~~~~~~~ - -* Improve error messages when invalid requirements are given. (`#129 `__) - - -17.1 - 2017-02-28 -~~~~~~~~~~~~~~~~~ - -* Fix ``utils.canonicalize_version`` when supplying non PEP 440 versions. - - -17.0 - 2017-02-28 -~~~~~~~~~~~~~~~~~ - -* Drop support for python 2.6, 3.2, and 3.3. - -* Define minimal pyparsing version to 2.0.2 (`#91 `__). - -* Add ``epoch``, ``release``, ``pre``, ``dev``, and ``post`` attributes to - ``Version`` and ``LegacyVersion`` (`#34 `__). - -* Add ``Version().is_devrelease`` and ``LegacyVersion().is_devrelease`` to - make it easy to determine if a release is a development release. 
-
-* Add ``utils.canonicalize_version`` to canonicalize version strings or
-  ``Version`` instances (`#121 `__).
-
-
-16.8 - 2016-10-29
-~~~~~~~~~~~~~~~~~
-
-* Fix markers that utilize ``in`` so that they render correctly.
-
-* Fix an erroneous test on Python RC releases.
-
-
-16.7 - 2016-04-23
-~~~~~~~~~~~~~~~~~
-
-* Add support for the deprecated ``python_implementation`` marker which was
-  an undocumented setuptools marker in addition to the newer markers.
-
-
-16.6 - 2016-03-29
-~~~~~~~~~~~~~~~~~
-
-* Add support for the deprecated, PEP 345 environment markers in addition to
-  the newer markers.
-
-
-16.5 - 2016-02-26
-~~~~~~~~~~~~~~~~~
-
-* Fix a regression in parsing requirements with whitespaces between the comma
-  separators.
-
-
-16.4 - 2016-02-22
-~~~~~~~~~~~~~~~~~
-
-* Fix a regression in parsing requirements like ``foo (==4)``.
-
-
-16.3 - 2016-02-21
-~~~~~~~~~~~~~~~~~
-
-* Fix a bug where ``packaging.requirements:Requirement`` was overly strict when
-  matching legacy requirements.
-
-
-16.2 - 2016-02-09
-~~~~~~~~~~~~~~~~~
-
-* Add a function that implements the name canonicalization from PEP 503.
-
-
-16.1 - 2016-02-07
-~~~~~~~~~~~~~~~~~
-
-* Implement requirement specifiers from PEP 508.
-
-
-16.0 - 2016-01-19
-~~~~~~~~~~~~~~~~~
-
-* Relicense so that packaging is available under *either* the Apache License,
-  Version 2.0 or a 2 Clause BSD license.
-
-* Support installation of packaging when only distutils is available.
-
-* Fix ``==`` comparison when there is a prefix and a local version in play.
-  (`#41 `__).
-
-* Implement environment markers from PEP 508.
-
-
-15.3 - 2015-08-01
-~~~~~~~~~~~~~~~~~
-
-* Normalize post-release spellings for rev/r prefixes. `#35 `__
-
-
-15.2 - 2015-05-13
-~~~~~~~~~~~~~~~~~
-
-* Fix an error where the arbitrary specifier (``===``) was not correctly
-  allowing pre-releases when it was being used.
-
-* Expose the specifier and version parts through properties on the
-  ``Specifier`` classes.
-
-* Allow iterating over the ``SpecifierSet`` to get access to all of the
-  ``Specifier`` instances.
-
-* Allow testing if a version is contained within a specifier via the ``in``
-  operator.
-
-
-15.1 - 2015-04-13
-~~~~~~~~~~~~~~~~~
-
-* Fix a logic error that was causing inconsistent answers about whether or not
-  a pre-release was contained within a ``SpecifierSet`` or not.
-
-
-15.0 - 2015-01-02
-~~~~~~~~~~~~~~~~~
-
-* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to
-  make it easy to determine if a release is a post release.
-
-* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make
-  it easy to get the public version without any pre or post release markers.
-
-* Support the update to PEP 440 which removed the implied ``!=V.*`` when using
-  either the ``>V`` or ``<V`` operator.
-
-
-14.3 - 2014-11-19
-~~~~~~~~~~~~~~~~~
-
-* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely
-  handle legacy specifiers as well as PEP 440 specifiers.
-
-* **BACKWARDS INCOMPATIBLE** Move the specifier support out of
-  ``packaging.version`` into ``packaging.specifiers``.
-
-
-14.2 - 2014-09-10
-~~~~~~~~~~~~~~~~~
-
-* Add prerelease support to ``Specifier``.
-* Remove the ability to do ``item in Specifier()`` and replace it with
-  ``Specifier().contains(item)`` in order to allow flags that signal if a
-  prerelease should be accepted or not.
-* Add a method ``Specifier().filter()`` which will take an iterable and returns
-  an iterable with items that do not match the specifier filtered out.
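The ``Specifier().contains()`` and ``Specifier().filter()`` API introduced in 14.2,
together with the ``in`` operator from 15.2, is still the heart of the specifier
interface vendored by this change. A minimal usage sketch, assuming the standalone
``packaging`` distribution from PyPI rather than the vendored copy::

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet(">=1.0,<2.0")

    # Membership testing: the ``in`` operator and contains() agree.
    assert "1.4" in spec
    assert spec.contains(Version("1.4"))

    # Pre-releases are rejected by default; contains() takes an explicit flag.
    assert not spec.contains("1.5rc1")
    assert spec.contains("1.5rc1", prereleases=True)

    # filter() drops items that do not match the specifier.
    assert list(spec.filter(["0.9", "1.0", "1.5", "2.0"])) == ["1.0", "1.5"]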
- - -14.1 - 2014-09-08 -~~~~~~~~~~~~~~~~~ - -* Allow ``LegacyVersion`` and ``Version`` to be sorted together. -* Add ``packaging.version.parse()`` to enable easily parsing a version string - as either a ``Version`` or a ``LegacyVersion`` depending on it's PEP 440 - validity. - - -14.0 - 2014-09-05 -~~~~~~~~~~~~~~~~~ - -* Initial release. - - -.. _`master`: https://github.com/pypa/packaging/ - - diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/RECORD b/pkg_resources/_vendor/packaging-21.3.dist-info/RECORD deleted file mode 100644 index 97cace1022..0000000000 --- a/pkg_resources/_vendor/packaging-21.3.dist-info/RECORD +++ /dev/null @@ -1,32 +0,0 @@ -packaging-21.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -packaging-21.3.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 -packaging-21.3.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 -packaging-21.3.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 -packaging-21.3.dist-info/METADATA,sha256=KuKIy6qDLP3svIt6ejCbxBDhvq11ebkgUN55MeyKFyc,15147 -packaging-21.3.dist-info/RECORD,, -packaging-21.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging-21.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -packaging-21.3.dist-info/top_level.txt,sha256=zFdHrhWnPslzsiP455HutQsqPB6v0KCtNUMtUtrefDw,10 -packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 -packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 -packaging/__pycache__/__about__.cpython-310.pyc,, -packaging/__pycache__/__init__.cpython-310.pyc,, -packaging/__pycache__/_manylinux.cpython-310.pyc,, -packaging/__pycache__/_musllinux.cpython-310.pyc,, -packaging/__pycache__/_structures.cpython-310.pyc,, -packaging/__pycache__/markers.cpython-310.pyc,, -packaging/__pycache__/requirements.cpython-310.pyc,, -packaging/__pycache__/specifiers.cpython-310.pyc,, -packaging/__pycache__/tags.cpython-310.pyc,, -packaging/__pycache__/utils.cpython-310.pyc,, -packaging/__pycache__/version.cpython-310.pyc,, -packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 -packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 -packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -packaging/markers.py,sha256=Fygi3_eZnjQ-3VJizW5AhI5wvo0Hb6RMk4DidsKpOC0,8475 -packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging/requirements.py,sha256=rjaGRCMepZS1mlYMjJ5Qh6rfq3gtsCRQUQmftGZ_bu8,4664 -packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 -packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 -packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 -packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/WHEEL b/pkg_resources/_vendor/packaging-21.3.dist-info/WHEEL deleted file mode 100644 index 5bad85fdc1..0000000000 --- a/pkg_resources/_vendor/packaging-21.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt b/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt deleted file mode 100644 index 748809f75c..0000000000 --- a/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt 
+++ /dev/null
@@ -1 +0,0 @@
-packaging
diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/INSTALLER b/pkg_resources/_vendor/packaging-23.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-8.12.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/packaging-23.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE b/pkg_resources/_vendor/packaging-23.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE
rename to pkg_resources/_vendor/packaging-23.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.APACHE b/pkg_resources/_vendor/packaging-23.0.dist-info/LICENSE.APACHE
similarity index 100%
rename from pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.APACHE
rename to pkg_resources/_vendor/packaging-23.0.dist-info/LICENSE.APACHE
diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.BSD b/pkg_resources/_vendor/packaging-23.0.dist-info/LICENSE.BSD
similarity index 100%
rename from pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.BSD
rename to pkg_resources/_vendor/packaging-23.0.dist-info/LICENSE.BSD
diff --git a/pkg_resources/_vendor/packaging-23.0.dist-info/METADATA b/pkg_resources/_vendor/packaging-23.0.dist-info/METADATA
new file mode 100644
index 0000000000..7c5087aaef
--- /dev/null
+++ b/pkg_resources/_vendor/packaging-23.0.dist-info/METADATA
@@ -0,0 +1,98 @@
+Metadata-Version: 2.1
+Name: packaging
+Version: 23.0
+Summary: Core utilities for Python packages
+Author-email: Donald Stufft <donald@stufft.io>
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Project-URL: Documentation, https://packaging.pypa.io/
+Project-URL: Source, https://github.com/pypa/packaging
+
+packaging
+=========
+
+.. start-intro
+
+Reusable core utilities for various Python Packaging
+`interoperability specifications `_.
+
+This library provides utilities that implement the interoperability
+specifications which have clearly one correct behaviour (eg: :pep:`440`)
+or benefit greatly from having a single shared implementation (eg: :pep:`425`).
+
+.. end-intro
+
+The ``packaging`` project includes the following: version handling, specifiers,
+markers, requirements, tags, utilities.
+
+Documentation
+-------------
+
+The `documentation`_ provides information and the API for the following:
+
+- Version Handling
+- Specifiers
+- Markers
+- Requirements
+- Tags
+- Utilities
+
+Installation
+------------
+
+Use ``pip`` to install these utilities::
+
+    pip install packaging
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. 
_`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/pkg_resources/_vendor/packaging-23.0.dist-info/RECORD b/pkg_resources/_vendor/packaging-23.0.dist-info/RECORD new file mode 100644 index 0000000000..3cdb0c289e --- /dev/null +++ b/pkg_resources/_vendor/packaging-23.0.dist-info/RECORD @@ -0,0 +1,35 @@ +packaging-23.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +packaging-23.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-23.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-23.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging-23.0.dist-info/METADATA,sha256=RFXOWcbEEITO7DWWyhtk55j4BGh7QaKb2VqL0TF8Y_4,3054 +packaging-23.0.dist-info/RECORD,, +packaging-23.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging-23.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81 +packaging/__init__.py,sha256=7BlJ_DcIt1zv01UQcZLozidczzNcivKj66zIBkRL3R4,501 +packaging/__pycache__/__init__.cpython-38.pyc,, +packaging/__pycache__/_elffile.cpython-38.pyc,, +packaging/__pycache__/_manylinux.cpython-38.pyc,, +packaging/__pycache__/_musllinux.cpython-38.pyc,, +packaging/__pycache__/_parser.cpython-38.pyc,, +packaging/__pycache__/_structures.cpython-38.pyc,, +packaging/__pycache__/_tokenizer.cpython-38.pyc,, +packaging/__pycache__/markers.cpython-38.pyc,, +packaging/__pycache__/requirements.cpython-38.pyc,, +packaging/__pycache__/specifiers.cpython-38.pyc,, +packaging/__pycache__/tags.cpython-38.pyc,, +packaging/__pycache__/utils.cpython-38.pyc,, +packaging/__pycache__/version.cpython-38.pyc,, +packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 +packaging/_manylinux.py,sha256=uZ821PBqQrokhUbwe7E0UodEraMHqzoSgTvfJ8MIl30,8813 +packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524 +packaging/_parser.py,sha256=jjFjSqNf7W2-Ta6YUkywK0P4d2i0Bz_MqLOfl7O1Tkw,9399 +packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +packaging/_tokenizer.py,sha256=czGibL-4oPofx1pCSt_hrozNbHlOPrqGv6m-0d-iTdo,5148 +packaging/markers.py,sha256=HDPXE0_MPBSwsw_9upez8t8mdrqUGrgiOG_qyQy-W30,8161 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/requirements.py,sha256=4nOKheaBbVEQXTGSqaOGTy1Tkg7J_sEno3u8jxC-baw,3264 +packaging/specifiers.py,sha256=-3ajZ5CkQrjNW5H8NPjvCV2RBgr-w9wcYBdb8kjPBfg,39046 
+packaging/tags.py,sha256=fOKnZVfiU3oc9CPSzjJUsMk5VTfgOfpNhWobUH0sAlg,18065 +packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355 +packaging/version.py,sha256=_ULefmddLDLJ9VKRFAXhshEd0zP8OYPhcjCPfYolUbo,16295 diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED b/pkg_resources/_vendor/packaging-23.0.dist-info/REQUESTED similarity index 100% rename from pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED rename to pkg_resources/_vendor/packaging-23.0.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/packaging-23.0.dist-info/WHEEL b/pkg_resources/_vendor/packaging-23.0.dist-info/WHEEL new file mode 100644 index 0000000000..db4a255f3a --- /dev/null +++ b/pkg_resources/_vendor/packaging-23.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.8.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/pkg_resources/_vendor/packaging/__about__.py b/pkg_resources/_vendor/packaging/__about__.py deleted file mode 100644 index 3551bc2d29..0000000000 --- a/pkg_resources/_vendor/packaging/__about__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "21.3" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014-2019 %s" % __author__ diff --git a/pkg_resources/_vendor/packaging/__init__.py b/pkg_resources/_vendor/packaging/__init__.py index 3c50c5dcfe..4112fec0a5 100644 --- a/pkg_resources/_vendor/packaging/__init__.py +++ b/pkg_resources/_vendor/packaging/__init__.py @@ -2,24 +2,14 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from .__about__ import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] +__version__ = "23.0" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "2014-2019 %s" % __author__ diff --git a/pkg_resources/_vendor/packaging/_elffile.py b/pkg_resources/_vendor/packaging/_elffile.py new file mode 100644 index 0000000000..6fb19b30bb --- /dev/null +++ b/pkg_resources/_vendor/packaging/_elffile.py @@ -0,0 +1,108 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. Only the read interface is implemented. 
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+import enum
+import os
+import struct
+from typing import IO, Optional, Tuple
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error:
+            raise ELFInvalid("unable to parse identification")
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            )
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> Tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> Optional[str]:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
+                continue
+            self._f.seek(data[self._p_idx[1]])
+            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+        return None
diff --git a/pkg_resources/_vendor/packaging/_manylinux.py b/pkg_resources/_vendor/packaging/_manylinux.py
index 4c379aa6f6..2f0cc7439a 100644
--- a/pkg_resources/_vendor/packaging/_manylinux.py
+++ b/pkg_resources/_vendor/packaging/_manylinux.py
@@ -1,121 +1,58 @@
 import collections
+import contextlib
 import functools
 import os
 import re
-import struct
 import sys
 import warnings
-from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
-
-
-# Python does not provide platform information at sufficient granularity to
-# identify the architecture of the running executable in some cases, so we
-# determine it dynamically by reading the information from the running
-# process. This only applies on Linux, which uses the ELF format.
-class _ELFFileHeader:
-    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
-    class _InvalidELFFileHeader(ValueError):
-        """
-        An invalid ELF file header was found.
-        """
-
-    ELF_MAGIC_NUMBER = 0x7F454C46
-    ELFCLASS32 = 1
-    ELFCLASS64 = 2
-    ELFDATA2LSB = 1
-    ELFDATA2MSB = 2
-    EM_386 = 3
-    EM_S390 = 22
-    EM_ARM = 40
-    EM_X86_64 = 62
-    EF_ARM_ABIMASK = 0xFF000000
-    EF_ARM_ABI_VER5 = 0x05000000
-    EF_ARM_ABI_FLOAT_HARD = 0x00000400
-
-    def __init__(self, file: IO[bytes]) -> None:
-        def unpack(fmt: str) -> int:
-            try:
-                data = file.read(struct.calcsize(fmt))
-                result: Tuple[int, ...] = struct.unpack(fmt, data)
-            except struct.error:
-                raise _ELFFileHeader._InvalidELFFileHeader()
-            return result[0]
-
-        self.e_ident_magic = unpack(">I")
-        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_class = unpack("B")
-        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_data = unpack("B")
-        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_version = unpack("B")
-        self.e_ident_osabi = unpack("B")
-        self.e_ident_abiversion = unpack("B")
-        self.e_ident_pad = file.read(7)
-        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
-        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
-        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
-        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
-        self.e_type = unpack(format_h)
-        self.e_machine = unpack(format_h)
-        self.e_version = unpack(format_i)
-        self.e_entry = unpack(format_p)
-        self.e_phoff = unpack(format_p)
-        self.e_shoff = unpack(format_p)
-        self.e_flags = unpack(format_i)
-        self.e_ehsize = unpack(format_h)
-        self.e_phentsize = unpack(format_h)
-        self.e_phnum = unpack(format_h)
-        self.e_shentsize = unpack(format_h)
-        self.e_shnum = unpack(format_h)
-        self.e_shstrndx = unpack(format_h)
-
-
-def _get_elf_header() -> Optional[_ELFFileHeader]:
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
+EF_ARM_ABIMASK = 0xFF000000
+EF_ARM_ABI_VER5 = 0x05000000
+EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+
+@contextlib.contextmanager
+def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    try:
-        with open(sys.executable, "rb") as f:
-            elf_header = _ELFFileHeader(f)
-    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
-        return None
-    return elf_header
+        with open(path, "rb") as f:
+            yield ELFFile(f)
+    except (OSError, TypeError, ValueError):
+        yield None
 
 
-def _is_linux_armhf() -> bool:
+def _is_linux_armhf(executable: str) -> bool:
     # hard-float ABI can be detected from the ELF header of the running
     # process
     # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
-    elf_header = _get_elf_header()
-    if elf_header is None:
-        return False
-    result = elf_header.e_ident_class == elf_header.ELFCLASS32
-    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
-    result &= elf_header.e_machine == elf_header.EM_ARM
-    result &= (
-        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
-    ) == elf_header.EF_ARM_ABI_VER5
-    result &= (
-        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
-    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
-    return result
-
-
-def _is_linux_i686() -> bool:
-    elf_header = _get_elf_header()
-    if elf_header is None:
-        return False
-    result = elf_header.e_ident_class == elf_header.ELFCLASS32
-    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
-    result &= elf_header.e_machine == elf_header.EM_386
-    return result
+    with _parse_elf(executable) as f:
+        return (
+            f is not None
+            and f.capacity == EIClass.C32
+            and f.encoding == EIData.Lsb
+            and f.machine == EMachine.Arm
+            and f.flags & EF_ARM_ABIMASK == 
EF_ARM_ABI_VER5
+            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
+        )
+
+
+def _is_linux_i686(executable: str) -> bool:
+    with _parse_elf(executable) as f:
+        return (
+            f is not None
+            and f.capacity == EIClass.C32
+            and f.encoding == EIData.Lsb
+            and f.machine == EMachine.I386
+        )
 
 
-def _have_compatible_abi(arch: str) -> bool:
+def _have_compatible_abi(executable: str, arch: str) -> bool:
     if arch == "armv7l":
-        return _is_linux_armhf()
+        return _is_linux_armhf(executable)
     if arch == "i686":
-        return _is_linux_i686()
+        return _is_linux_i686(executable)
     return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
 
 
@@ -141,10 +78,10 @@ def _glibc_version_string_confstr() -> Optional[str]:
     # platform module.
     # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
     try:
-        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
-        version_string = os.confstr("CS_GNU_LIBC_VERSION")
+        # Should be a string like "glibc 2.17".
+        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
         assert version_string is not None
-        _, version = version_string.split()
+        _, version = version_string.rsplit()
     except (AssertionError, AttributeError, OSError, ValueError):
         # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
         return None
@@ -211,8 +148,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
     m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
     if not m:
         warnings.warn(
-            "Expected glibc version with 2 components major.minor,"
-            " got: %s" % version_str,
+            f"Expected glibc version with 2 components major.minor,"
+            f" got: {version_str}",
             RuntimeWarning,
         )
         return -1, -1
@@ -265,7 +202,7 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
 
 
 def platform_tags(linux: str, arch: str) -> Iterator[str]:
-    if not _have_compatible_abi(arch):
+    if not _have_compatible_abi(sys.executable, arch):
         return
     # Oldest glibc to be supported regardless of architecture is (2, 17).
     too_old_glibc2 = _GLibCVersion(2, 16)
diff --git a/pkg_resources/_vendor/packaging/_musllinux.py b/pkg_resources/_vendor/packaging/_musllinux.py
index 8ac3059ba3..706ba600a9 100644
--- a/pkg_resources/_vendor/packaging/_musllinux.py
+++ b/pkg_resources/_vendor/packaging/_musllinux.py
@@ -4,68 +4,13 @@
 linked against musl, and what musl version is used.
 """
 
-import contextlib
 import functools
-import operator
-import os
 import re
-import struct
 import subprocess
 import sys
-from typing import IO, Iterator, NamedTuple, Optional, Tuple
+from typing import Iterator, NamedTuple, Optional
 
-
-def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
-    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
-
-
-def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
-    """Detect musl libc location by parsing the Python executable.
-
-    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
-    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
-    """
-    f.seek(0)
-    try:
-        ident = _read_unpacked(f, "16B")
-    except struct.error:
-        return None
-    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF.
-        return None
-    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version.
-
-    try:
-        # e_fmt: Format for program header.
-        # p_fmt: Format for section header.
-        # p_idx: Indexes to find p_type, p_offset, and p_filesz.
-        e_fmt, p_fmt, p_idx = {
-            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit.
-            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit.
- }[ident[4]] - except KeyError: - return None - else: - p_get = operator.itemgetter(*p_idx) - - # Find the interpreter section and return its content. - try: - _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) - except struct.error: - return None - for i in range(e_phnum + 1): - f.seek(e_phoff + e_phentsize * i) - try: - p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) - except struct.error: - return None - if p_type != 3: # Not PT_INTERP. - continue - f.seek(p_offset) - interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") - if "musl" not in interpreter: - return None - return interpreter - return None +from ._elffile import ELFFile class _MuslVersion(NamedTuple): @@ -95,13 +40,12 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]: Version 1.2.2 Dynamic Program Loader """ - with contextlib.ExitStack() as stack: - try: - f = stack.enter_context(open(executable, "rb")) - except OSError: - return None - ld = _parse_ld_musl_from_elf(f) - if not ld: + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: return None proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) return _parse_musl_version(proc.stderr) diff --git a/pkg_resources/_vendor/packaging/_parser.py b/pkg_resources/_vendor/packaging/_parser.py new file mode 100644 index 0000000000..2bc6a8f98b --- /dev/null +++ b/pkg_resources/_vendor/packaging/_parser.py @@ -0,0 +1,328 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains ENBF-inspired grammar representing +the implementation. +""" + +import ast +from typing import Any, List, NamedTuple, Optional, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] +# MarkerList = List[Union["MarkerList", MarkerAtom, str]] +# mypy does not support recursive type definition +# https://github.com/python/mypy/issues/731 +MarkerAtom = Any +MarkerList = List[Any] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: List[str] + specifier: str + marker: Optional[MarkerList] + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? 
requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> Tuple[str, str, Optional[MarkerList]]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? + """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> List[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens("LEFT_BRACKET", "RIGHT_BRACKET"): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: List[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? 
+ """ + with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? 
+ """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if ( + env_var == "platform_python_implementation" + or env_var == "python_implementation" + ): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of " + "<=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/pkg_resources/_vendor/packaging/_tokenizer.py b/pkg_resources/_vendor/packaging/_tokenizer.py new file mode 100644 index 0000000000..b1fb207c7f --- /dev/null +++ b/pkg_resources/_vendor/packaging/_tokenizer.py @@ -0,0 +1,188 @@ +import contextlib +import re +from dataclasses import dataclass +from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: Tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extra + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + 
"""Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. + """ + + def __init__( + self, + source: str, + *, + rules: "Dict[str, Union[str, re.Pattern[str]]]", + ) -> None: + self.source = source + self.rules: Dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Optional[Token] = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert ( + self.next_token is None + ), f"Cannot check for {name!r}, already have {self.next_token!r}" + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: Optional[int] = None, + span_end: Optional[int] = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens(self, open_token: str, close_token: str) -> Iterator[bool]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield open_position is not None + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected closing {close_token}", + span_start=open_position, + ) + + self.read() diff --git a/pkg_resources/_vendor/packaging/markers.py b/pkg_resources/_vendor/packaging/markers.py index 18769b09a8..68369c981b 100644 --- a/pkg_resources/_vendor/packaging/markers.py +++ b/pkg_resources/_vendor/packaging/markers.py @@ -8,19 +8,10 @@ import sys from typing import Any, Callable, Dict, List, Optional, Tuple, Union -from pkg_resources.extern.pyparsing import ( # noqa: N817 - Forward, - Group, - Literal as L, - ParseException, - ParseResults, - QuotedString, - ZeroOrMore, - stringEnd, - stringStart, -) - +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable, parse_marker +from ._tokenizer import ParserSyntaxError from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name __all__ = [ "InvalidMarker", @@ -52,101 +43,24 @@ class UndefinedEnvironmentName(ValueError): """ -class Node: - def __init__(self, value: Any) -> None: - self.value = value - - def __str__(self) -> str: - return 
str(self.value) - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") # PEP-345 - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # undocumented setuptools legacy - | L("extra") # PEP-508 -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results +def _normalize_extra_values(results: Any) -> Any: + """ + Normalize extra values. 
+ """ + if isinstance(results[0], tuple): + lhs, op, rhs = results[0] + if isinstance(lhs, Variable) and lhs.value == "extra": + normalized_extra = canonicalize_name(rhs.value) + rhs = Value(normalized_extra) + elif isinstance(rhs, Variable) and rhs.value == "extra": + normalized_extra = canonicalize_name(lhs.value) + lhs = Value(normalized_extra) + results[0] = lhs, op, rhs + return results def _format_marker( - marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True + marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True ) -> str: assert isinstance(marker, (list, tuple, str)) @@ -192,7 +106,7 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool: except InvalidSpecifier: pass else: - return spec.contains(lhs) + return spec.contains(lhs, prereleases=True) oper: Optional[Operator] = _operators.get(op.serialize()) if oper is None: @@ -201,25 +115,19 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool: return oper(lhs, rhs) -class Undefined: - pass - +def _normalize(*values: str, key: str) -> Tuple[str, ...]: + # PEP 685 – Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + return tuple(canonicalize_name(v) for v in values) -_undefined = Undefined() + # other environment markers don't have such standards + return values -def _get_env(environment: Dict[str, str], name: str) -> str: - value: Union[str, Undefined] = environment.get(name, _undefined) - - if isinstance(value, Undefined): - raise UndefinedEnvironmentName( - f"{name!r} does not exist in evaluation environment." - ) - - return value - - -def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: +def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: groups: List[List[bool]] = [[]] for marker in markers: @@ -231,12 +139,15 @@ def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: lhs, op, rhs = marker if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) + environment_key = lhs.value + lhs_value = environment[environment_key] rhs_value = rhs.value else: lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) + environment_key = rhs.value + rhs_value = environment[environment_key] + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) groups[-1].append(_eval_op(lhs_value, op, rhs_value)) else: assert marker in ["and", "or"] @@ -274,13 +185,29 @@ def default_environment() -> Dict[str, str]: class Marker: def __init__(self, marker: str) -> None: + # Note: We create a Marker object without calling this constructor in + # packaging.requirements.Requirement. If any additional logic is + # added here, make sure to mirror/adapt Requirement. 
         try:
-            self._markers = _coerce_parse_result(MARKER.parseString(marker))
-        except ParseException as e:
-            raise InvalidMarker(
-                f"Invalid marker: {marker!r}, parse error at "
-                f"{marker[e.loc : e.loc + 8]!r}"
-            )
+            self._markers = _normalize_extra_values(parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
 
     def __str__(self) -> str:
         return _format_marker(self._markers)
@@ -288,6 +215,15 @@ def __str__(self) -> str:
     def __repr__(self) -> str:
         return f"<Marker('{self}')>"
 
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
     def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
         """Evaluate a marker.
 
@@ -298,7 +234,12 @@ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
         The environment is determined from the current Python process.
         """
         current_environment = default_environment()
+        current_environment["extra"] = ""
         if environment is not None:
             current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
 
         return _evaluate_markers(self._markers, current_environment)
diff --git a/pkg_resources/_vendor/packaging/requirements.py b/pkg_resources/_vendor/packaging/requirements.py
index 6af14ec4ce..a9f9b9c7c9 100644
--- a/pkg_resources/_vendor/packaging/requirements.py
+++ b/pkg_resources/_vendor/packaging/requirements.py
@@ -2,26 +2,13 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
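The ``markers.py`` changes above alter observable behaviour in two ways: ``extra``
values on both sides of a comparison are normalized per PEP 685, and a missing
``extra`` now defaults to the empty string instead of raising
``UndefinedEnvironmentName``. A minimal sketch, assuming the standalone
``packaging`` 23.0 distribution (the same code vendored here)::

    from packaging.markers import Marker

    m = Marker('extra == "Test_Suite"')

    # PEP 685: extra names are canonicalized before comparison, so any
    # spelling of the same extra matches.
    assert m.evaluate({"extra": "test-suite"})
    assert m.evaluate({"extra": "TEST.SUITE"})

    # A missing ``extra`` now evaluates as the empty string rather than
    # raising UndefinedEnvironmentName.
    assert not m.evaluate()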
-import re -import string import urllib.parse -from typing import List, Optional as TOptional, Set +from typing import Any, List, Optional, Set -from pkg_resources.extern.pyparsing import ( # noqa - Combine, - Literal as L, - Optional, - ParseException, - Regex, - Word, - ZeroOrMore, - originalTextFor, - stringEnd, - stringStart, -) - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet +from ._parser import parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet class InvalidRequirement(ValueError): @@ -30,60 +17,6 @@ class InvalidRequirement(ValueError): """ -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# pkg_resources.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 -REQUIREMENT.parseString("x[]") - - class Requirement: """Parse a requirement. 
@@ -99,28 +32,29 @@
 
     def __init__(self, requirement_string: str) -> None:
         try:
-            req = REQUIREMENT.parseString(requirement_string)
-        except ParseException as e:
-            raise InvalidRequirement(
-                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
-            )
-
-        self.name: str = req.name
-        if req.url:
-            parsed_url = urllib.parse.urlparse(req.url)
+            parsed = parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e
+
+        self.name: str = parsed.name
+        if parsed.url:
+            parsed_url = urllib.parse.urlparse(parsed.url)
             if parsed_url.scheme == "file":
-                if urllib.parse.urlunparse(parsed_url) != req.url:
+                if urllib.parse.urlunparse(parsed_url) != parsed.url:
                     raise InvalidRequirement("Invalid URL given")
             elif not (parsed_url.scheme and parsed_url.netloc) or (
                 not parsed_url.scheme and not parsed_url.netloc
             ):
-                raise InvalidRequirement(f"Invalid URL: {req.url}")
-            self.url: TOptional[str] = req.url
+                raise InvalidRequirement(f"Invalid URL: {parsed.url}")
+            self.url: Optional[str] = parsed.url
         else:
             self.url = None
-        self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
-        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
-        self.marker: TOptional[Marker] = req.marker if req.marker else None
+        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)
 
     def __str__(self) -> str:
         parts: List[str] = [self.name]
@@ -144,3 +78,18 @@ def __str__(self) -> str:
 
     def __repr__(self) -> str:
         return f"<Requirement('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            self.name == other.name
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py
index 0e218a6f9f..e715ecc8c2 100644
--- a/pkg_resources/_vendor/packaging/specifiers.py
+++ b/pkg_resources/_vendor/packaging/specifiers.py
@@ -1,20 +1,22 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
+"""
+.. 
testsetup:: + + from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from packaging.version import Version +""" import abc -import functools import itertools import re -import warnings from typing import ( Callable, - Dict, Iterable, Iterator, List, Optional, - Pattern, Set, Tuple, TypeVar, @@ -22,17 +24,28 @@ ) from .utils import canonicalize_version -from .version import LegacyVersion, Version, parse +from .version import Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] -ParsedVersion = Union[Version, LegacyVersion] -UnparsedVersion = Union[Version, LegacyVersion, str] -VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) -CallableOperator = Callable[[ParsedVersion, str], bool] + +def _coerce_version(version: UnparsedVersion) -> Version: + if not isinstance(version, Version): + version = Version(version) + return version class InvalidSpecifier(ValueError): """ - An invalid specifier was found, users should refer to PEP 440. + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... + packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' """ @@ -40,35 +53,39 @@ class BaseSpecifier(metaclass=abc.ABCMeta): @abc.abstractmethod def __str__(self) -> str: """ - Returns the str representation of this Specifier like object. This + Returns the str representation of this Specifier-like object. This should be representative of the Specifier itself. """ @abc.abstractmethod def __hash__(self) -> int: """ - Returns a hash value for this Specifier like object. + Returns a hash value for this Specifier-like object. """ @abc.abstractmethod def __eq__(self, other: object) -> bool: """ - Returns a boolean representing whether or not the two Specifier like + Returns a boolean representing whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. """ - @abc.abstractproperty + @property + @abc.abstractmethod def prereleases(self) -> Optional[bool]: - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. """ @prereleases.setter def prereleases(self, value: bool) -> None: - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. + """Setter for :attr:`prereleases`. + + :param value: The value to set. """ @abc.abstractmethod @@ -79,227 +96,28 @@ def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: @abc.abstractmethod def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
""" -class _IndividualSpecifier(BaseSpecifier): - - _operators: Dict[str, str] = {} - _regex: Pattern[str] - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - - self._spec: Tuple[str, str] = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self) -> str: - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"<{self.__class__.__name__}({str(self)!r}{pre})>" - - def __str__(self) -> str: - return "{}{}".format(*self._spec) - - @property - def _canonical_spec(self) -> Tuple[str, str]: - return self._spec[0], canonicalize_version(self._spec[1]) - - def __hash__(self) -> int: - return hash(self._canonical_spec) - - def __eq__(self, other: object) -> bool: - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. - @property - def operator(self) -> str: - return self._spec[0] + .. tip:: - @property - def version(self) -> str: - return self._spec[1] - - @property - def prereleases(self) -> Optional[bool]: - return self._prereleases - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __contains__(self, item: str) -> bool: - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - normalized_item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. 
- for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(==|!=|<=|>=|<|>)) - \s* - (?P - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - super().__init__(spec, prereleases) - - warnings.warn( - "Creating a LegacyVersion has been deprecated and will be " - "removed in the next major release", - DeprecationWarning, - ) - - def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal( - self, prospective: LegacyVersion, spec: str - ) -> bool: - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective > self._coerce_version(spec) - - -def _require_version_compare( - fn: Callable[["Specifier", ParsedVersion, str], bool] -) -> Callable[["Specifier", ParsedVersion, str], bool]: - @functools.wraps(fn) - def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - - return wrapped - - -class Specifier(_IndividualSpecifier): + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). 
+ """ - _regex_str = r""" + _operator_regex_str = r""" (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" (?P (?: # The identity operators allow for an escape hatch that will @@ -309,8 +127,10 @@ class Specifier(_IndividualSpecifier): # but included entirely as an escape hatch. (?<====) # Only match for the identity operator \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. ) | (?: @@ -323,23 +143,23 @@ class Specifier(_IndividualSpecifier): v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* )? ) | @@ -354,7 +174,7 @@ class Specifier(_IndividualSpecifier): [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) (?: # pre release [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) + (alpha|beta|preview|pre|a|b|c|rc) [-_\.]? [0-9]* )? @@ -379,7 +199,7 @@ class Specifier(_IndividualSpecifier): [0-9]+(?:\.[0-9]+)* # release (?: # pre release [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) + (alpha|beta|preview|pre|a|b|c|rc) [-_\.]? [0-9]* )? @@ -391,7 +211,10 @@ class Specifier(_IndividualSpecifier): ) """ - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + _regex = re.compile( + r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) _operators = { "~=": "compatible", @@ -404,8 +227,152 @@ class Specifier(_IndividualSpecifier): "===": "arbitrary", } - @_require_version_compare - def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + """Initialize a Specifier instance. + + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). + """ + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: '{spec}'") + + self._spec: Tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + @property + def prereleases(self) -> bool: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. 
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are, whether they include an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
+        True
+        >>> (Specifier("==1.2.3", prereleases=False) ==
+        ...  Specifier("==1.2.3", prereleases=True))
+        True
+        >>> Specifier("==1.2.3") == "==1.2.3"
+        True
+        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
+        False
+        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
+        False
+        """
+        if isinstance(other, str):
+            try:
+                other = self.__class__(str(other))
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> CallableOperator:
+        operator_callable: CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
 
         # Compatible releases have an equivalent combination of >= and ==. That
         # is that ~=2.2 is equivalent to >=2.2,==2.*. 
This allows us to @@ -426,34 +393,33 @@ def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: prospective, prefix ) - @_require_version_compare - def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: + def _compare_equal(self, prospective: Version, spec: str) -> bool: # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) + normalized_prospective = canonicalize_version(prospective.public) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. - split_spec = _version_split(spec[:-2]) # Remove the trailing .* + split_spec = _version_split(normalized_spec) # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. - split_prospective = _version_split(str(prospective)) + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. - shortened_prospective = split_prospective[: len(split_spec)] + shortened_prospective = padded_prospective[: len(split_spec)] - # Pad out our two sides with zeros so that they both equal the same - # length. - padded_spec, padded_prospective = _pad_version( - split_spec, shortened_prospective - ) - - return padded_prospective == padded_spec + return shortened_prospective == split_spec else: # Convert our spec string into a Version spec_version = Version(spec) @@ -466,30 +432,24 @@ def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: return prospective == spec_version - @_require_version_compare - def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: return not self._compare_equal(prospective, spec) - @_require_version_compare - def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) <= Version(spec) - @_require_version_compare - def _compare_greater_than_equal( - self, prospective: ParsedVersion, spec: str - ) -> bool: + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) >= Version(spec) - @_require_version_compare - def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: # Convert our spec to a Version instance, since we'll want to work with # it as a version. 
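Aside: the comment in ``_compare_compatible`` above states that a compatible-release clause such as ``~=2.2`` is equivalent to the pair ``>=2.2,==2.*``. A minimal sketch, not part of this diff, that exercises the claimed equivalence through the public ``SpecifierSet`` API (the candidate versions are illustrative):

.. code-block:: python

    from packaging.specifiers import SpecifierSet

    compatible = SpecifierSet("~=2.2")
    equivalent = SpecifierSet(">=2.2,==2.*")

    # Both sets should accept and reject exactly the same candidates.
    for candidate in ["2.1", "2.2", "2.9.5", "3.0"]:
        assert (candidate in compatible) == (candidate in equivalent)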
@@ -514,8 +474,7 @@ def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: # version in the spec. return True - @_require_version_compare - def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: # Convert our spec to a Version instance, since we'll want to work with # it as a version. @@ -549,34 +508,133 @@ def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bo def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: return str(prospective).lower() == str(spec).lower() - @property - def prereleases(self) -> bool: + def __contains__(self, item: Union[str, Version]) -> bool: + """Return whether or not the item is contained in this specifier. - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases + :param item: The item to check for. - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) - return False + def contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + False + >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") + True + >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) + True + """ - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version, this allows us to have a shortcut for + # "2.0" in Specifier(">=2") + normalized_item = _coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. 
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = _coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
 
 
 _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
@@ -618,22 +676,39 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
 
 
 class SpecifierSet(BaseSpecifier):
+    """This class abstracts handling of a set of version specifiers.
+
+    It can be passed a single specifier (``>=3.0``), a comma-separated list of
+    specifiers (``>=3.0,!=3.1``), or no specifier at all.
+    """
+
     def __init__(
         self, specifiers: str = "", prereleases: Optional[bool] = None
     ) -> None:
+        """Initialize a SpecifierSet instance.
+
+        :param specifiers:
+            The string representation of a specifier or a comma-separated list of
+            specifiers which will be parsed and normalized before use.
+        :param prereleases:
+            This tells the SpecifierSet if it should accept prerelease versions if
+            applicable or not. 
The default of ``None`` will autodetect it from the
+            given specifiers.
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception will be
+            raised.
+        """
 
-        # Split on , to break each individual specifier into it's own item, and
+        # Split on `,` to break each individual specifier into its own item, and
         # strip each item to remove leading/trailing whitespace.
         split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
 
         # Parsed each individual specifier, attempting first to make it a
-        # Specifier and falling back to a LegacySpecifier.
-        parsed: Set[_IndividualSpecifier] = set()
+        # Specifier.
+        parsed: Set[Specifier] = set()
         for specifier in split_specifiers:
-            try:
-                parsed.add(Specifier(specifier))
-            except InvalidSpecifier:
-                parsed.add(LegacySpecifier(specifier))
+            parsed.add(Specifier(specifier))
 
         # Turn our parsed specifiers into a frozen set and save them for later.
         self._specs = frozenset(parsed)
@@ -642,7 +717,40 @@ def __init__(
         # we accept prereleases or not.
         self._prereleases = prereleases
 
+    @property
+    def prereleases(self) -> Optional[bool]:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
     def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
         pre = (
             f", prereleases={self.prereleases!r}"
             if self._prereleases is not None
@@ -652,12 +760,31 @@ def __repr__(self) -> str:
 
         return f"<SpecifierSet({str(self)!r}{pre})>"
 
     def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
         return ",".join(sorted(str(s) for s in self._specs))
 
     def __hash__(self) -> int:
         return hash(self._specs)
 
     def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        """
         if isinstance(other, str):
             other = SpecifierSet(other)
         elif not isinstance(other, SpecifierSet):
@@ -681,7 +808,25 @@ def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
         return specifier
 
     def __eq__(self, other: object) -> bool:
-        if isinstance(other, (str, _IndividualSpecifier)):
+        """Whether or not the two SpecifierSet-like objects are equal. 
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
             other = SpecifierSet(str(other))
         elif not isinstance(other, SpecifierSet):
             return NotImplemented
@@ -689,43 +834,72 @@ def __eq__(self, other: object) -> bool:
         return self._specs == other._specs
 
     def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
         return len(self._specs)
 
-    def __iter__(self) -> Iterator[_IndividualSpecifier]:
-        return iter(self._specs)
-
-    @property
-    def prereleases(self) -> Optional[bool]:
-
-        # If we have been given an explicit prerelease modifier, then we'll
-        # pass that through here.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # If we don't have any specifiers, and we don't have a forced value,
-        # then we'll just return None since we don't know if this should have
-        # pre-releases or not.
-        if not self._specs:
-            return None
-
-        # Otherwise we'll see if any of the given specifiers accept
-        # prereleases, if any of them do we'll return True, otherwise False.
-        return any(s.prereleases for s in self._specs)
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
 
-    @prereleases.setter
-    def prereleases(self, value: bool) -> None:
-        self._prereleases = value
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
 
     def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
         return self.contains(item)
 
     def contains(
-        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+        self,
+        item: UnparsedVersion,
+        prereleases: Optional[bool] = None,
+        installed: Optional[bool] = None,
     ) -> bool:
-
-        # Ensure that our item is a Version or LegacyVersion instance.
-        if not isinstance(item, (LegacyVersion, Version)):
-            item = parse(item)
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed. 
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
 
         # Determine if we're forcing a prerelease or not, if we're not forcing
         # one for this particular filter call, then we'll use whatever the
@@ -742,6 +916,9 @@ def contains(
         if not prereleases and item.is_prerelease:
             return False
 
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
         # We simply dispatch to the underlying specs here to make sure that the
         # given version is contained within all of them.
         # Note: This use of all() here means that an empty set of specifiers
@@ -749,9 +926,46 @@ def contains(
         return all(s.contains(item, prereleases=prereleases) for s in self._specs)
 
     def filter(
-        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
-    ) -> Iterable[VersionTypeVar]:
-
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases. 
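Aside: the ``installed`` keyword added to ``SpecifierSet.contains`` above is otherwise undocumented in this hunk. A minimal sketch, not part of this diff and based only on the ``base_version`` coercion shown above, of the behavior it enables:

.. code-block:: python

    from packaging.specifiers import SpecifierSet

    specs = SpecifierSet(">=1.2.3")

    # A prerelease orders before its final release, so even with
    # prereleases allowed, 1.2.3rc1 does not satisfy >=1.2.3 ...
    assert not specs.contains("1.2.3rc1", prereleases=True)

    # ... but with installed=True the prerelease is compared by its
    # base version (1.2.3), which does satisfy the specifier.
    assert specs.contains("1.2.3rc1", prereleases=True, installed=True)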
@@ -764,27 +978,16 @@ def filter( if self._specs: for spec in self._specs: iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable + return iter(iterable) # If we do not have any specifiers, then we need to have a rough filter # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. + # releases. else: - filtered: List[VersionTypeVar] = [] - found_prereleases: List[VersionTypeVar] = [] - - item: UnparsedVersion - parsed_version: Union[Version, LegacyVersion] + filtered: List[UnparsedVersionVar] = [] + found_prereleases: List[UnparsedVersionVar] = [] for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue + parsed_version = _coerce_version(item) # Store any item which is a pre-release for later unless we've # already found a final version or we are accepting prereleases @@ -797,6 +1000,6 @@ def filter( # If we've found no items except for pre-releases, then we'll go # ahead and use the pre-releases if not filtered and found_prereleases and prereleases is None: - return found_prereleases + return iter(found_prereleases) - return filtered + return iter(filtered) diff --git a/pkg_resources/_vendor/packaging/tags.py b/pkg_resources/_vendor/packaging/tags.py index 9a3d25a71c..19ccbde3ea 100644 --- a/pkg_resources/_vendor/packaging/tags.py +++ b/pkg_resources/_vendor/packaging/tags.py @@ -4,6 +4,7 @@ import logging import platform +import subprocess import sys import sysconfig from importlib.machinery import EXTENSION_SUFFIXES @@ -36,7 +37,7 @@ } -_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 +_32_BIT_INTERPRETER = sys.maxsize <= 2**32 class Tag: @@ -224,10 +225,45 @@ def cpython_tags( yield Tag(interpreter, "abi3", platform_) -def _generic_abi() -> Iterator[str]: - abi = sysconfig.get_config_var("SOABI") - if abi: - yield _normalize_string(abi) +def _generic_abi() -> List[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. + # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. + return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? 
+ abi = soabi + else: + return [] + return [_normalize_string(abi)] def generic_tags( @@ -251,8 +287,9 @@ def generic_tags( interpreter = "".join([interp_name, interp_version]) if abis is None: abis = _generic_abi() + else: + abis = list(abis) platforms = list(platforms or platform_tags()) - abis = list(abis) if "none" not in abis: abis.append("none") for abi in abis: @@ -356,6 +393,22 @@ def mac_platforms( version_str, _, cpu_arch = platform.mac_ver() if version is None: version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + universal_newlines=True, + ).stdout + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) else: version = version if arch is None: @@ -446,6 +499,9 @@ def platform_tags() -> Iterator[str]: def interpreter_name() -> str: """ Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. """ name = sys.implementation.name return INTERPRETER_SHORT_NAMES.get(name) or name @@ -482,6 +538,9 @@ def sys_tags(*, warn: bool = False) -> Iterator[Tag]: yield from generic_tags() if interp_name == "pp": - yield from compatible_tags(interpreter="pp3") + interp = "pp3" + elif interp_name == "cp": + interp = "cp" + interpreter_version(warn=warn) else: - yield from compatible_tags() + interp = None + yield from compatible_tags(interpreter=interp) diff --git a/pkg_resources/_vendor/packaging/utils.py b/pkg_resources/_vendor/packaging/utils.py index bab11b80c6..33c613b749 100644 --- a/pkg_resources/_vendor/packaging/utils.py +++ b/pkg_resources/_vendor/packaging/utils.py @@ -35,7 +35,9 @@ def canonicalize_name(name: str) -> NormalizedName: return cast(NormalizedName, value) -def canonicalize_version(version: Union[Version, str]) -> str: +def canonicalize_version( + version: Union[Version, str], *, strip_trailing_zero: bool = True +) -> str: """ This is very similar to Version.__str__, but has one subtle difference with the way it handles the release segment. @@ -56,8 +58,11 @@ def canonicalize_version(version: Union[Version, str]) -> str: parts.append(f"{parsed.epoch}!") # Release segment - # NB: This strips trailing '.0's to normalize - parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) + release_segment = ".".join(str(x) for x in parsed.release) + if strip_trailing_zero: + # NB: This strips trailing '.0's to normalize + release_segment = re.sub(r"(\.0)+$", "", release_segment) + parts.append(release_segment) # Pre-release if parsed.pre is not None: diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py index de9a09a4ed..e5c738cfda 100644 --- a/pkg_resources/_vendor/packaging/version.py +++ b/pkg_resources/_vendor/packaging/version.py @@ -1,16 +1,20 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +""" +.. 
testsetup::
+
+    from packaging.version import parse, Version
+"""
 
 import collections
 import itertools
 import re
-import warnings
-from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+from typing import Callable, Optional, SupportsInt, Tuple, Union
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
-__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
 
 InfiniteTypes = Union[InfinityType, NegativeInfinityType]
 PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
@@ -29,36 +33,37 @@
 CmpKey = Tuple[
     int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
 ]
-LegacyCmpKey = Tuple[int, Tuple[str, ...]]
-VersionComparisonMethod = Callable[
-    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
-]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
 
 _Version = collections.namedtuple(
     "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
 )
 
 
-def parse(version: str) -> Union["LegacyVersion", "Version"]:
-    """
-    Parse the given version string and return either a :class:`Version` object
-    or a :class:`LegacyVersion` object depending on if the given version is
-    a valid PEP 440 version or a legacy version.
+def parse(version: str) -> "Version":
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
     """
-    try:
-        return Version(version)
-    except InvalidVersion:
-        return LegacyVersion(version)
+    return Version(version)
 
 
 class InvalidVersion(ValueError):
-    """
-    An invalid version was found, users should refer to PEP 440.
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ... 
+ packaging.version.InvalidVersion: Invalid version: 'invalid' """ class _BaseVersion: - _key: Union[CmpKey, LegacyCmpKey] + _key: CmpKey def __hash__(self) -> int: return hash(self._key) @@ -103,126 +108,9 @@ def __ne__(self, other: object) -> bool: return self._key != other._key -class LegacyVersion(_BaseVersion): - def __init__(self, version: str) -> None: - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - warnings.warn( - "Creating a LegacyVersion has been deprecated and will be " - "removed in the next major release", - DeprecationWarning, - ) - - def __str__(self) -> str: - return self._version - - def __repr__(self) -> str: - return f"" - - @property - def public(self) -> str: - return self._version - - @property - def base_version(self) -> str: - return self._version - - @property - def epoch(self) -> int: - return -1 - - @property - def release(self) -> None: - return None - - @property - def pre(self) -> None: - return None - - @property - def post(self) -> None: - return None - - @property - def dev(self) -> None: - return None - - @property - def local(self) -> None: - return None - - @property - def is_prerelease(self) -> bool: - return False - - @property - def is_postrelease(self) -> bool: - return False - - @property - def is_devrelease(self) -> bool: - return False - - -_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) - -_legacy_version_replacement_map = { - "pre": "c", - "preview": "c", - "-": "final-", - "rc": "c", - "dev": "@", -} - - -def _parse_version_parts(s: str) -> Iterator[str]: - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version: str) -> LegacyCmpKey: - - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts: List[str] = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - - return epoch, tuple(parts) - - # Deliberately not anchored to the start and end of the string, to make it # easier for 3rd party code to reuse -VERSION_PATTERN = r""" +_VERSION_PATTERN = r""" v? (?: (?:(?P[0-9]+)!)? # epoch @@ -253,12 +141,55 @@ def _legacy_cmpkey(version: str) -> LegacyCmpKey: (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version """ +VERSION_PATTERN = _VERSION_PATTERN +""" +A string containing the regular expression used to match a valid version. + +The pattern is not anchored at either end, and is intended for embedding in larger +expressions (for example, matching a version number as part of a file name). The +regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE`` +flags set. 
+
+:meta hide-value:
+"""
 
 
 class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
 
     _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
 
     def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
 
         # Validate the version and parse it into pieces
         match = self._regex.search(version)
@@ -288,9 +219,19 @@ def __init__(self, version: str) -> None:
         )
 
     def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
         return f"<Version('{self}')>"
 
     def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
         parts = []
 
         # Epoch
@@ -320,29 +261,80 @@ def __str__(self) -> str:
 
     @property
     def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
         _epoch: int = self._version.epoch
         return _epoch
 
     @property
     def release(self) -> Tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
         _release: Tuple[int, ...] = self._version.release
         return _release
 
     @property
     def pre(self) -> Optional[Tuple[str, int]]:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
         _pre: Optional[Tuple[str, int]] = self._version.pre
         return _pre
 
     @property
     def post(self) -> Optional[int]:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
         return self._version.post[1] if self._version.post else None
 
     @property
    def dev(self) -> Optional[int]:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
         return self._version.dev[1] if self._version.dev else None
 
     @property
     def local(self) -> Optional[str]:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
         if self._version.local:
             return ".".join(str(x) for x in self._version.local)
         else:
@@ -350,10 +342,31 @@ def local(self) -> Optional[str]:
 
     @property
     def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1.2.3+abc.dev1").public
+        '1.2.3'
+        """
         return str(self).split("+", 1)[0]
 
     @property
     def base_version(self) -> str:
+        """The "base version" of the version. 
+ + >>> Version("1.2.3").base_version + '1.2.3' + >>> Version("1.2.3+abc").base_version + '1.2.3' + >>> Version("1!1.2.3+abc.dev1").base_version + '1!1.2.3' + + The "base version" is the public version of the project without any pre or post + release markers. + """ parts = [] # Epoch @@ -367,26 +380,72 @@ def base_version(self) -> str: @property def is_prerelease(self) -> bool: + """Whether this version is a pre-release. + + >>> Version("1.2.3").is_prerelease + False + >>> Version("1.2.3a1").is_prerelease + True + >>> Version("1.2.3b1").is_prerelease + True + >>> Version("1.2.3rc1").is_prerelease + True + >>> Version("1.2.3dev1").is_prerelease + True + """ return self.dev is not None or self.pre is not None @property def is_postrelease(self) -> bool: + """Whether this version is a post-release. + + >>> Version("1.2.3").is_postrelease + False + >>> Version("1.2.3.post1").is_postrelease + True + """ return self.post is not None @property def is_devrelease(self) -> bool: + """Whether this version is a development release. + + >>> Version("1.2.3").is_devrelease + False + >>> Version("1.2.3.dev1").is_devrelease + True + """ return self.dev is not None @property def major(self) -> int: + """The first item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").major + 1 + """ return self.release[0] if len(self.release) >= 1 else 0 @property def minor(self) -> int: + """The second item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").minor + 2 + >>> Version("1").minor + 0 + """ return self.release[1] if len(self.release) >= 2 else 0 @property def micro(self) -> int: + """The third item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").micro + 3 + >>> Version("1").micro + 0 + """ return self.release[2] if len(self.release) >= 3 else 0 diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/INSTALLER b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/INSTALLER similarity index 100% rename from pkg_resources/_vendor/packaging-21.3.dist-info/INSTALLER rename to pkg_resources/_vendor/platformdirs-2.6.2.dist-info/INSTALLER diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/METADATA b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/METADATA new file mode 100644 index 0000000000..608afde321 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/METADATA @@ -0,0 +1,253 @@ +Metadata-Version: 2.1 +Name: platformdirs +Version: 2.6.2 +Summary: A small Python package for determining appropriate platform-specific dirs, e.g. a "user data dir". 
+Project-URL: Documentation, https://platformdirs.readthedocs.io +Project-URL: Homepage, https://github.com/platformdirs/platformdirs +Project-URL: Source, https://github.com/platformdirs/platformdirs +Project-URL: Tracker, https://github.com/platformdirs/platformdirs/issues +Maintainer-email: Bernát Gábor , Julian Berman , Ofek Lev , Ronny Pfannschmidt +License-File: LICENSE +Keywords: application,cache,directory,log,user +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Requires-Dist: typing-extensions>=4.4; python_version < '3.8' +Provides-Extra: docs +Requires-Dist: furo>=2022.12.7; extra == 'docs' +Requires-Dist: proselint>=0.13; extra == 'docs' +Requires-Dist: sphinx-autodoc-typehints>=1.19.5; extra == 'docs' +Requires-Dist: sphinx>=5.3; extra == 'docs' +Provides-Extra: test +Requires-Dist: appdirs==1.4.4; extra == 'test' +Requires-Dist: covdefaults>=2.2.2; extra == 'test' +Requires-Dist: pytest-cov>=4; extra == 'test' +Requires-Dist: pytest-mock>=3.10; extra == 'test' +Requires-Dist: pytest>=7.2; extra == 'test' +Description-Content-Type: text/x-rst + +The problem +=========== + +.. image:: https://github.com/platformdirs/platformdirs/workflows/Test/badge.svg + :target: https://github.com/platformdirs/platformdirs/actions?query=workflow%3ATest + +When writing desktop application, finding the right location to store user data +and configuration varies per platform. Even for single-platform apps, there +may by plenty of nuances in figuring out the right location. + +For example, if running on macOS, you should use:: + + ~/Library/Application Support/ + +If on Windows (at least English Win XP) that should be:: + + C:\Documents and Settings\\Application Data\Local Settings\\ + +or possibly:: + + C:\Documents and Settings\\Application Data\\ + +for `roaming profiles `_ but that is another story. + +On Linux (and other Unices), according to the `XDG Basedir Spec`_, it should be:: + + ~/.local/share/ + +.. _XDG Basedir Spec: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html + +``platformdirs`` to the rescue +============================== + +This kind of thing is what the ``platformdirs`` package is for. +``platformdirs`` will help you choose an appropriate: + +- user data dir (``user_data_dir``) +- user config dir (``user_config_dir``) +- user cache dir (``user_cache_dir``) +- site data dir (``site_data_dir``) +- site config dir (``site_config_dir``) +- user log dir (``user_log_dir``) +- user documents dir (``user_documents_dir``) +- user runtime dir (``user_runtime_dir``) + +And also: + +- Is slightly opinionated on the directory names used. Look for "OPINION" in + documentation and code for when an opinion is being applied. 
+ +Example output +============== + +On macOS: + +.. code-block:: pycon + + >>> from platformdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/Users/trentm/Library/Application Support/SuperApp' + >>> site_data_dir(appname, appauthor) + '/Library/Application Support/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/Users/trentm/Library/Caches/SuperApp' + >>> user_log_dir(appname, appauthor) + '/Users/trentm/Library/Logs/SuperApp' + >>> user_documents_dir() + '/Users/trentm/Documents' + >>> user_runtime_dir(appname, appauthor) + '/Users/trentm/Library/Caches/TemporaryItems/SuperApp' + +On Windows 7: + +.. code-block:: pycon + + >>> from platformdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' + >>> user_data_dir(appname, appauthor, roaming=True) + 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' + >>> user_cache_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' + >>> user_log_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' + >>> user_documents_dir() + 'C:\\Users\\trentm\\Documents' + >>> user_runtime_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Temp\\Acme\\SuperApp' + +On Linux: + +.. code-block:: pycon + + >>> from platformdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/home/trentm/.local/share/SuperApp' + >>> site_data_dir(appname, appauthor) + '/usr/local/share/SuperApp' + >>> site_data_dir(appname, appauthor, multipath=True) + '/usr/local/share/SuperApp:/usr/share/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp' + >>> user_log_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp/log' + >>> user_config_dir(appname) + '/home/trentm/.config/SuperApp' + >>> user_documents_dir() + '/home/trentm/Documents' + >>> user_runtime_dir(appname, appauthor) + '/run/user/{os.getuid()}/SuperApp' + >>> site_config_dir(appname) + '/etc/xdg/SuperApp' + >>> os.environ["XDG_CONFIG_DIRS"] = "/etc:/usr/local/etc" + >>> site_config_dir(appname, multipath=True) + '/etc/SuperApp:/usr/local/etc/SuperApp' + +On Android:: + + >>> from platformdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/data/data/com.myApp/files/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/data/data/com.myApp/cache/SuperApp' + >>> user_log_dir(appname, appauthor) + '/data/data/com.myApp/cache/SuperApp/log' + >>> user_config_dir(appname) + '/data/data/com.myApp/shared_prefs/SuperApp' + >>> user_documents_dir() + '/storage/emulated/0/Documents' + >>> user_runtime_dir(appname, appauthor) + '/data/data/com.myApp/cache/SuperApp/tmp' + +Note: Some android apps like Termux and Pydroid are used as shells. These +apps are used by the end user to emulate Linux environment. Presence of +``SHELL`` environment variable is used by Platformdirs to differentiate +between general android apps and android apps used as shells. Shell android +apps also support ``XDG_*`` environment variables. + + +``PlatformDirs`` for convenience +================================ + +.. 
+.. code-block:: pycon
+
+    >>> from platformdirs import PlatformDirs
+    >>> dirs = PlatformDirs("SuperApp", "Acme")
+    >>> dirs.user_data_dir
+    '/Users/trentm/Library/Application Support/SuperApp'
+    >>> dirs.site_data_dir
+    '/Library/Application Support/SuperApp'
+    >>> dirs.user_cache_dir
+    '/Users/trentm/Library/Caches/SuperApp'
+    >>> dirs.user_log_dir
+    '/Users/trentm/Library/Logs/SuperApp'
+    >>> dirs.user_documents_dir
+    '/Users/trentm/Documents'
+    >>> dirs.user_runtime_dir
+    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
+
+Per-version isolation
+=====================
+
+If you have multiple versions of your app in use that you want to be able to
+run side by side, you may want version isolation for these dirs::
+
+    >>> from platformdirs import PlatformDirs
+    >>> dirs = PlatformDirs("SuperApp", "Acme", version="1.0")
+    >>> dirs.user_data_dir
+    '/Users/trentm/Library/Application Support/SuperApp/1.0'
+    >>> dirs.site_data_dir
+    '/Library/Application Support/SuperApp/1.0'
+    >>> dirs.user_cache_dir
+    '/Users/trentm/Library/Caches/SuperApp/1.0'
+    >>> dirs.user_log_dir
+    '/Users/trentm/Library/Logs/SuperApp/1.0'
+    >>> dirs.user_documents_dir
+    '/Users/trentm/Documents'
+    >>> dirs.user_runtime_dir
+    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp/1.0'
+
+Be wary of using this for configuration files, though; you'll need to handle
+migrating configuration files between versions manually (one possible
+approach is sketched after this section).
+
+Why this Fork?
+==============
+
+This repository is a friendly fork of the wonderful work started by
+`ActiveState `_, who created ``appdirs``, this package's ancestor.
+
+Maintaining an open source project is no easy task, particularly
+from within an organization, and the Python community is indebted
+to ``appdirs`` (and to Trent Mick and Jeff Rouse in particular) for
+creating an incredibly useful, simple module, as evidenced by the
+large number of users it has attracted over the years.
+
+Nonetheless, given the number of long-standing open issues
+and pull requests, and no clear path towards `ensuring
+that maintenance of the package would continue or grow
+`_, this fork was
+created.
+
+Contributions are most welcome.
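[Editor's note: to make the migration caveat above concrete, here is one possible approach — a sketch, not anything ``platformdirs`` provides: on startup, if the new version's config directory is still empty, copy the previous version's files forward once. The version strings and flat-file layout are assumptions of the example.]

.. code-block:: python

    import shutil
    from pathlib import Path

    from platformdirs import PlatformDirs

    # Hypothetical old and new versions of the same app.
    old_cfg = Path(PlatformDirs("SuperApp", "Acme", version="1.0").user_config_dir)
    new_cfg = Path(PlatformDirs("SuperApp", "Acme", version="2.0").user_config_dir)

    new_cfg.mkdir(parents=True, exist_ok=True)
    if old_cfg.is_dir() and not any(new_cfg.iterdir()):
        # First run of 2.0: seed it with the 1.0 settings.
        for item in old_cfg.iterdir():
            if item.is_file():
                shutil.copy2(item, new_cfg / item.name)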
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD new file mode 100644 index 0000000000..843a5baf9d --- /dev/null +++ b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD @@ -0,0 +1,23 @@ +platformdirs-2.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +platformdirs-2.6.2.dist-info/METADATA,sha256=rDoFsb9-2tVym02IIeYCoKgGaCpY2v8xw8WWXywxhIM,9502 +platformdirs-2.6.2.dist-info/RECORD,, +platformdirs-2.6.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +platformdirs-2.6.2.dist-info/WHEEL,sha256=NaLmgHHW_f9jTvv_wRh9vcK7c7EK9o5fwsIXMOzoGgM,87 +platformdirs-2.6.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089 +platformdirs/__init__.py,sha256=td0a-fHENmnG8ess2WRoysKv9ud5j6TQ-p_iUM_uE18,12864 +platformdirs/__main__.py,sha256=VsC0t5m-6f0YVr96PVks93G3EDF8MSNY4KpUMvPahDA,1164 +platformdirs/__pycache__/__init__.cpython-311.pyc,, +platformdirs/__pycache__/__main__.cpython-311.pyc,, +platformdirs/__pycache__/android.cpython-311.pyc,, +platformdirs/__pycache__/api.cpython-311.pyc,, +platformdirs/__pycache__/macos.cpython-311.pyc,, +platformdirs/__pycache__/unix.cpython-311.pyc,, +platformdirs/__pycache__/version.cpython-311.pyc,, +platformdirs/__pycache__/windows.cpython-311.pyc,, +platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068 +platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910 +platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655 +platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +platformdirs/unix.py,sha256=P-WQjSSieE38DXjMDa1t4XHnKJQ5idEaKT0PyXwm8KQ,6911 +platformdirs/version.py,sha256=qaN-fw_htIgKUVXoAuAEVgKxQu3tZ9qE2eiKkWIS7LA,160 +platformdirs/windows.py,sha256=LOrXLgI0CjQldDo2zhOZYGYZ6g4e_cJOCB_pF9aMRWQ,6596 diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/REQUESTED b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/REQUESTED similarity index 100% rename from pkg_resources/_vendor/packaging-21.3.dist-info/REQUESTED rename to pkg_resources/_vendor/platformdirs-2.6.2.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/WHEEL b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/WHEEL new file mode 100644 index 0000000000..6d803659b7 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.11.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/licenses/LICENSE b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/licenses/LICENSE new file mode 100644 index 0000000000..f35fed9191 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010-202x The platformdirs developers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/pkg_resources/_vendor/platformdirs/__init__.py b/pkg_resources/_vendor/platformdirs/__init__.py
new file mode 100644
index 0000000000..aef2821b83
--- /dev/null
+++ b/pkg_resources/_vendor/platformdirs/__init__.py
@@ -0,0 +1,342 @@
+"""
+Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
+usage.
+"""
+from __future__ import annotations
+
+import os
+import sys
+from pathlib import Path
+
+if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
+    from typing import Literal
+else:  # pragma: no cover (py38+)
+    from ..typing_extensions import Literal
+
+from .api import PlatformDirsABC
+from .version import __version__
+from .version import __version_tuple__ as __version_info__
+
+
+def _set_platform_dir_class() -> type[PlatformDirsABC]:
+    if sys.platform == "win32":
+        from .windows import Windows as Result
+    elif sys.platform == "darwin":
+        from .macos import MacOS as Result
+    else:
+        from .unix import Unix as Result
+
+    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
+
+        if os.getenv("SHELL") or os.getenv("PREFIX"):
+            return Result
+
+        from .android import _android_folder
+
+        if _android_folder() is not None:
+            from .android import Android
+
+            return Android  # return to avoid redefinition of result
+
+    return Result
+
+
+PlatformDirs = _set_platform_dir_class()  #: Currently active platform
+AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
+
+
+def user_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :returns: data directory tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
+
+
+def site_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath `.
+    :returns: data directory shared by users
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
+
+
+def user_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :returns: config directory tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
+
+
+def site_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath `.
+    :returns: config directory shared by the users
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
+
+
+def user_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :returns: cache directory tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
+
+
+def user_state_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :returns: state directory tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
+
+
+def user_log_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :returns: log directory tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
+
+
+def user_documents_dir() -> str:
+    """
+    :returns: documents directory tied to the user
+    """
+    return PlatformDirs().user_documents_dir
+
+
+def user_runtime_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :returns: runtime directory tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir
+
+
+def user_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :returns: data path tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
+
+
+def site_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath `.
+    :returns: data path shared by users
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path
+
+
+def user_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :returns: config path tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
+
+
+def site_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath `.
+    :returns: config path shared by the users
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
+
+
+def user_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :returns: cache path tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
+
+
+def user_state_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :returns: state path tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
+
+
+def user_log_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :returns: log path tied to the user
+    """
+    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
+
+
+def user_documents_path() -> Path:
+    """
+    :returns: documents path tied to the user
+    """
+    return PlatformDirs().user_documents_path
+
+
+def user_runtime_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+ :returns: runtime path tied to the user + """ + return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path + + +__all__ = [ + "__version__", + "__version_info__", + "PlatformDirs", + "AppDirs", + "PlatformDirsABC", + "user_data_dir", + "user_config_dir", + "user_cache_dir", + "user_state_dir", + "user_log_dir", + "user_documents_dir", + "user_runtime_dir", + "site_data_dir", + "site_config_dir", + "user_data_path", + "user_config_path", + "user_cache_path", + "user_state_path", + "user_log_path", + "user_documents_path", + "user_runtime_path", + "site_data_path", + "site_config_path", +] diff --git a/pkg_resources/_vendor/platformdirs/__main__.py b/pkg_resources/_vendor/platformdirs/__main__.py new file mode 100644 index 0000000000..0fc1edd59c --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/__main__.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from platformdirs import PlatformDirs, __version__ + +PROPS = ( + "user_data_dir", + "user_config_dir", + "user_cache_dir", + "user_state_dir", + "user_log_dir", + "user_documents_dir", + "user_runtime_dir", + "site_data_dir", + "site_config_dir", +) + + +def main() -> None: + app_name = "MyApp" + app_author = "MyCompany" + + print(f"-- platformdirs {__version__} --") + + print("-- app dirs (with optional 'version')") + dirs = PlatformDirs(app_name, app_author, version="1.0") + for prop in PROPS: + print(f"{prop}: {getattr(dirs, prop)}") + + print("\n-- app dirs (without optional 'version')") + dirs = PlatformDirs(app_name, app_author) + for prop in PROPS: + print(f"{prop}: {getattr(dirs, prop)}") + + print("\n-- app dirs (without optional 'appauthor')") + dirs = PlatformDirs(app_name) + for prop in PROPS: + print(f"{prop}: {getattr(dirs, prop)}") + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = PlatformDirs(app_name, appauthor=False) + for prop in PROPS: + print(f"{prop}: {getattr(dirs, prop)}") + + +if __name__ == "__main__": + main() diff --git a/pkg_resources/_vendor/platformdirs/android.py b/pkg_resources/_vendor/platformdirs/android.py new file mode 100644 index 0000000000..eda8093512 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/android.py @@ -0,0 +1,120 @@ +from __future__ import annotations + +import os +import re +import sys +from functools import lru_cache +from typing import cast + +from .api import PlatformDirsABC + + +class Android(PlatformDirsABC): + """ + Follows the guidance `from here `_. Makes use of the + `appname ` and + `version `. + """ + + @property + def user_data_dir(self) -> str: + """:return: data directory tied to the user, e.g. ``/data/user///files/``""" + return self._append_app_name_and_version(cast(str, _android_folder()), "files") + + @property + def site_data_dir(self) -> str: + """:return: data directory shared by users, same as `user_data_dir`""" + return self.user_data_dir + + @property + def user_config_dir(self) -> str: + """ + :return: config directory tied to the user, e.g. ``/data/user///shared_prefs/`` + """ + return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs") + + @property + def site_config_dir(self) -> str: + """:return: config directory shared by the users, same as `user_config_dir`""" + return self.user_config_dir + + @property + def user_cache_dir(self) -> str: + """:return: cache directory tied to the user, e.g. e.g. 
``/data/user///cache/``""" + return self._append_app_name_and_version(cast(str, _android_folder()), "cache") + + @property + def user_state_dir(self) -> str: + """:return: state directory tied to the user, same as `user_data_dir`""" + return self.user_data_dir + + @property + def user_log_dir(self) -> str: + """ + :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it, + e.g. ``/data/user///cache//log`` + """ + path = self.user_cache_dir + if self.opinion: + path = os.path.join(path, "log") + return path + + @property + def user_documents_dir(self) -> str: + """ + :return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents`` + """ + return _android_documents_folder() + + @property + def user_runtime_dir(self) -> str: + """ + :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it, + e.g. ``/data/user///cache//tmp`` + """ + path = self.user_cache_dir + if self.opinion: + path = os.path.join(path, "tmp") + return path + + +@lru_cache(maxsize=1) +def _android_folder() -> str | None: + """:return: base folder for the Android OS or None if cannot be found""" + try: + # First try to get path to android app via pyjnius + from jnius import autoclass + + Context = autoclass("android.content.Context") # noqa: N806 + result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath() + except Exception: + # if fails find an android folder looking path on the sys.path + pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files") + for path in sys.path: + if pattern.match(path): + result = path.split("/files")[0] + break + else: + result = None + return result + + +@lru_cache(maxsize=1) +def _android_documents_folder() -> str: + """:return: documents folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + Context = autoclass("android.content.Context") # noqa: N806 + Environment = autoclass("android.os.Environment") # noqa: N806 + documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath() + except Exception: + documents_dir = "/storage/emulated/0/Documents" + + return documents_dir + + +__all__ = [ + "Android", +] diff --git a/pkg_resources/_vendor/platformdirs/api.py b/pkg_resources/_vendor/platformdirs/api.py new file mode 100644 index 0000000000..6f6e2c2c69 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/api.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +import os +import sys +from abc import ABC, abstractmethod +from pathlib import Path + +if sys.version_info >= (3, 8): # pragma: no branch + from typing import Literal # pragma: no cover + + +class PlatformDirsABC(ABC): + """ + Abstract base class for platform directories. + """ + + def __init__( + self, + appname: str | None = None, + appauthor: str | None | Literal[False] = None, + version: str | None = None, + roaming: bool = False, + multipath: bool = False, + opinion: bool = True, + ): + """ + Create a new platform directory. + + :param appname: See `appname`. + :param appauthor: See `appauthor`. + :param version: See `version`. + :param roaming: See `roaming`. + :param multipath: See `multipath`. + :param opinion: See `opinion`. + """ + self.appname = appname #: The name of application. + self.appauthor = appauthor + """ + The name of the app author or distributing body for this application. Typically, it is the owning company name. + Defaults to `appname`. You may pass ``False`` to disable it. 
+ """ + self.version = version + """ + An optional version path element to append to the path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this would typically be ``.``. + """ + self.roaming = roaming + """ + Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup + for roaming profiles, this user data will be synced on login (see + `here `_). + """ + self.multipath = multipath + """ + An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be + returned. By default, the first item would only be returned. + """ + self.opinion = opinion #: A flag to indicating to use opinionated values. + + def _append_app_name_and_version(self, *base: str) -> str: + params = list(base[1:]) + if self.appname: + params.append(self.appname) + if self.version: + params.append(self.version) + return os.path.join(base[0], *params) + + @property + @abstractmethod + def user_data_dir(self) -> str: + """:return: data directory tied to the user""" + + @property + @abstractmethod + def site_data_dir(self) -> str: + """:return: data directory shared by users""" + + @property + @abstractmethod + def user_config_dir(self) -> str: + """:return: config directory tied to the user""" + + @property + @abstractmethod + def site_config_dir(self) -> str: + """:return: config directory shared by the users""" + + @property + @abstractmethod + def user_cache_dir(self) -> str: + """:return: cache directory tied to the user""" + + @property + @abstractmethod + def user_state_dir(self) -> str: + """:return: state directory tied to the user""" + + @property + @abstractmethod + def user_log_dir(self) -> str: + """:return: log directory tied to the user""" + + @property + @abstractmethod + def user_documents_dir(self) -> str: + """:return: documents directory tied to the user""" + + @property + @abstractmethod + def user_runtime_dir(self) -> str: + """:return: runtime directory tied to the user""" + + @property + def user_data_path(self) -> Path: + """:return: data path tied to the user""" + return Path(self.user_data_dir) + + @property + def site_data_path(self) -> Path: + """:return: data path shared by users""" + return Path(self.site_data_dir) + + @property + def user_config_path(self) -> Path: + """:return: config path tied to the user""" + return Path(self.user_config_dir) + + @property + def site_config_path(self) -> Path: + """:return: config path shared by the users""" + return Path(self.site_config_dir) + + @property + def user_cache_path(self) -> Path: + """:return: cache path tied to the user""" + return Path(self.user_cache_dir) + + @property + def user_state_path(self) -> Path: + """:return: state path tied to the user""" + return Path(self.user_state_dir) + + @property + def user_log_path(self) -> Path: + """:return: log path tied to the user""" + return Path(self.user_log_dir) + + @property + def user_documents_path(self) -> Path: + """:return: documents path tied to the user""" + return Path(self.user_documents_dir) + + @property + def user_runtime_path(self) -> Path: + """:return: runtime path tied to the user""" + return Path(self.user_runtime_dir) diff --git a/pkg_resources/_vendor/platformdirs/macos.py b/pkg_resources/_vendor/platformdirs/macos.py new file mode 100644 index 0000000000..a01337c776 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/macos.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import os + +from .api import 
PlatformDirsABC + + +class MacOS(PlatformDirsABC): + """ + Platform directories for the macOS operating system. Follows the guidance from `Apple documentation + `_. + Makes use of the `appname ` and + `version `. + """ + + @property + def user_data_dir(self) -> str: + """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``""" + return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/")) + + @property + def site_data_dir(self) -> str: + """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``""" + return self._append_app_name_and_version("/Library/Application Support") + + @property + def user_config_dir(self) -> str: + """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``""" + return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/")) + + @property + def site_config_dir(self) -> str: + """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``""" + return self._append_app_name_and_version("/Library/Preferences") + + @property + def user_cache_dir(self) -> str: + """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``""" + return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) + + @property + def user_state_dir(self) -> str: + """:return: state directory tied to the user, same as `user_data_dir`""" + return self.user_data_dir + + @property + def user_log_dir(self) -> str: + """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``""" + return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) + + @property + def user_documents_dir(self) -> str: + """:return: documents directory tied to the user, e.g. ``~/Documents``""" + return os.path.expanduser("~/Documents") + + @property + def user_runtime_dir(self) -> str: + """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``""" + return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems")) + + +__all__ = [ + "MacOS", +] diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/REQUESTED b/pkg_resources/_vendor/platformdirs/py.typed similarity index 100% rename from pkg_resources/_vendor/pyparsing-2.2.1.dist-info/REQUESTED rename to pkg_resources/_vendor/platformdirs/py.typed diff --git a/pkg_resources/_vendor/platformdirs/unix.py b/pkg_resources/_vendor/platformdirs/unix.py new file mode 100644 index 0000000000..9aca5a0305 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/unix.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +import os +import sys +from configparser import ConfigParser +from pathlib import Path + +from .api import PlatformDirsABC + +if sys.platform.startswith("linux"): # pragma: no branch # no op check, only to please the type checker + from os import getuid +else: + + def getuid() -> int: + raise RuntimeError("should only be used on Linux") + + +class Unix(PlatformDirsABC): + """ + On Unix/Linux, we follow the + `XDG Basedir Spec `_. The spec allows + overriding directories with environment variables. The examples show are the default values, alongside the name of + the environment variable that overrides them. Makes use of the + `appname `, + `version `, + `multipath `, + `opinion `. + """ + + @property + def user_data_dir(self) -> str: + """ + :return: data directory tied to the user, e.g. 
``~/.local/share/$appname/$version`` or + ``$XDG_DATA_HOME/$appname/$version`` + """ + path = os.environ.get("XDG_DATA_HOME", "") + if not path.strip(): + path = os.path.expanduser("~/.local/share") + return self._append_app_name_and_version(path) + + @property + def site_data_dir(self) -> str: + """ + :return: data directories shared by users (if `multipath ` is + enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS + path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version`` + """ + # XDG default for $XDG_DATA_DIRS; only first, if multipath is False + path = os.environ.get("XDG_DATA_DIRS", "") + if not path.strip(): + path = f"/usr/local/share{os.pathsep}/usr/share" + return self._with_multi_path(path) + + def _with_multi_path(self, path: str) -> str: + path_list = path.split(os.pathsep) + if not self.multipath: + path_list = path_list[0:1] + path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list] + return os.pathsep.join(path_list) + + @property + def user_config_dir(self) -> str: + """ + :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or + ``$XDG_CONFIG_HOME/$appname/$version`` + """ + path = os.environ.get("XDG_CONFIG_HOME", "") + if not path.strip(): + path = os.path.expanduser("~/.config") + return self._append_app_name_and_version(path) + + @property + def site_config_dir(self) -> str: + """ + :return: config directories shared by users (if `multipath ` + is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS + path separator), e.g. ``/etc/xdg/$appname/$version`` + """ + # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False + path = os.environ.get("XDG_CONFIG_DIRS", "") + if not path.strip(): + path = "/etc/xdg" + return self._with_multi_path(path) + + @property + def user_cache_dir(self) -> str: + """ + :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or + ``~/$XDG_CACHE_HOME/$appname/$version`` + """ + path = os.environ.get("XDG_CACHE_HOME", "") + if not path.strip(): + path = os.path.expanduser("~/.cache") + return self._append_app_name_and_version(path) + + @property + def user_state_dir(self) -> str: + """ + :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or + ``$XDG_STATE_HOME/$appname/$version`` + """ + path = os.environ.get("XDG_STATE_HOME", "") + if not path.strip(): + path = os.path.expanduser("~/.local/state") + return self._append_app_name_and_version(path) + + @property + def user_log_dir(self) -> str: + """ + :return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it + """ + path = self.user_state_dir + if self.opinion: + path = os.path.join(path, "log") + return path + + @property + def user_documents_dir(self) -> str: + """ + :return: documents directory tied to the user, e.g. ``~/Documents`` + """ + documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR") + if documents_dir is None: + documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip() + if not documents_dir: + documents_dir = os.path.expanduser("~/Documents") + + return documents_dir + + @property + def user_runtime_dir(self) -> str: + """ + :return: runtime directory tied to the user, e.g. 
``/run/user/$(id -u)/$appname/$version`` or + ``$XDG_RUNTIME_DIR/$appname/$version`` + """ + path = os.environ.get("XDG_RUNTIME_DIR", "") + if not path.strip(): + path = f"/run/user/{getuid()}" + return self._append_app_name_and_version(path) + + @property + def site_data_path(self) -> Path: + """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``""" + return self._first_item_as_path_if_multipath(self.site_data_dir) + + @property + def site_config_path(self) -> Path: + """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``""" + return self._first_item_as_path_if_multipath(self.site_config_dir) + + def _first_item_as_path_if_multipath(self, directory: str) -> Path: + if self.multipath: + # If multipath is True, the first path is returned. + directory = directory.split(os.pathsep)[0] + return Path(directory) + + +def _get_user_dirs_folder(key: str) -> str | None: + """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/""" + user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs") + if os.path.exists(user_dirs_config_path): + parser = ConfigParser() + + with open(user_dirs_config_path) as stream: + # Add fake section header, so ConfigParser doesn't complain + parser.read_string(f"[top]\n{stream.read()}") + + if key not in parser["top"]: + return None + + path = parser["top"][key].strip('"') + # Handle relative home paths + path = path.replace("$HOME", os.path.expanduser("~")) + return path + + return None + + +__all__ = [ + "Unix", +] diff --git a/pkg_resources/_vendor/platformdirs/version.py b/pkg_resources/_vendor/platformdirs/version.py new file mode 100644 index 0000000000..9f6eb98e8f --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/version.py @@ -0,0 +1,4 @@ +# file generated by setuptools_scm +# don't change, don't track in version control +__version__ = version = '2.6.2' +__version_tuple__ = version_tuple = (2, 6, 2) diff --git a/pkg_resources/_vendor/platformdirs/windows.py b/pkg_resources/_vendor/platformdirs/windows.py new file mode 100644 index 0000000000..d5c27b3414 --- /dev/null +++ b/pkg_resources/_vendor/platformdirs/windows.py @@ -0,0 +1,184 @@ +from __future__ import annotations + +import ctypes +import os +import sys +from functools import lru_cache +from typing import Callable + +from .api import PlatformDirsABC + + +class Windows(PlatformDirsABC): + """`MSDN on where to store app data files + `_. + Makes use of the + `appname `, + `appauthor `, + `version `, + `roaming `, + `opinion `.""" + + @property + def user_data_dir(self) -> str: + """ + :return: data directory tied to the user, e.g. + ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or + ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming) + """ + const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(get_win_folder(const)) + return self._append_parts(path) + + def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str: + params = [] + if self.appname: + if self.appauthor is not False: + author = self.appauthor or self.appname + params.append(author) + params.append(self.appname) + if opinion_value is not None and self.opinion: + params.append(opinion_value) + if self.version: + params.append(self.version) + return os.path.join(path, *params) + + @property + def site_data_dir(self) -> str: + """:return: data directory shared by users, e.g. 
``C:\\ProgramData\\$appauthor\\$appname``""" + path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA")) + return self._append_parts(path) + + @property + def user_config_dir(self) -> str: + """:return: config directory tied to the user, same as `user_data_dir`""" + return self.user_data_dir + + @property + def site_config_dir(self) -> str: + """:return: config directory shared by the users, same as `site_data_dir`""" + return self.site_data_dir + + @property + def user_cache_dir(self) -> str: + """ + :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g. + ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version`` + """ + path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA")) + return self._append_parts(path, opinion_value="Cache") + + @property + def user_state_dir(self) -> str: + """:return: state directory tied to the user, same as `user_data_dir`""" + return self.user_data_dir + + @property + def user_log_dir(self) -> str: + """ + :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it + """ + path = self.user_data_dir + if self.opinion: + path = os.path.join(path, "Logs") + return path + + @property + def user_documents_dir(self) -> str: + """ + :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents`` + """ + return os.path.normpath(get_win_folder("CSIDL_PERSONAL")) + + @property + def user_runtime_dir(self) -> str: + """ + :return: runtime directory tied to the user, e.g. + ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname`` + """ + path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp")) + return self._append_parts(path) + + +def get_win_folder_from_env_vars(csidl_name: str) -> str: + """Get folder from environment variables.""" + if csidl_name == "CSIDL_PERSONAL": # does not have an environment name + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") + + env_var_name = { + "CSIDL_APPDATA": "APPDATA", + "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE", + "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA", + }.get(csidl_name) + if env_var_name is None: + raise ValueError(f"Unknown CSIDL name: {csidl_name}") + result = os.environ.get(env_var_name) + if result is None: + raise ValueError(f"Unset environment variable: {env_var_name}") + return result + + +def get_win_folder_from_registry(csidl_name: str) -> str: + """Get folder from the registry. + + This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. 
+ """ + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + "CSIDL_PERSONAL": "Personal", + }.get(csidl_name) + if shell_folder_name is None: + raise ValueError(f"Unknown CSIDL name: {csidl_name}") + if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows + raise NotImplementedError + import winreg + + key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders") + directory, _ = winreg.QueryValueEx(key, shell_folder_name) + return str(directory) + + +def get_win_folder_via_ctypes(csidl_name: str) -> str: + """Get folder with ctypes.""" + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + "CSIDL_PERSONAL": 5, + }.get(csidl_name) + if csidl_const is None: + raise ValueError(f"Unknown CSIDL name: {csidl_name}") + + buf = ctypes.create_unicode_buffer(1024) + windll = getattr(ctypes, "windll") # noqa: B009 # using getattr to avoid false positive with mypy type checker + windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if it has highbit chars. + if any(ord(c) > 255 for c in buf): + buf2 = ctypes.create_unicode_buffer(1024) + if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + + +def _pick_get_win_folder() -> Callable[[str], str]: + if hasattr(ctypes, "windll"): + return get_win_folder_via_ctypes + try: + import winreg # noqa: F401 + except ImportError: + return get_win_folder_from_env_vars + else: + return get_win_folder_from_registry + + +get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder()) + +__all__ = [ + "Windows", +] diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst deleted file mode 100644 index e1187231a3..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,3 +0,0 @@ -UNKNOWN - - diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt deleted file mode 100644 index bbc959e0d6..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt +++ /dev/null @@ -1,18 +0,0 @@ -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
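[Editor's note: stepping back to the vendored ``platformdirs/windows.py`` above — it resolves Windows known folders by trying ``ctypes.windll`` first, then the registry via ``winreg``, then environment variables, and the chosen resolver is picked once at import time and memoized with ``functools.lru_cache``. As a rough illustration of exercising that backend directly (only meaningful on an actual Windows host; the ``"SuperApp"``/``"Acme"`` names are placeholders):]

.. code-block:: python

    import sys

    if sys.platform == "win32":
        # The standalone platformdirs distribution exposes the same class;
        # the vendored copy lives under pkg_resources._vendor.
        from platformdirs.windows import Windows

        dirs = Windows(appname="SuperApp", appauthor="Acme", roaming=True)
        # roaming=True only affects the data/config dirs (CSIDL_APPDATA);
        # the cache dir always uses CSIDL_LOCAL_APPDATA plus a "Cache" leaf.
        print(dirs.user_data_dir)   # e.g. C:\Users\me\AppData\Roaming\Acme\SuperApp
        print(dirs.user_cache_dir)  # e.g. C:\Users\me\AppData\Local\Acme\SuperApp\Cache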
diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/METADATA b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/METADATA deleted file mode 100644 index a15c350e36..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/METADATA +++ /dev/null @@ -1,30 +0,0 @@ -Metadata-Version: 2.0 -Name: pyparsing -Version: 2.2.1 -Summary: Python parsing module -Home-page: https://github.com/pyparsing/pyparsing/ -Author: Paul McGuire -Author-email: ptmcg@users.sourceforge.net -License: MIT License -Download-URL: https://pypi.org/project/pyparsing/ -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* - -UNKNOWN - - diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/RECORD b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/RECORD deleted file mode 100644 index 09cc30e34f..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -__pycache__/pyparsing.cpython-310.pyc,, -pyparsing-2.2.1.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 -pyparsing-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyparsing-2.2.1.dist-info/LICENSE.txt,sha256=081Pq74Spe1XdwrGkewNKSqa078kLIh7UWI-wVjdj8I,1041 -pyparsing-2.2.1.dist-info/METADATA,sha256=I0jhx9vpUYlQXjn4gVDnFFoAt3nNrxwR4iuqA_pknYs,1091 -pyparsing-2.2.1.dist-info/RECORD,, -pyparsing-2.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pyparsing-2.2.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 -pyparsing-2.2.1.dist-info/metadata.json,sha256=v1_77-dSdajUZSItSJg8Ov9M713STY3PzhyrRvs1ax4,1185 -pyparsing-2.2.1.dist-info/top_level.txt,sha256=eUOjGzJVhlQ3WS2rFAy2mN3LX_7FKTM5GSJ04jfnLmU,10 -pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/WHEEL b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/WHEEL deleted file mode 100644 index 7332a419cd..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.30.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/metadata.json b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/metadata.json deleted file mode 100644 index b760b766b0..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming 
Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "download_url": "https://pypi.org/project/pyparsing/", "extensions": {"python.details": {"contacts": [{"email": "ptmcg@users.sourceforge.net", "name": "Paul McGuire", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pyparsing/pyparsing/"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT License", "metadata_version": "2.0", "name": "pyparsing", "requires_python": ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*", "summary": "Python parsing module", "version": "2.2.1"} \ No newline at end of file diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt deleted file mode 100644 index 210dfec50b..0000000000 --- a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pyparsing diff --git a/pkg_resources/_vendor/pyparsing.py b/pkg_resources/_vendor/pyparsing.py deleted file mode 100644 index cf75e1e5fc..0000000000 --- a/pkg_resources/_vendor/pyparsing.py +++ /dev/null @@ -1,5742 +0,0 @@ -# module pyparsing.py -# -# Copyright (c) 2003-2018 Paul T. McGuire -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = \ -""" -pyparsing module - Classes and methods to define and execute parsing grammars -============================================================================= - -The pyparsing module is an alternative approach to creating and executing simple grammars, -vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you -don't need to learn a new syntax for defining grammars or matching expressions - the parsing module -provides a library of classes that you use to construct the grammar directly in Python. - -Here is a program to parse "Hello, World!" (or any greeting of the form -C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements -(L{'+'} operator gives L{And} expressions, strings are auto-converted to -L{Literal} expressions):: - - from pyparsing import Word, alphas - - # define grammar of a greeting - greet = Word(alphas) + "," + Word(alphas) + "!" - - hello = "Hello, World!" 
- print (hello, "->", greet.parseString(hello)) - -The program outputs the following:: - - Hello, World! -> ['Hello', ',', 'World', '!'] - -The Python representation of the grammar is quite readable, owing to the self-explanatory -class names, and the use of '+', '|' and '^' operators. - -The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an -object with named attributes. - -The pyparsing module handles some of the problems that are typically vexing when writing text parsers: - - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - - quoted strings - - embedded comments - - -Getting Started - ------------------ -Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing -classes inherit from. Use the docstrings for examples of how to: - - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes - - construct character word-group expressions using the L{Word} class - - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes - - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones - - associate names with your parsed results using L{ParserElement.setResultsName} - - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} - - find more useful common expressions in the L{pyparsing_common} namespace class -""" - -__version__ = "2.2.1" -__versionTime__ = "18 Sep 2018 00:49 UTC" -__author__ = "Paul McGuire " - -import string -from weakref import ref as wkref -import copy -import sys -import warnings -import re -import sre_constants -import collections -import pprint -import traceback -import types -from datetime import datetime - -try: - from _thread import RLock -except ImportError: - from threading import RLock - -try: - # Python 3 - from collections.abc import Iterable - from collections.abc import MutableMapping -except ImportError: - # Python 2.7 - from collections import Iterable - from collections import MutableMapping - -try: - from collections import OrderedDict as _OrderedDict -except ImportError: - try: - from ordereddict import OrderedDict as _OrderedDict - except ImportError: - _OrderedDict = None - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 
'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', -] - -system_version = tuple(sys.version_info)[:3] -PY_3 = system_version[0] == 3 -if PY_3: - _MAX_INT = sys.maxsize - basestring = str - unichr = chr - _ustr = str - - # build list of single arg builtins, that can be used as parse actions - singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] - -else: - _MAX_INT = sys.maxint - range = xrange - - def _ustr(obj): - """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries - str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It - then < returns the unicode object | encodes it with the default encoding | ... >. - """ - if isinstance(obj,unicode): - return obj - - try: - # If this works, then _ustr(obj) has the same behaviour as str(obj), so - # it won't break any existing code. - return str(obj) - - except UnicodeEncodeError: - # Else encode it - ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') - xmlcharref = Regex(r'&#\d+;') - xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) - return xmlcharref.transformString(ret) - - # build list of single arg builtins, tolerant of Python version, that can be used as parse actions - singleArgBuiltins = [] - import __builtin__ - for fname in "sum len sorted reversed list tuple set any all min max".split(): - try: - singleArgBuiltins.append(getattr(__builtin__,fname)) - except AttributeError: - continue - -_generatorType = type((y for y in range(1))) - -def _xml_escape(data): - """Escape &, <, >, ", ', etc. 
in a string of data.""" - - # ampersand must be replaced first - from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): - data = data.replace(from_, to_) - return data - -class _Constants(object): - pass - -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) -printables = "".join(c for c in string.printable if c not in string.whitespace) - -class ParseBaseException(Exception): - """base exception class for all parsing runtime exceptions""" - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): - self.loc = loc - if msg is None: - self.msg = pstr - self.pstr = "" - else: - self.msg = msg - self.pstr = pstr - self.parserElement = elem - self.args = (pstr, loc, msg) - - @classmethod - def _from_exception(cls, pe): - """ - internal factory method to simplify creating one type of ParseException - from another - avoids having __init__ signature conflicts among subclasses - """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - - def __getattr__( self, aname ): - """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) - else: - raise AttributeError(aname) - - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): - return _ustr(self) - def markInputline( self, markerString = ">!<" ): - """Extracts the exception line from the input string, and marks - the location of the exception with a special symbol. 
- """ - line_str = self.line - line_column = self.column - 1 - if markerString: - line_str = "".join((line_str[:line_column], - markerString, line_str[line_column:])) - return line_str.strip() - def __dir__(self): - return "lineno col line".split() + dir(type(self)) - -class ParseException(ParseBaseException): - """ - Exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - - Example:: - try: - Word(nums).setName("integer").parseString("ABC") - except ParseException as pe: - print(pe) - print("column: {}".format(pe.col)) - - prints:: - Expected integer (at char 0), (line:1, col:1) - column: 1 - """ - pass - -class ParseFatalException(ParseBaseException): - """user-throwable exception thrown when inconsistent parse content - is found; stops all parsing immediately""" - pass - -class ParseSyntaxException(ParseFatalException): - """just like L{ParseFatalException}, but thrown internally when an - L{ErrorStop} ('-' operator) indicates that parsing is to stop - immediately because an unbacktrackable syntax error has been found""" - pass - -#~ class ReparseException(ParseBaseException): - #~ """Experimental class - parse actions can raise this exception to cause - #~ pyparsing to reparse the input string: - #~ - with a modified input string, and/or - #~ - with a modified start location - #~ Set the values of the ReparseException in the constructor, and raise the - #~ exception in a parse action to cause pyparsing to use the new string/location. - #~ Setting the values as None causes no change to be made. - #~ """ - #~ def __init_( self, newstring, restartLoc ): - #~ self.newParseText = newstring - #~ self.reparseLoc = restartLoc - -class RecursiveGrammarException(Exception): - """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" - def __init__( self, parseElementList ): - self.parseElementTrace = parseElementList - - def __str__( self ): - return "RecursiveGrammarException: %s" % self.parseElementTrace - -class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): - return self.tup[i] - def __repr__(self): - return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) - -class ParseResults(object): - """ - Structured parse results, to provide multiple means of access to the parsed data: - - as a list (C{len(results)}) - - by list index (C{results[0], results[1]}, etc.) 
- - by attribute (C{results.} - see L{ParserElement.setResultsName}) - - Example:: - integer = Word(nums) - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - # equivalent form: - # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - # parseString returns a ParseResults object - result = date_str.parseString("1999/12/31") - - def test(s, fn=repr): - print("%s -> %s" % (s, fn(eval(s)))) - test("list(result)") - test("result[0]") - test("result['month']") - test("result.day") - test("'month' in result") - test("'minutes' in result") - test("result.dump()", str) - prints:: - list(result) -> ['1999', '/', '12', '/', '31'] - result[0] -> '1999' - result['month'] -> '12' - result.day -> '31' - 'month' in result -> True - 'minutes' in result -> False - result.dump() -> ['1999', '/', '12', '/', '31'] - - day: 31 - - month: 12 - - year: 1999 - """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): - if isinstance(toklist, cls): - return toklist - retobj = object.__new__(cls) - retobj.__doinit = True - return retobj - - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): - if self.__doinit: - self.__doinit = False - self.__name = None - self.__parent = None - self.__accumNames = {} - self.__asList = asList - self.__modal = modal - if toklist is None: - toklist = [] - if isinstance(toklist, list): - self.__toklist = toklist[:] - elif isinstance(toklist, _generatorType): - self.__toklist = list(toklist) - else: - self.__toklist = [toklist] - self.__tokdict = dict() - - if name is not None and name: - if not modal: - self.__accumNames[name] = 0 - if isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency - self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] - if asList: - if isinstance(toklist,ParseResults): - self[name] = _ParseResultsWithOffset(toklist.copy(),0) - else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) - self[name].__name = name - else: - try: - self[name] = toklist[0] - except (KeyError,TypeError,IndexError): - self[name] = toklist - - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): - return self.__toklist[i] - else: - if i not in self.__accumNames: - return self.__tokdict[i][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) - - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] - sub = v[0] - elif isinstance(k,(int,slice)): - self.__toklist[k] = v - sub = v - else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] - sub = v - if isinstance(sub,ParseResults): - sub.__parent = wkref(self) - - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) - del self.__toklist[i] - - # convert int to slice - if isinstance(i, int): - if i < 0: - i += mylen - i = slice(i, i+1) - # get removed indices - removed = list(range(*i.indices(mylen))) - removed.reverse() - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for j in removed: - for k, (value, position) in enumerate(occurrences): - 
occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) - else: - del self.__tokdict[i] - - def __contains__( self, k ): - return k in self.__tokdict - - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) - __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): - if hasattr(self.__tokdict, "iterkeys"): - return self.__tokdict.iterkeys() - else: - return iter(self.__tokdict) - - def _itervalues( self ): - return (self[k] for k in self._iterkeys()) - - def _iteritems( self ): - return ((k, self[k]) for k in self._iterkeys()) - - if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys (Python 3.x only).""" - - values = _itervalues - """Returns an iterator of all named result values (Python 3.x only).""" - - items = _iteritems - """Returns an iterator of all named result key-value tuples (Python 3.x only).""" - - else: - iterkeys = _iterkeys - """Returns an iterator of all named result keys (Python 2.x only).""" - - itervalues = _itervalues - """Returns an iterator of all named result values (Python 2.x only).""" - - iteritems = _iteritems - """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - - def keys( self ): - """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iterkeys()) - - def values( self ): - """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.itervalues()) - - def items( self ): - """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iteritems()) - - def haskeys( self ): - """Since keys() returns an iterator, this method is helpful in bypassing - code that looks for the existence of any defined results names.""" - return bool(self.__tokdict) - - def pop( self, *args, **kwargs): - """ - Removes and returns item at specified index (default=C{last}). - Supports both C{list} and C{dict} semantics for C{pop()}. If passed no - argument or an integer argument, it will use C{list} semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use C{dict} - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in C{dict.pop()}. 
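
A quick sketch of the two access protocols this class exposes (editorial addition, not part of the vendored file; it uses only names defined in this module):

    from pyparsing import Word, nums

    integer = Word(nums)
    date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
    result = date_str.parseString("1999/12/31")

    print(list(result))           # list protocol  -> ['1999', '/', '12', '/', '31']
    print(sorted(result.keys()))  # dict protocol  -> ['day', 'month', 'year']
    print(result.haskeys())       # any names defined?  -> True
    print('hour' in result)       # __contains__   -> False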
- - Example:: - def remove_first(tokens): - tokens.pop(0) - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] - - label = Word(alphas) - patt = label("LABEL") + OneOrMore(Word(nums)) - print(patt.parseString("AAB 123 321").dump()) - - # Use pop() in a parse action to remove named result (note that corresponding value is not - # removed from list form of results) - def remove_LABEL(tokens): - tokens.pop("LABEL") - return tokens - patt.addParseAction(remove_LABEL) - print(patt.parseString("AAB 123 321").dump()) - prints:: - ['AAB', '123', '321'] - - LABEL: AAB - - ['AAB', '123', '321'] - """ - if not args: - args = [-1] - for k,v in kwargs.items(): - if k == 'default': - args = (args[0], v) - else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue - - def get(self, key, defaultValue=None): - """ - Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified. - - Similar to C{dict.get()}. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString("1999/12/31") - print(result.get("year")) # -> '1999' - print(result.get("hour", "not specified")) # -> 'not specified' - print(result.get("hour")) # -> None - """ - if key in self: - return self[key] - else: - return defaultValue - - def insert( self, index, insStr ): - """ - Inserts new element at location index in the list of parsed tokens. - - Similar to C{list.insert()}. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to insert the parse location in the front of the parsed results - def insert_locn(locn, tokens): - tokens.insert(0, locn) - print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] - """ - self.__toklist.insert(index, insStr) - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - - def append( self, item ): - """ - Add single element to end of ParseResults list of elements. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to compute the sum of the parsed integers, and add it to the end - def append_sum(tokens): - tokens.append(sum(map(int, tokens))) - print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] - """ - self.__toklist.append(item) - - def extend( self, itemseq ): - """ - Add sequence of elements to end of ParseResults list of elements. 
- - Example:: - patt = OneOrMore(Word(alphas)) - - # use a parse action to append the reverse of the matched strings, to make a palindrome - def make_palindrome(tokens): - tokens.extend(reversed([t[::-1] for t in tokens])) - return ''.join(tokens) - print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' - """ - if isinstance(itemseq, ParseResults): - self += itemseq - else: - self.__toklist.extend(itemseq) - - def clear( self ): - """ - Clear all elements and results names. - """ - del self.__toklist[:] - self.__tokdict.clear() - - def __getattr__( self, name ): - try: - return self[name] - except KeyError: - return "" - - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): - ret = self.copy() - ret += other - return ret - - def __iadd__( self, other ): - if other.__tokdict: - offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset - otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: - self[k] = v - if isinstance(v[0],ParseResults): - v[0].__parent = wkref(self) - - self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) - return self - - def __radd__(self, other): - if isinstance(other,int) and other == 0: - # useful for merging many ParseResults using sum() builtin - return self.copy() - else: - # this may raise a TypeError - so be it - return other + self - - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) - - def __str__( self ): - return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - - def _asStringList( self, sep='' ): - out = [] - for item in self.__toklist: - if out and sep: - out.append(sep) - if isinstance( item, ParseResults ): - out += item._asStringList() - else: - out.append( _ustr(item) ) - return out - - def asList( self ): - """ - Returns the parse results as a nested list of matching tokens, all converted to strings. - - Example:: - patt = OneOrMore(Word(alphas)) - result = patt.parseString("sldkj lsdkj sldkj") - # even though the result prints in string-like form, it is actually a pyparsing ParseResults - print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] - - # Use asList() to create an actual list - result_list = result.asList() - print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] - """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] - - def asDict( self ): - """ - Returns the named parse results as a nested dictionary. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) - - result_dict = result.asDict() - print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} - - # even though a ParseResults supports dict-like access, sometime you just need to have a dict - import json - print(json.dumps(result)) # -> Exception: TypeError: ... 
is not JSON serializable
-            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
-        """
-        if PY_3:
-            item_fn = self.items
-        else:
-            item_fn = self.iteritems
-
-        def toItem(obj):
-            if isinstance(obj, ParseResults):
-                if obj.haskeys():
-                    return obj.asDict()
-                else:
-                    return [toItem(v) for v in obj]
-            else:
-                return obj
-
-        return dict((k,toItem(v)) for k,v in item_fn())
-
-    def copy( self ):
-        """
-        Returns a new copy of a C{ParseResults} object.
-        """
-        ret = ParseResults( self.__toklist )
-        ret.__tokdict = self.__tokdict.copy()
-        ret.__parent = self.__parent
-        ret.__accumNames.update( self.__accumNames )
-        ret.__name = self.__name
-        return ret
-
-    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
-        """
-        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
-        """
-        nl = "\n"
-        out = []
-        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
-                          for v in vlist)
-        nextLevelIndent = indent + "  "
-
-        # collapse out indents if formatting is not desired
-        if not formatted:
-            indent = ""
-            nextLevelIndent = ""
-            nl = ""
-
-        selfTag = None
-        if doctag is not None:
-            selfTag = doctag
-        else:
-            if self.__name:
-                selfTag = self.__name
-
-        if not selfTag:
-            if namedItemsOnly:
-                return ""
-            else:
-                selfTag = "ITEM"
-
-        out += [ nl, indent, "<", selfTag, ">" ]
-
-        for i,res in enumerate(self.__toklist):
-            if isinstance(res,ParseResults):
-                if i in namedItems:
-                    out += [ res.asXML(namedItems[i],
-                                       namedItemsOnly and doctag is None,
-                                       nextLevelIndent,
-                                       formatted)]
-                else:
-                    out += [ res.asXML(None,
-                                       namedItemsOnly and doctag is None,
-                                       nextLevelIndent,
-                                       formatted)]
-            else:
-                # individual token, see if there is a name for it
-                resTag = None
-                if i in namedItems:
-                    resTag = namedItems[i]
-                if not resTag:
-                    if namedItemsOnly:
-                        continue
-                    else:
-                        resTag = "ITEM"
-                xmlBodyText = _xml_escape(_ustr(res))
-                out += [ nl, nextLevelIndent, "<", resTag, ">",
-                         xmlBodyText,
-                         "</", resTag, ">" ]
-
-        out += [ nl, indent, "</", selfTag, ">" ]
-        return "".join(out)
-
-    def __lookup(self,sub):
-        for k,vlist in self.__tokdict.items():
-            for v,loc in vlist:
-                if sub is v:
-                    return k
-        return None
-
-    def getName(self):
-        r"""
-        Returns the results name for this token expression. Useful when several
-        different expressions might match at a particular location.
-
-        Example::
-            integer = Word(nums)
-            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
-            house_number_expr = Suppress('#') + Word(nums, alphanums)
-            user_data = (Group(house_number_expr)("house_number")
-                        | Group(ssn_expr)("ssn")
-                        | Group(integer)("age"))
-            user_info = OneOrMore(user_data)
-
-            result = user_info.parseString("22 111-22-3333 #221B")
-            for item in result:
-                print(item.getName(), ':', item[0])
-        prints::
-            age : 22
-            ssn : 111-22-3333
-            house_number : 221B
-        """
-        if self.__name:
-            return self.__name
-        elif self.__parent:
-            par = self.__parent()
-            if par:
-                return par.__lookup(self)
-            else:
-                return None
-        elif (len(self) == 1 and
-              len(self.__tokdict) == 1 and
-              next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
-            return next(iter(self.__tokdict.keys()))
-        else:
-            return None
-
-    def dump(self, indent='', depth=0, full=True):
-        """
-        Diagnostic method for listing out the contents of a C{ParseResults}.
-        Accepts an optional C{indent} argument so that this string can be embedded
-        in a nested display of other data.
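
C{asXML} is the one accessor above without its own example; a brief editorial sketch (the exact whitespace depends on the C{formatted} flag, and the method is flagged deprecated):

    from pyparsing import Word, nums

    integer = Word(nums)
    date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
    result = date_str.parseString("1999/12/31")

    # named tokens become <year>1999</year> etc.; unnamed tokens fall back to <ITEM>/</ITEM>
    print(result.asXML("date"))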
-
-        Example::
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            result = date_str.parseString('12/31/1999')
-            print(result.dump())
-        prints::
-            ['12', '/', '31', '/', '1999']
-            - day: 1999
-            - month: 31
-            - year: 12
-        """
-        out = []
-        NL = '\n'
-        out.append( indent+_ustr(self.asList()) )
-        if full:
-            if self.haskeys():
-                items = sorted((str(k), v) for k,v in self.items())
-                for k,v in items:
-                    if out:
-                        out.append(NL)
-                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
-                    if isinstance(v,ParseResults):
-                        if v:
-                            out.append( v.dump(indent,depth+1) )
-                        else:
-                            out.append(_ustr(v))
-                    else:
-                        out.append(repr(v))
-            elif any(isinstance(vv,ParseResults) for vv in self):
-                v = self
-                for i,vv in enumerate(v):
-                    if isinstance(vv,ParseResults):
-                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
-                    else:
-                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))
-
-        return "".join(out)
-
-    def pprint(self, *args, **kwargs):
-        """
-        Pretty-printer for parsed results as a list, using the C{pprint} module.
-        Accepts additional positional or keyword args as defined for the
-        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
-
-        Example::
-            ident = Word(alphas, alphanums)
-            num = Word(nums)
-            func = Forward()
-            term = ident | num | Group('(' + func + ')')
-            func <<= ident + Group(Optional(delimitedList(term)))
-            result = func.parseString("fna a,b,(fnb c,d,200),100")
-            result.pprint(width=40)
-        prints::
-            ['fna',
-             ['a',
-              'b',
-              ['(', 'fnb', ['c', 'd', '200'], ')'],
-              '100']]
-        """
-        pprint.pprint(self.asList(), *args, **kwargs)
-
-    # add support for pickle protocol
-    def __getstate__(self):
-        return ( self.__toklist,
-                 ( self.__tokdict.copy(),
-                   self.__parent is not None and self.__parent() or None,
-                   self.__accumNames,
-                   self.__name ) )
-
-    def __setstate__(self,state):
-        self.__toklist = state[0]
-        (self.__tokdict,
-         par,
-         inAccumNames,
-         self.__name) = state[1]
-        self.__accumNames = {}
-        self.__accumNames.update(inAccumNames)
-        if par is not None:
-            self.__parent = wkref(par)
-        else:
-            self.__parent = None
-
-    def __getnewargs__(self):
-        return self.__toklist, self.__name, self.__asList, self.__modal
-
-    def __dir__(self):
-        return (dir(type(self)) + list(self.keys()))
-
-MutableMapping.register(ParseResults)
-
-def col (loc,strg):
-    """Returns current column within a string, counting newlines as line separators.
-   The first column is number 1.
-
-   Note: the default parsing behavior is to expand tabs in the input string
-   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
-   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
-   consistent view of the parsed string, the parse location, and line and column
-   positions within the parsed string.
-   """
-    s = strg
-    return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
-
-def lineno(loc,strg):
-    """Returns current line number within a string, counting newlines as line separators.
-   The first line is number 1.
-
-   Note: the default parsing behavior is to expand tabs in the input string
-   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
-   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
-   consistent view of the parsed string, the parse location, and line and column
-   positions within the parsed string.
-   """
-    return strg.count("\n",0,loc) + 1
-
-def line( loc, strg ):
-    """Returns the line of text containing loc within a string, counting newlines as line separators.
- """ - lastCR = strg.rfind("\n", 0, loc) - nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR+1:nextCR] - else: - return strg[lastCR+1:] - -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) - -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) - -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) - -def nullDebugAction(*args): - """'Do-nothing' debug action, to suppress debugging output during parsing.""" - pass - -# Only works on Python 3.x - nonlocal is toxic to Python 2 installs -#~ 'decorator to trim function calls to match the arity of the target' -#~ def _trim_arity(func, maxargs=3): - #~ if func in singleArgBuiltins: - #~ return lambda s,l,t: func(t) - #~ limit = 0 - #~ foundArity = False - #~ def wrapper(*args): - #~ nonlocal limit,foundArity - #~ while 1: - #~ try: - #~ ret = func(*args[limit:]) - #~ foundArity = True - #~ return ret - #~ except TypeError: - #~ if limit == maxargs or foundArity: - #~ raise - #~ limit += 1 - #~ continue - #~ return wrapper - -# this version is Python 2.x-3.x cross-compatible -'decorator to trim function calls to match the arity of the target' -def _trim_arity(func, maxargs=2): - if func in singleArgBuiltins: - return lambda s,l,t: func(t) - limit = [0] - foundArity = [False] - - # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): - def extract_stack(limit=0): - # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [frame_summary[:2]] - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - else: - extract_stack = traceback.extract_stack - extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to - # user's parse action 'func', so that we don't incur call penalty at parse time - - LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND - # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
-    this_line = extract_stack(limit=2)[-1]
-    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
-
-    def wrapper(*args):
-        while 1:
-            try:
-                ret = func(*args[limit[0]:])
-                foundArity[0] = True
-                return ret
-            except TypeError:
-                # re-raise TypeErrors if they did not come from our arity testing
-                if foundArity[0]:
-                    raise
-                else:
-                    try:
-                        tb = sys.exc_info()[-1]
-                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
-                            raise
-                    finally:
-                        del tb
-
-                if limit[0] <= maxargs:
-                    limit[0] += 1
-                    continue
-                raise
-
-    # copy func name to wrapper for sensible debug output
-    func_name = "<parse action>"
-    try:
-        func_name = getattr(func, '__name__',
-                            getattr(func, '__class__').__name__)
-    except Exception:
-        func_name = str(func)
-    wrapper.__name__ = func_name
-
-    return wrapper
-
-class ParserElement(object):
-    """Abstract base level parser element class."""
-    DEFAULT_WHITE_CHARS = " \n\t\r"
-    verbose_stacktrace = False
-
-    @staticmethod
-    def setDefaultWhitespaceChars( chars ):
-        r"""
-        Overrides the default whitespace chars
-
-        Example::
-            # default whitespace chars are space, <TAB> and newline
-            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
-
-            # change to just treat newline as significant
-            ParserElement.setDefaultWhitespaceChars(" \t")
-            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
-        """
-        ParserElement.DEFAULT_WHITE_CHARS = chars
-
-    @staticmethod
-    def inlineLiteralsUsing(cls):
-        """
-        Set class to be used for inclusion of string literals into a parser.
-
-        Example::
-            # default literal class used is Literal
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
-
-
-            # change to Suppress
-            ParserElement.inlineLiteralsUsing(Suppress)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
-        """
-        ParserElement._literalStringClass = cls
-
-    def __init__( self, savelist=False ):
-        self.parseAction = list()
-        self.failAction = None
-        #~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
-        self.strRepr = None
-        self.resultsName = None
-        self.saveAsList = savelist
-        self.skipWhitespace = True
-        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
-        self.copyDefaultWhiteChars = True
-        self.mayReturnEmpty = False # used when checking for left-recursion
-        self.keepTabs = False
-        self.ignoreExprs = list()
-        self.debug = False
-        self.streamlined = False
-        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
-        self.errmsg = ""
-        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
-        self.debugActions = ( None, None, None ) #custom debug actions
-        self.re = None
-        self.callPreparse = True # used to avoid redundant calls to preParse
-        self.callDuringTry = False
-
-    def copy( self ):
-        """
-        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
-        for the same parsing pattern, using copies of the original parse element.
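
An editorial illustration of the arity trimming implemented by C{_trim_arity} above: a parse action may declare 0 to 3 of the C{(s, loc, toks)} arguments, and the wrapper probes with successive C{TypeError}s until the declared arity fits.

    from pyparsing import Word, nums

    int_t   = Word(nums).setParseAction(lambda toks: int(toks[0]))          # fn(toks)
    int_lt  = Word(nums).setParseAction(lambda loc, toks: int(toks[0]))     # fn(loc, toks)
    int_slt = Word(nums).setParseAction(lambda s, loc, toks: int(toks[0]))  # fn(s, loc, toks)

    assert int_t.parseString("42")[0] == int_lt.parseString("42")[0] == int_slt.parseString("42")[0] == 42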
- - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - - print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) - prints:: - [5120, 100, 655360, 268435456] - Equivalent form of C{expr.copy()} is just C{expr()}:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - """ - cpy = copy.copy( self ) - cpy.parseAction = self.parseAction[:] - cpy.ignoreExprs = self.ignoreExprs[:] - if self.copyDefaultWhiteChars: - cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - return cpy - - def setName( self, name ): - """ - Define name for this expression, makes debugging and exception messages clearer. - - Example:: - Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) - Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) - """ - self.name = name - self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg - return self - - def setResultsName( self, name, listAllMatches=False ): - """ - Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original C{ParserElement} object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. - - Example:: - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - - # equivalent form: - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - """ - newself = self.copy() - if name.endswith("*"): - name = name[:-1] - listAllMatches=True - newself.resultsName = name - newself.modalResults = not listAllMatches - return newself - - def setBreak(self,breakFlag = True): - """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set C{breakFlag} to True to enable, False to - disable. - """ - if breakFlag: - _parseMethod = self._parse - def breaker(instring, loc, doActions=True, callPreParse=True): - import pdb - pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) - breaker._originalParseMethod = _parseMethod - self._parse = breaker - else: - if hasattr(self._parse,"_originalParseMethod"): - self._parse = self._parse._originalParseMethod - return self - - def setParseAction( self, *fns, **kwargs ): - """ - Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. 
-
-        Optional keyword arguments:
-         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
-
-        Note: the default parsing behavior is to expand tabs in the input string
-        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
-        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
-        consistent view of the parsed string, the parse location, and line and column
-        positions within the parsed string.
-
-        Example::
-            integer = Word(nums)
-            date_str = integer + '/' + integer + '/' + integer
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
-
-            # use parse action to convert to ints at parse time
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            date_str = integer + '/' + integer + '/' + integer
-
-            # note that integer fields are now ints, not strings
-            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
-        """
-        self.parseAction = list(map(_trim_arity, list(fns)))
-        self.callDuringTry = kwargs.get("callDuringTry", False)
-        return self
-
-    def addParseAction( self, *fns, **kwargs ):
-        """
-        Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
-
-        See examples in L{I{copy}<copy>}.
-        """
-        self.parseAction += list(map(_trim_arity, list(fns)))
-        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
-        return self
-
-    def addCondition(self, *fns, **kwargs):
-        """Add a boolean predicate function to expression's list of parse actions. See
-        L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
-        functions passed to C{addCondition} need to return boolean success/fail of the condition.
-
-        Optional keyword arguments:
-         - message = define a custom message to be used in the raised exception
-         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
-
-        Example::
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            year_int = integer.copy()
-            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
-            date_str = year_int + '/' + integer + '/' + integer
-
-            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
-        """
-        msg = kwargs.get("message", "failed user-defined condition")
-        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
-        for fn in fns:
-            def pa(s,l,t):
-                if not bool(_trim_arity(fn)(s,l,t)):
-                    raise exc_type(s,l,msg)
-            self.parseAction.append(pa)
-        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
-        return self
-
-    def setFailAction( self, fn ):
-        """Define action to perform if parsing fails at this expression.
-           Fail action fn is a callable function that takes the arguments
-           C{fn(s,loc,expr,err)} where:
-            - s = string being parsed
-            - loc = location where expression match was attempted and failed
-            - expr = the parse expression that failed
-            - err = the exception thrown
-           The function returns no value.
It may throw C{L{ParseFatalException}} - if it is desired to stop parsing immediately.""" - self.failAction = fn - return self - - def _skipIgnorables( self, instring, loc ): - exprsFound = True - while exprsFound: - exprsFound = False - for e in self.ignoreExprs: - try: - while 1: - loc,dummy = e._parse( instring, loc ) - exprsFound = True - except ParseException: - pass - return loc - - def preParse( self, instring, loc ): - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - - if self.skipWhitespace: - wt = self.whiteChars - instrlen = len(instring) - while loc < instrlen and instring[loc] in wt: - loc += 1 - - return loc - - def parseImpl( self, instring, loc, doActions=True ): - return loc, [] - - def postParse( self, instring, loc, tokenlist ): - return tokenlist - - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) - - if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) - if self.failAction: - self.failAction( instring, tokensStart, self, err ) - raise - else: - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - if self.mayIndexError or preloc >= len(instring): - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - - tokens = self.postParse( instring, loc, tokens ) - - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) - if self.parseAction and (doActions or self.callDuringTry): - if debugging: - try: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) - raise - else: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) - - return loc, retTokens - - def tryParse( self, instring, loc ): - try: - return self._parse( instring, loc, doActions=False )[0] - except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) - - def canParseNext(self, instring, loc): - try: - self.tryParse(instring, loc) - except (ParseException, 
IndexError): - return False - else: - return True - - class _UnboundedCache(object): - def __init__(self): - cache = {} - self.not_in_cache = not_in_cache = object() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - if _OrderedDict is not None: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = _OrderedDict() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(cache) > size: - try: - cache.popitem(False) - except KeyError: - pass - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - else: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = {} - key_fifo = collections.deque([], size) - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(key_fifo) > size: - cache.pop(key_fifo.popleft(), None) - key_fifo.append(key) - - def clear(self): - cache.clear() - key_fifo.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - # argument cache for optimizing repeated calls when backtracking through recursive expressions - packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail - packrat_cache_lock = RLock() - packrat_cache_stats = [0, 0] - - # this method gets repeatedly called during backtracking with the same arguments - - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - HIT, MISS = 0, 1 - lookup = (self, instring, loc, callPreParse, doActions) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return (value[0], value[1].copy()) - - _parse = _parseNoCache - - @staticmethod - def resetCache(): - ParserElement.packrat_cache.clear() - ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) - - _packratEnabled = False - @staticmethod - def enablePackrat(cache_size_limit=128): - """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
- Repeated parse attempts at the same string location (which happens - often in many complex grammars) can immediately return a cached value, - instead of re-executing parsing/validating code. Memoizing is done of - both valid results and parsing exceptions. - - Parameters: - - cache_size_limit - (default=C{128}) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - - This speedup may break existing programs that use parse actions that - have side-effects. For this reason, packrat parsing is disabled when - you first import pyparsing. To activate the packrat feature, your - program must call the class method C{ParserElement.enablePackrat()}. If - your program uses C{psyco} to "compile as you go", you must call - C{enablePackrat} before calling C{psyco.full()}. If you do not do this, - Python will crash. For best results, call C{enablePackrat()} immediately - after importing pyparsing. - - Example:: - import pyparsing - pyparsing.ParserElement.enablePackrat() - """ - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = ParserElement._UnboundedCache() - else: - ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) - ParserElement._parse = ParserElement._parseCache - - def parseString( self, instring, parseAll=False ): - """ - Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. - - If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). - - Note: C{parseString} implicitly calls C{expandtabs()} on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explictly expand the tabs in your input string before calling - C{parseString} - - Example:: - Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] - Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text - """ - ParserElement.resetCache() - if not self.streamlined: - self.streamline() - #~ self.saveAsList = True - for e in self.ignoreExprs: - e.streamline() - if not self.keepTabs: - instring = instring.expandtabs() - try: - loc, tokens = self._parse( instring, 0 ) - if parseAll: - loc = self.preParse( instring, loc ) - se = Empty() + StringEnd() - se._parse( instring, loc ) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - else: - return tokens - - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """ - Scan the input string for expression matches. Each match will return the - matching tokens, start location, and end location. 
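
A minimal opt-in sketch for the packrat machinery above (editorial; the hit/miss tally in C{packrat_cache_stats} makes the cache's effect observable):

    import pyparsing

    pyparsing.ParserElement.enablePackrat()        # default: FIFO cache of 128 entries
    # pyparsing.ParserElement.enablePackrat(None)  # or an unbounded cache

    # ... define and exercise a heavily backtracking grammar here ...
    hits, misses = pyparsing.ParserElement.packrat_cache_stats
    print(hits, misses)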
May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. - - Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}} for more information on parsing - strings with embedded tabs. - - Example:: - source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" - print(source) - for tokens,start,end in Word(alphas).scanString(source): - print(' '*start + '^'*(end-start)) - print(' '*start + tokens[0]) - - prints:: - - sldjf123lsdjjkf345sldkjf879lkjsfd987 - ^^^^^ - sldjf - ^^^^^^^ - lsdjjkf - ^^^^^^ - sldkjf - ^^^^^^ - lkjsfd - """ - if not self.streamlined: - self.streamline() - for e in self.ignoreExprs: - e.streamline() - - if not self.keepTabs: - instring = _ustr(instring).expandtabs() - instrlen = len(instring) - loc = 0 - preparseFn = self.preParse - parseFn = self._parse - ParserElement.resetCache() - matches = 0 - try: - while loc <= instrlen and matches < maxMatches: - try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) - except ParseException: - loc = preloc+1 - else: - if nextLoc > loc: - matches += 1 - yield tokens, preloc, nextLoc - if overlap: - nextloc = preparseFn( instring, loc ) - if nextloc > loc: - loc = nextLoc - else: - loc += 1 - else: - loc = nextLoc - else: - loc = preloc+1 - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def transformString( self, instring ): - """ - Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. To use C{transformString}, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string. - - Example:: - wd = Word(alphas) - wd.setParseAction(lambda toks: toks[0].title()) - - print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - Prints:: - Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. - """ - out = [] - lastE = 0 - # force preservation of s, to minimize unwanted transformation of string, and to - # keep string locs straight between transformString and scanString - self.keepTabs = True - try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) - if t: - if isinstance(t,ParseResults): - out += t.asList() - elif isinstance(t,list): - out += t - else: - out.append(t) - lastE = e - out.append(instring[lastE:]) - out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def searchString( self, instring, maxMatches=_MAX_INT ): - """ - Another extension to C{L{scanString}}, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. 
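
The C{overlap} flag of C{scanString} is not exercised in the docstring above; a short editorial sketch of how it re-scans from inside a previous match:

    from pyparsing import Word, alphas

    trigram = Word(alphas, exact=3)
    for tokens, start, end in trigram.scanString("abcde", overlap=True):
        print(start, end, tokens[0])
    # -> 0 3 abc, then 1 4 bcd, then 2 5 cde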
- - Example:: - # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters - cap_word = Word(alphas.upper(), alphas.lower()) - - print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) - - # the sum() builtin can be used to merge results into a single ParseResults object - print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) - prints:: - [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] - ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] - """ - try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): - """ - Generator method to split a string using the given expression as a separator. - May be called with optional C{maxsplit} argument, to limit the number of splits; - and the optional C{includeSeparators} argument (default=C{False}), if the separating - matching text should be included in the split results. - - Example:: - punc = oneOf(list(".,;:/-!?")) - print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) - prints:: - ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] - """ - splits = 0 - last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): - yield instring[last:s] - if includeSeparators: - yield t[0] - last = e - yield instring[last:] - - def __add__(self, other ): - """ - Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement - converts them to L{Literal}s by default. - - Example:: - greet = Word(alphas) + "," + Word(alphas) + "!" - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - Prints:: - Hello, World! 
-> ['Hello', ',', 'World', '!'] - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return And( [ self, other ] ) - - def __radd__(self, other ): - """ - Implementation of + operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other + self - - def __sub__(self, other): - """ - Implementation of - operator, returns C{L{And}} with error stop - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return self + And._ErrorStop() + other - - def __rsub__(self, other ): - """ - Implementation of - operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other - self - - def __mul__(self,other): - """ - Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent - to C{expr*n + L{ZeroOrMore}(expr)} - (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} - (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} - - Note that C{expr*(None,n)} does not raise an exception if - more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr - occurrences. 
If this behavior is desired, then write - C{expr*(None,n) + ~expr} - """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): - other = (other + (None, None))[:2] - if other[0] is None: - other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: - if other[0] == 0: - return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): - minElements, optElements = other - optElements -= minElements - else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) - else: - raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) - - if minElements < 0: - raise ValueError("cannot multiply ParserElement by negative value") - if optElements < 0: - raise ValueError("second tuple value must be greater or equal to first tuple value") - if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") - - if (optElements): - def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) - else: - return Optional(self) - if minElements: - if minElements == 1: - ret = self + makeOptionalList(optElements) - else: - ret = And([self]*minElements) + makeOptionalList(optElements) - else: - ret = makeOptionalList(optElements) - else: - if minElements == 1: - ret = self - else: - ret = And([self]*minElements) - return ret - - def __rmul__(self, other): - return self.__mul__(other) - - def __or__(self, other ): - """ - Implementation of | operator - returns C{L{MatchFirst}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return MatchFirst( [ self, other ] ) - - def __ror__(self, other ): - """ - Implementation of | operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other | self - - def __xor__(self, other ): - """ - Implementation of ^ operator - returns C{L{Or}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return Or( [ self, other ] ) - - def __rxor__(self, other ): - """ - Implementation of ^ operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other ^ self - - def __and__(self, other ): - """ - Implementation of & operator - returns C{L{Each}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - 
return None - return Each( [ self, other ] ) - - def __rand__(self, other ): - """ - Implementation of & operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other & self - - def __invert__( self ): - """ - Implementation of ~ operator - returns C{L{NotAny}} - """ - return NotAny( self ) - - def __call__(self, name=None): - """ - Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. - - If C{name} is omitted, same as calling C{L{copy}}. - - Example:: - # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - """ - if name is not None: - return self.setResultsName(name) - else: - return self.copy() - - def suppress( self ): - """ - Suppresses the output of this C{ParserElement}; useful to keep punctuation from - cluttering up returned output. - """ - return Suppress( self ) - - def leaveWhitespace( self ): - """ - Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. - """ - self.skipWhitespace = False - return self - - def setWhitespaceChars( self, chars ): - """ - Overrides the default whitespace chars - """ - self.skipWhitespace = True - self.whiteChars = chars - self.copyDefaultWhiteChars = False - return self - - def parseWithTabs( self ): - """ - Overrides default behavior to expand C{}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{} characters. - """ - self.keepTabs = True - return self - - def ignore( self, other ): - """ - Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. - - Example:: - patt = OneOrMore(Word(alphas)) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - - patt.ignore(cStyleComment) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] - """ - if isinstance(other, basestring): - other = Suppress(other) - - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - self.ignoreExprs.append(other) - else: - self.ignoreExprs.append( Suppress( other.copy() ) ) - return self - - def setDebugActions( self, startAction, successAction, exceptionAction ): - """ - Enable display of debugging messages while doing pattern matching. - """ - self.debugActions = (startAction or _defaultStartDebugAction, - successAction or _defaultSuccessDebugAction, - exceptionAction or _defaultExceptionDebugAction) - self.debug = True - return self - - def setDebug( self, flag=True ): - """ - Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable. 
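
The tuple forms accepted by C{__mul__} above have no worked example in the docstring; a brief editorial sketch:

    from pyparsing import Word, nums

    digit = Word(nums, exact=1)
    two_to_four = digit * (2, 4)      # between 2 and 4 digits
    at_least_two = digit * (2, None)  # 2 or more digits

    print(two_to_four.parseString("123"))     # -> ['1', '2', '3']
    print(at_least_two.parseString("12345"))  # -> ['1', '2', '3', '4', '5']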
-    def setDebug( self, flag=True ):
-        """
-        Enable display of debugging messages while doing pattern matching.
-        Set C{flag} to True to enable, False to disable.
-
-        Example::
-            wd = Word(alphas).setName("alphaword")
-            integer = Word(nums).setName("numword")
-            term = wd | integer
-
-            # turn on debugging for wd
-            wd.setDebug()
-
-            OneOrMore(term).parseString("abc 123 xyz 890")
-
-        prints::
-            Match alphaword at loc 0(1,1)
-            Matched alphaword -> ['abc']
-            Match alphaword at loc 3(1,4)
-            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
-            Match alphaword at loc 7(1,8)
-            Matched alphaword -> ['xyz']
-            Match alphaword at loc 11(1,12)
-            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
-            Match alphaword at loc 15(1,16)
-            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
-
-        The output shown is that produced by the default debug actions - custom debug actions can be
-        specified using L{setDebugActions}. Prior to attempting
-        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
-        is shown. Then if the parse succeeds, a C{"Matched <exprname> -> matched_tokens"} message is shown, or an C{"Exception raised:<exception message>"}
-        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
-        which makes debugging and exception messages easier to understand - for instance, the default
-        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
-        """
-        if flag:
-            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
-        else:
-            self.debug = False
-        return self
-
-    def __str__( self ):
-        return self.name
-
-    def __repr__( self ):
-        return _ustr(self)
-
-    def streamline( self ):
-        self.streamlined = True
-        self.strRepr = None
-        return self
-
-    def checkRecursion( self, parseElementList ):
-        pass
-
-    def validate( self, validateTrace=[] ):
-        """
-        Check defined expressions for valid structure, check for infinite recursive definitions.
-        """
-        self.checkRecursion( [] )
-
-    def parseFile( self, file_or_filename, parseAll=False ):
-        """
-        Execute the parse expression on the given file or filename.
-        If a filename is specified (instead of a file object),
-        the entire file is opened, read, and closed before parsing.
-        """
-        try:
-            file_contents = file_or_filename.read()
-        except AttributeError:
-            with open(file_or_filename, "r") as f:
-                file_contents = f.read()
-        try:
-            return self.parseString(file_contents, parseAll)
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clears out pyparsing internal stack trace
-                raise exc
-
-    def __eq__(self,other):
-        if isinstance(other, ParserElement):
-            return self is other or vars(self) == vars(other)
-        elif isinstance(other, basestring):
-            return self.matches(other)
-        else:
-            return super(ParserElement,self)==other
-
-    def __ne__(self,other):
-        return not (self == other)
-
-    def __hash__(self):
-        return hash(id(self))
-
-    def __req__(self,other):
-        return self == other
-
-    def __rne__(self,other):
-        return not (self == other)
-
-    def matches(self, testString, parseAll=True):
-        """
-        Method for quick testing of a parser against a test string. Good for simple
-        inline microtests of sub expressions while building up larger parser.
- - Parameters: - - testString - to test against this expression for a match - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - Example:: - expr = Word(nums) - assert expr.matches("100") - """ - try: - self.parseString(_ustr(testString), parseAll=parseAll) - return True - except ParseBaseException: - return False - - def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): - """ - Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. - - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - comment - (default=C{'#'}) - expression for indicating embedded comments in the test - string; pass None to disable comment filtering - - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; - if False, only dump nested list - - printResults - (default=C{True}) prints test output to stdout - - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing - - Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if C{failureTests} is True), and the results contain a list of lines of each - test's output - - Example:: - number_expr = pyparsing_common.number.copy() - - result = number_expr.runTests(''' - # unsigned integer - 100 - # negative integer - -100 - # float with scientific notation - 6.02e23 - # integer with scientific notation - 1e-12 - ''') - print("Success" if result[0] else "Failed!") - - result = number_expr.runTests(''' - # stray character - 100Z - # missing leading digit before '.' - -.100 - # too many '.' - 3.14.159 - ''', failureTests=True) - print("Success" if result[0] else "Failed!") - prints:: - # unsigned integer - 100 - [100] - - # negative integer - -100 - [-100] - - # float with scientific notation - 6.02e23 - [6.02e+23] - - # integer with scientific notation - 1e-12 - [1e-12] - - Success - - # stray character - 100Z - ^ - FAIL: Expected end of text (at char 3), (line:1, col:4) - - # missing leading digit before '.' - -.100 - ^ - FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) - - # too many '.' - 3.14.159 - ^ - FAIL: Expected end of text (at char 4), (line:1, col:5) - - Success - - Each test string must be on a single line. If you want to test a string that spans multiple - lines, create a test like this:: - - expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - - (Note that this is a raw string literal, you must include the leading 'r'.) 
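A quick sketch of the C{runTests} behavior documented above (pyparsing 2.x API)::

    from pyparsing import Word, nums

    integer = Word(nums)
    success, results = integer.runTests('''
        # a valid integer
        123
        # not an integer
        abc
        ''')
    print("Success" if success else "Failed!")  # -> Failed! (the 'abc' case does not parse)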
- """ - if isinstance(tests, basestring): - tests = list(map(str.strip, tests.rstrip().splitlines())) - if isinstance(comment, basestring): - comment = Literal(comment) - allResults = [] - comments = [] - success = True - for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: - comments.append(t) - continue - if not t: - continue - out = ['\n'.join(comments), t] - comments = [] - try: - t = t.replace(r'\n','\n') - result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" - if '\n' in t: - out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) - else: - out.append(' '*pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) - success = success and failureTests - result = pe - except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) - success = success and failureTests - result = exc - - if printResults: - if fullDump: - out.append('') - print('\n'.join(out)) - - allResults.append((t, result)) - - return success, allResults - - -class Token(ParserElement): - """ - Abstract C{ParserElement} subclass, for defining atomic matching patterns. - """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) - - -class Empty(Token): - """ - An empty token, will always match. - """ - def __init__( self ): - super(Empty,self).__init__() - self.name = "Empty" - self.mayReturnEmpty = True - self.mayIndexError = False - - -class NoMatch(Token): - """ - A token that will never match. - """ - def __init__( self ): - super(NoMatch,self).__init__() - self.name = "NoMatch" - self.mayReturnEmpty = True - self.mayIndexError = False - self.errmsg = "Unmatchable token" - - def parseImpl( self, instring, loc, doActions=True ): - raise ParseException(instring, loc, self.errmsg, self) - - -class Literal(Token): - """ - Token to exactly match a specified string. - - Example:: - Literal('blah').parseString('blah') # -> ['blah'] - Literal('blah').parseString('blahfooblah') # -> ['blah'] - Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use L{CaselessLiteral}. - - For keyword matching (force word break before and after the matched string), - use L{Keyword} or L{CaselessKeyword}. 
- """ - def __init__( self, matchString ): - super(Literal,self).__init__() - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Literal; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) -_L = Literal -ParserElement._literalStringClass = Literal - -class Keyword(Token): - """ - Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}: - - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$" - - C{caseless} allows case-insensitive matching, default is C{False}. - - Example:: - Keyword("start").parseString("start") # -> ['start'] - Keyword("start").parseString("starting") # -> Exception - - For case-insensitive matching, use L{CaselessKeyword}. 
- """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" - - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() - if identChars is None: - identChars = Keyword.DEFAULT_KEYWORD_CHARS - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - self.caseless = caseless - if caseless: - self.caselessmatch = matchString.upper() - identChars = identChars.upper() - self.identChars = set(identChars) - - def parseImpl( self, instring, loc, doActions=True ): - if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - - def copy(self): - c = super(Keyword,self).copy() - c.identChars = Keyword.DEFAULT_KEYWORD_CHARS - return c - - @staticmethod - def setDefaultKeywordChars( chars ): - """Overrides the default Keyword chars - """ - Keyword.DEFAULT_KEYWORD_CHARS = chars - -class CaselessLiteral(Literal): - """ - Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. - - Example:: - OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for L{CaselessKeyword}.) - """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) - # Preserve the defining literal. - self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name - - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString - raise ParseException(instring, loc, self.errmsg, self) - -class CaselessKeyword(Keyword): - """ - Caseless version of L{Keyword}. - - Example:: - OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for L{CaselessLiteral}.) - """ - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - - def parseImpl( self, instring, loc, doActions=True ): - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - -class CloseMatch(Token): - """ - A variation on L{Literal} which matches "close" matches, that is, - strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: - - C{match_string} - string to be matched - - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match - - The results from a successful parse will contain the matched text from the input string and the following named results: - - C{mismatches} - a list of the positions within the match_string where mismatches were found - - C{original} - the original match_string used to compare against the input string - - If C{mismatches} is an empty list, then the match was an exact match. - - Example:: - patt = CloseMatch("ATCATCGAATGGA") - patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) - patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) - - # exact match - patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) - - # close match allowing up to 2 mismatches - patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) - patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) - """ - def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() - self.name = match_string - self.match_string = match_string - self.maxMismatches = maxMismatches - self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) - self.mayIndexError = False - self.mayReturnEmpty = False - - def parseImpl( self, instring, loc, doActions=True ): - start = loc - instrlen = len(instring) - maxloc = start + len(self.match_string) - - if maxloc <= instrlen: - match_string = self.match_string - match_stringloc = 0 - mismatches = [] - maxMismatches = self.maxMismatches - - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m - if src != mat: - mismatches.append(match_stringloc) - if len(mismatches) > maxMismatches: - break - else: - loc = match_stringloc + 1 - results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string - results['mismatches'] = mismatches - return loc, results - - raise ParseException(instring, loc, self.errmsg, self) - - -class Word(Token): - """ - Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{excludeChars} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. - - L{srange} is useful for defining custom character set strings for defining - C{Word} expressions, using range notation from regular expression character sets. - - A common mistake is to use C{Word} to match a specific literal string, as in - C{Word("Address")}. Remember that C{Word} uses the string argument to define - I{sets} of matchable characters. This expression would match "Add", "AAA", - "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. - To match an exact literal string, use L{Literal} or L{Keyword}. 
-
-    pyparsing includes helper strings for building Words:
-     - L{alphas}
-     - L{nums}
-     - L{alphanums}
-     - L{hexnums}
-     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
-     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
-     - L{printables} (any non-whitespace character)
-
-    Example::
-        # a word composed of digits
-        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
-
-        # a word with a leading capital, and zero or more lowercase
-        capital_word = Word(alphas.upper(), alphas.lower())
-
-        # hostnames are alphanumeric, with leading alpha, and '-'
-        hostname = Word(alphas, alphanums+'-')
-
-        # roman numeral (not a strict parser, accepts invalid mix of characters)
-        roman = Word("IVXLCDM")
-
-        # any string of non-whitespace characters, except for ','
-        csv_value = Word(printables, excludeChars=",")
-    """
-    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
-        super(Word,self).__init__()
-        if excludeChars:
-            initChars = ''.join(c for c in initChars if c not in excludeChars)
-            if bodyChars:
-                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
-        self.initCharsOrig = initChars
-        self.initChars = set(initChars)
-        if bodyChars :
-            self.bodyCharsOrig = bodyChars
-            self.bodyChars = set(bodyChars)
-        else:
-            self.bodyCharsOrig = initChars
-            self.bodyChars = set(initChars)
-
-        self.maxSpecified = max > 0
-
-        if min < 1:
-            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
-
-        self.minLen = min
-
-        if max > 0:
-            self.maxLen = max
-        else:
-            self.maxLen = _MAX_INT
-
-        if exact > 0:
-            self.maxLen = exact
-            self.minLen = exact
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.asKeyword = asKeyword
-
-        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
-            if self.bodyCharsOrig == self.initCharsOrig:
-                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
-            elif len(self.initCharsOrig) == 1:
-                self.reString = "%s[%s]*" % \
-                                      (re.escape(self.initCharsOrig),
-                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
-            else:
-                self.reString = "[%s][%s]*" % \
-                                      (_escapeRegexRangeChars(self.initCharsOrig),
-                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
-            if self.asKeyword:
-                self.reString = r"\b"+self.reString+r"\b"
-            try:
-                self.re = re.compile( self.reString )
-            except Exception:
-                self.re = None
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if self.re:
-            result = self.re.match(instring,loc)
-            if not result:
-                raise ParseException(instring, loc, self.errmsg, self)
-
-            loc = result.end()
-            return loc, result.group()
-
-        if not(instring[ loc ] in self.initChars):
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        start = loc
-        loc += 1
-        instrlen = len(instring)
-        bodychars = self.bodyChars
-        maxloc = start + self.maxLen
-        maxloc = min( maxloc, instrlen )
-        while loc < maxloc and instring[loc] in bodychars:
-            loc += 1
-
-        throwException = False
-        if loc - start < self.minLen:
-            throwException = True
-        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
-            throwException = True
-        if self.asKeyword:
-            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
-                throwException = True
-
-        if throwException:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        return loc, instring[start:loc]
-
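A usage sketch for the C{Word} semantics implemented above (pyparsing 2.x API)::

    from pyparsing import Word, alphas, alphanums, printables

    hostname = Word(alphas, alphanums + '-')       # leading alpha, then alphanumerics or '-'
    print(hostname.parseString("web-01.example"))  # -> ['web-01']

    csv_value = Word(printables, excludeChars=",") # any printables except the delimiter
    print(csv_value.parseString("abc,def"))        # -> ['abc']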
-    def __str__( self ):
-        try:
-            return super(Word,self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-
-            def charsAsStr(s):
-                if len(s)>4:
-                    return s[:4]+"..."
-                else:
-                    return s
-
-            if ( self.initCharsOrig != self.bodyCharsOrig ):
-                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
-            else:
-                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
-
-        return self.strRepr
-
-
-class Regex(Token):
-    r"""
-    Token for matching strings that match a given regular expression.
-    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
-    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
-    named parse results.
-
-    Example::
-        realnum = Regex(r"[+-]?\d+\.\d*")
-        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
-        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
-        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
-    """
-    compiledREtype = type(re.compile("[A-Z]"))
-    def __init__( self, pattern, flags=0):
-        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
-        super(Regex,self).__init__()
-
-        if isinstance(pattern, basestring):
-            if not pattern:
-                warnings.warn("null string passed to Regex; use Empty() instead",
-                        SyntaxWarning, stacklevel=2)
-
-            self.pattern = pattern
-            self.flags = flags
-
-            try:
-                self.re = re.compile(self.pattern, self.flags)
-                self.reString = self.pattern
-            except sre_constants.error:
-                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
-                    SyntaxWarning, stacklevel=2)
-                raise
-
-        elif isinstance(pattern, Regex.compiledREtype):
-            self.re = pattern
-            self.pattern = \
-                self.reString = str(pattern)
-            self.flags = flags
-
-        else:
-            raise ValueError("Regex may only be constructed with a string or a compiled RE object")
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        result = self.re.match(instring,loc)
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        d = result.groupdict()
-        ret = ParseResults(result.group())
-        if d:
-            for k in d:
-                ret[k] = d[k]
-        return loc,ret
-
-    def __str__( self ):
-        try:
-            return super(Regex,self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-            self.strRepr = "Re:(%s)" % repr(self.pattern)
-
-        return self.strRepr
-
-
-class QuotedString(Token):
-    r"""
-    Token for matching strings that are delimited by quoting characters.
-
-    Defined with the following parameters:
-        - quoteChar - string of one or more characters defining the quote delimiting string
-        - escChar - character to escape quotes, typically backslash (default=C{None})
-        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
-        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
-        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
-        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
-        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.)
to actual whitespace (default=C{True}) - - Example:: - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - prints:: - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] - - if isinstance(ret,basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', - } - for wslit,wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace 
escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__( self ): - try: - return super(QuotedString,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """ - Token for matching words composed of characters I{not} in a given set (will - include whitespace in matched characters if not listed in the provided exclusion set - see example). - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. - - Example:: - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - prints:: - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) - self.mayIndexError = False - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """ - Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class. 
- """ - whiteStrs = { - " " : "", - "\t": "", - "\n": "", - "\r": "", - "\f": "", - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() - self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """ - Token to advance to a specific column of input text; useful for tabular report scraping. - """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() - self.col = colno - - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : - loc += 1 - return loc - - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) - if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) - newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] - return newloc, ret - - -class LineStart(_PositionToken): - """ - Matches if current position is at the beginning of a line within the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - Prints:: - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__( self ): - super(LineStart,self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl( self, instring, loc, doActions=True ): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """ - Matches if current position is at the end of a line within the parse string - """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) - self.errmsg = "Expected end of line" - - def parseImpl( self, instring, loc, doActions=True ): - if loc len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """ - Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. 
-class WordStart(_PositionToken):
-    """
-    Matches if the current position is at the beginning of a Word, and
-    is not preceded by any character in a given set of C{wordChars}
-    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
-    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
-    the string being parsed, or at the beginning of a line.
-    """
-    def __init__(self, wordChars = printables):
-        super(WordStart,self).__init__()
-        self.wordChars = set(wordChars)
-        self.errmsg = "Not at the start of a word"
-
-    def parseImpl(self, instring, loc, doActions=True ):
-        if loc != 0:
-            if (instring[loc-1] in self.wordChars or
-                instring[loc] not in self.wordChars):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-class WordEnd(_PositionToken):
-    """
-    Matches if the current position is at the end of a Word, and
-    is not followed by any character in a given set of C{wordChars}
-    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
-    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
-    the string being parsed, or at the end of a line.
-    """
-    def __init__(self, wordChars = printables):
-        super(WordEnd,self).__init__()
-        self.wordChars = set(wordChars)
-        self.skipWhitespace = False
-        self.errmsg = "Not at the end of a word"
-
-    def parseImpl(self, instring, loc, doActions=True ):
-        instrlen = len(instring)
-        if instrlen>0 and loc<instrlen:
-            if (instring[loc] in self.wordChars or
-                instring[loc-1] not in self.wordChars):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-
-class Or(ParseExpression):
-    """
-    Requires that at least one C{ParseExpression} is found.
-    If two expressions match, the expression that matches the longest string will be used.
-    May be constructed using the C{'^'} operator.
-    """
-    def __init__( self, exprs, savelist = False ):
-        super(Or,self).__init__(exprs, savelist)
-        if self.exprs:
-            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
-        else:
-            self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        maxExcLoc = -1
-        maxException = None
-        matches = []
-        for e in self.exprs:
-            try:
-                loc2 = e.tryParse( instring, loc )
-            except ParseException as err:
-                err.__traceback__ = None
-                if err.loc > maxExcLoc:
-                    maxException = err
-                    maxExcLoc = err.loc
-            except IndexError:
-                if len(instring) > maxExcLoc:
-                    maxException = ParseException(instring,len(instring),e.errmsg,self)
-                    maxExcLoc = len(instring)
-            else:
-                # save match among all matches, to retry longest to shortest
-                matches.append((loc2, e))
-
-        if matches:
-            matches.sort(key=lambda x: -x[0])
-            for _,e in matches:
-                try:
-                    return e._parse( instring, loc, doActions )
-                except ParseException as err:
-                    err.__traceback__ = None
-                    if err.loc > maxExcLoc:
-                        maxException = err
-                        maxExcLoc = err.loc
-
-        if maxException is not None:
-            maxException.msg = self.errmsg
-            raise maxException
-        else:
-            raise ParseException(instring, loc, "no defined alternatives to match", self)
-
-
-    def __ixor__(self, other ):
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        return self.append( other ) #Or( [ self, other ] )
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
-
-        return self.strRepr
-
-    def checkRecursion( self, parseElementList ):
-        subRecCheckList = parseElementList[:] + [ self ]
-        for e in self.exprs:
-            e.checkRecursion( subRecCheckList )
-
-
-class MatchFirst(ParseExpression):
-    """
-    Requires that at least one C{ParseExpression} is found.
-    If two expressions match, the first one listed is the one that will match.
-    May be constructed using the C{'|'} operator.
-
-    Example::
-        # construct MatchFirst using '|' operator
-
-        # watch the order of expressions to match
-        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
-        print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']]
-
-        # put more selective expression first
-        number = Combine(Word(nums) + '.'
+ Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse( instring, loc, doActions ) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class Each(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. 
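As the examples above show, C{MatchFirst} (C{|}) takes the first alternative that matches, while C{Or} (C{^}) tries all alternatives and keeps the longest match; a minimal sketch (pyparsing 2.x API)::

    from pyparsing import Combine, Word, nums

    number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
    print(number.parseString("3.1416"))  # -> ['3'] (first match wins)

    number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
    print(number.parseString("3.1416"))  # -> ['3.1416'] (longest match wins)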
- - Example:: - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - prints:: - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - - def parseImpl( self, instring, loc, doActions=True ): - if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] - self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse( instring, tmpLoc ) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e),e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) - resultlist.append(results) - - finalResults = sum(resultlist, ParseResults([])) - return loc, finalResults - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class ParseElementEnhance(ParserElement): - """ - Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. - """ - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) - else: - expr = ParserElement._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) - else: - raise ParseException("",loc,self.errmsg,self) - - def leaveWhitespace( self ): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - else: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - return self - - def streamline( self ): - super(ParseElementEnhance,self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion( self, parseElementList ): - if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] - if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion( [] ) - - def __str__( self ): - try: - return super(ParseElementEnhance,self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """ - Lookahead matching of the given parse expression. C{FollowedBy} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list. 
- - Example:: - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - prints:: - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - self.expr.tryParse( instring, loc ) - return loc, [] - - -class NotAny(ParseElementEnhance): - """ - Lookahead to disallow matching with the given parse expression. C{NotAny} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression does I{not} match at the current - position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator. - - Example:: - - """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - - def parseImpl( self, instring, loc, doActions=True ): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens - -class OneOrMore(_MultipleMatch): - """ - Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """ - Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to L{OneOrMore} - """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -_optionalNotMatched = _NullToken() -class Optional(ParseElementEnhance): - """ - Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - prints:: - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [ self.defaultValue ] - else: - tokens = [] - return loc, tokens - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """ - Token for skipping over all undefined text until the matched expression is found. 
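The default-value branch of C{Optional.parseImpl} above can be exercised as follows (a sketch, pyparsing 2.x API)::

    from pyparsing import Optional, Word, nums

    port = Optional(Word(nums), default="80")
    print(port.parseString("8080"))  # -> ['8080']
    print(port.parseString(""))      # -> ['80'] (default supplied when absent)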
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default=C{False}) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default=C{None}) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default=C{None}) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - prints:: - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.asList = False - if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # build up return 
values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """ - Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. - - Note: take care when assigning to C{Forward} not to overlook precedence of operators. - Specifically, '|' has a lower precedence than '<<', so that:: - fwdExpr << a | b | c - will actually be evaluated as:: - (fwdExpr << a) | b | c - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the C{Forward}:: - fwdExpr << (a | b | c) - Converting to use the '<<=' operator instead will avoid this problem. - - See L{ParseResults.pprint} for an example of a recursive parser created using - C{Forward}. - """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) - - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace( self ): - self.skipWhitespace = False - return self - - def streamline( self ): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate( self, validateTrace=[] ): - if self not in validateTrace: - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - return self.__class__.__name__ + ": ..." - - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse - try: - if self.expr is not None: - retString = _ustr(self.expr) - else: - retString = "None" - finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString - - def copy(self): - if self.expr is not None: - return super(Forward,self).copy() - else: - ret = Forward() - ret <<= self - return ret - -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of C{ParseExpression}, for converting parsed results. - """ - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) - self.saveAsList = False - -class Combine(TokenConverter): - """ - Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. - - Example:: - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' + Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore( self, other ): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super( Combine, self).ignore( other ) - return self - - def postParse( self, instring, loc, tokenlist ): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [ retToks ] - else: - return retToks - -class Group(TokenConverter): - """ - Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. - - Example:: - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] - -class Dict(TokenConverter): - """ - Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - prints:: - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - See more examples at L{ParseResults} of accessing fields by results name. 
- """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey,int): - ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) - else: - dictvalue = tok.copy() #ParseResults(i) - del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) - - if self.resultsName: - return [ tokenlist ] - else: - return tokenlist - - -class Suppress(TokenConverter): - """ - Converter for ignoring the results of a parsed expression. - - Example:: - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - prints:: - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - (See also L{delimitedList}.) - """ - def postParse( self, instring, loc, tokenlist ): - return [] - - def suppress( self ): - return self - - -class OnlyOnce(object): - """ - Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self,s,l,t): - if not self.called: - results = self.callable(s,l,t) - self.called = True - return results - raise ParseException(s,l,"") - def reset(self): - self.called = False - -def traceParseAction(f): - """ - Decorator for debugging parse actions. - - When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} - When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. - - Example:: - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - prints:: - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write( "< ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." - if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) - else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) - -def countedArray( expr, intExpr=None ): - """ - Helper to define a counted list of expressions. - This helper defines a pattern of the form:: - integer expr expr expr... - where the leading integer tells how many expr expressions follow. 
- The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. - - If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. - - Example:: - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s,l,t): - n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i,list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - def copyTokenToRepeater(s,l,t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches by - expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s,l,t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") - return _ustr(s) - -def oneOf( strs, caseless=False, useRegex=True ): - """ - Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. 
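# A minimal sketch (not from the vendored file above) of the longest-first
# matching just described, assuming a standalone `pyparsing` install with the
# same API as this removed copy:
from pyparsing import oneOf

comparison_op = oneOf("< > <= >= == !=")
print(comparison_op.parseString("<="))  # -> ['<='] - the longer '<=' wins over '<'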
- - Parameters: - - strs - a string of space-delimited literals, or a collection of string literals - - caseless - (default=C{False}) - treat all literals as caseless - - useRegex - (default=C{True}) - as an optimization, will generate a Regex - object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or - if creating a C{Regex} raises an exception) - - Example:: - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - prints:: - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral - else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal - - symbols = [] - if isinstance(strs,basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 - - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) - try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) - else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf( key, value ): - """ - Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. 
- - Example:: - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - print(OneOrMore(attr_expr).parseString(text).dump()) - - attr_label = label - attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) - - # similar to Dict, but simpler call format - result = dictOf(attr_label, attr_value).parseString(text) - print(result.dump()) - print(result['shape']) - print(result.shape) # object attribute access works too - print(result.asDict()) - prints:: - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - SQUARE - {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} - """ - return Dict( ZeroOrMore( Group ( key + value ) ) ) - -def originalTextFor(expr, asString=True): - """ - Helper to return the original, untokenized text for a given expression. Useful to - restore the parsed fields of an HTML start tag into the raw tag text itself, or to - revert separate tokens with intervening whitespace back to the original matching - input text. By default, returns a string containing the original parsed text. - - If the optional C{asString} argument is passed as C{False}, then the return value is a - C{L{ParseResults}} containing any results names that were originally matched, and a - single token containing the original matched text from the input string. So if - the expression passed to C{L{originalTextFor}} contains expressions with defined - results names, you must set C{asString} to C{False} if you want to preserve those - results name values. - - Example:: - src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b","i"): - opener,closer = makeHTMLTags(tag) - patt = originalTextFor(opener + SkipTo(closer) + closer) - print(patt.searchString(src)[0]) - prints:: - ['<b> bold <i>text</i> </b>'] - ['<i>text</i>'] - """ - locMarker = Empty().setParseAction(lambda s,loc,t: loc) - endlocMarker = locMarker.copy() - endlocMarker.callPreparse = False - matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") - if asString: - extractText = lambda s,l,t: s[t._original_start:t._original_end] - else: - def extractText(s,l,t): - t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] - matchExpr.setParseAction(extractText) - matchExpr.ignoreExprs = expr.ignoreExprs - return matchExpr - -def ungroup(expr): - """ - Helper to undo pyparsing's default grouping of And expressions, even - if all but one are non-empty. - """ - return TokenConverter(expr).setParseAction(lambda t:t[0]) - -def locatedExpr(expr): - """ - Helper to decorate a returned token with its starting and ending locations in the input string. 
- This helper adds the following results names: - - locn_start = location where matched expression begins - - locn_end = location where matched expression ends - - value = the actual parsed results - - Be careful if the input text contains C{} characters, you may want to call - C{L{ParserElement.parseWithTabs}} - - Example:: - wd = Word(alphas) - for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): - print(match) - prints:: - [[0, 'ljsdf', 5]] - [[8, 'lksdjjf', 15]] - [[18, 'lkkjj', 23]] - """ - locator = Empty().setParseAction(lambda s,l,t: l) - return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) - - -# convenience constants for positional expressions -empty = Empty().setName("empty") -lineStart = LineStart().setName("lineStart") -lineEnd = LineEnd().setName("lineEnd") -stringStart = StringStart().setName("stringStart") -stringEnd = StringEnd().setName("stringEnd") - -_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) -_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) -_charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" - -def srange(s): - r""" - Helper to easily define string ranges for use in Word construction. Borrows - syntax from regexp '[]' string range definitions:: - srange("[0-9]") -> "0123456789" - srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" - srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - The input string must be enclosed in []'s, and the returned string is the expanded - character set joined into a single string. - The values enclosed in the []'s may be: - - a single character - - an escaped character with a leading backslash (such as C{\-} or C{\]}) - - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) - (C{\0x##} is also supported for backwards compatibility) - - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) - """ - _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) - try: - return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) - except Exception: - return "" - -def matchOnlyAtCol(n): - """ - Helper method for defining parse actions that require matching at a specific - column in the input text. - """ - def verifyCol(strg,locn,toks): - if col(locn,strg) != n: - raise ParseException(strg,locn,"matched token not at column %d" % n) - return verifyCol - -def replaceWith(replStr): - """ - Helper method for common parse actions that simply return a literal value. Especially - useful when used with C{L{transformString}()}. 
- - Example:: - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s,l,t: [replStr] - -def removeQuotes(s,l,t): - """ - Helper parse action for removing quotation marks from parsed quoted strings. - - Example:: - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """ - Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional - args are passed, they are forwarded to the given function as additional arguments after - the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the - parsed data to an integer using base 16. - - Example (compare the last to example in L{ParserElement.transformString}:: - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - prints:: - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s,l,t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" - -def _makeTags(tagStr, xml): - """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr,basestring): - resname = tagStr - tagStr = Keyword(tagStr, caseless=not xml) - else: - resname = tagStr.name - - tagAttrName = Word(alphas,alphanums+"_-:") - if (xml): - tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - else: - printablesLessRAbrack = "".join(c for c in printables if c not in ">") - tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ - Optional( Suppress("=") + tagAttrValue ) ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - closeTag = Combine(_L("</") + tagStr + ">") - - openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) - closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) - openTag.tag = resname - closeTag.tag = resname - return openTag, closeTag - -def makeHTMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches - tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. - - Example:: - text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' - # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple - a,a_end = makeHTMLTags("A") - link_expr = a + SkipTo(a_end)("link_text") + a_end - - for link in link_expr.searchString(text): - # attributes in the <A> tag (like "href" shown here) are also accessible as named results - print(link.link_text, '->', link.href) - prints:: - pyparsing -> http://pyparsing.wikispaces.com - """ - return _makeTags( tagStr, False ) - -def makeXMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for XML, given a tag name. Matches - tags only in the given upper/lower case. - - Example: similar to L{makeHTMLTags} - """ - return _makeTags( tagStr, True ) - -def withAttribute(*args,**attrDict): - """ - Helper to create a validating parse action to be used with start tags created - with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag - with a required attribute value, to avoid false matches on common tags such as - C{<TD>} or C{<DIV>}. - - Call C{withAttribute} with a series of attribute names and values. Specify the list - of filter attributes names and values as: - - keyword arguments, as in C{(align="right")}, or - - as an explicit dict with C{**} operator, when an attribute name is also a Python - reserved word, as in C{**{"class":"Customer", "align":"right"}} - - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) - For attribute names with a namespace prefix, you must use the second form. Attribute - names are matched insensitive to upper/lower case. - - If just testing for C{class} (with or without a namespace), use C{L{withClass}}. - - To verify that the attribute exists, but without specifying a value, pass - C{withAttribute.ANY_VALUE} as the value. - - Example:: - html = ''' - <div>
- Some text - <div type="grid">1 4 0 1 0</div> - <div type="graph">1,3 2,3 1,1</div> - <div>this has no type</div> - </div>
- - ''' - div,div_end = makeHTMLTags("div") - - # only match div tag having a type attribute with value "grid" - div_grid = div().setParseAction(withAttribute(type="grid")) - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - # construct a match with any div tag having a type attribute, regardless of the value - div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - if args: - attrs = args[:] - else: - attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: - if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) - if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % - (attrName, tokens[attrName], attrValue)) - return pa -withAttribute.ANY_VALUE = object() - -def withClass(classname, namespace=''): - """ - Simplified version of C{L{withAttribute}} when matching on a div class - made - difficult because C{class} is a reserved word in Python. - - Example:: - html = ''' -
<div> - Some text - <div class="grid">1 4 0 1 0</div> - <div class="graph">1,3 2,3 1,1</div> - <div>this &lt;div&gt; has no class</div> - </div>
- - ''' - div,div_end = makeHTMLTags("div") - div_grid = div().setParseAction(withClass("grid")) - - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) - -opAssoc = _Constants() -opAssoc.LEFT = object() -opAssoc.RIGHT = object() - -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """ - Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary or - binary, left- or right-associative. Parse actions can also be attached - to operator expressions. The generated parser will also recognize the use - of parentheses to override operator precedences (see example below). - - Note: if you define a deep operator list, you may see performance issues - when using infixNotation. See L{ParserElement.enablePackrat} for a - mechanism to potentially improve your parser performance. - - Parameters: - - baseExpr - expression representing the most basic element for the nested - - opList - list of tuples, one for each operator precedence level in the - expression grammar; each tuple is of the form - (opExpr, numTerms, rightLeftAssoc, parseAction), where: - - opExpr is the pyparsing expression for the operator; - may also be a string, which will be converted to a Literal; - if numTerms is 3, opExpr is a tuple of two expressions, for the - two operators separating the 3 terms - - numTerms is the number of terms for this operator (must - be 1, 2, or 3) - - rightLeftAssoc is the indicator whether the operator is - right or left associative, using the pyparsing-defined - constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
- - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) - - Example:: - # simple example of four-function arithmetic with ints and variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - prints:: - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") 
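# A minimal sketch (not from the vendored file above) of the infixNotation
# helper defined earlier, showing a right-associative operator level; assumes
# a standalone `pyparsing` install.
from pyparsing import Word, nums, oneOf, infixNotation, opAssoc

integer = Word(nums).setParseAction(lambda t: int(t[0]))
expr = infixNotation(integer, [
    ('**', 2, opAssoc.RIGHT),        # exponentiation groups right-to-left
    (oneOf('+ -'), 2, opAssoc.LEFT),
])
print(expr.parseString('2**3**2'))  # -> [[2, '**', [3, '**', 2]]]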
-quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. - - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. - - Example:: - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - prints:: - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = 
Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. - - Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - - A valid block must contain at least one C{blockStatement}. - - Example:: - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - prints:: - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = 
srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) -commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form C{/* ... */}" - -htmlComment = Regex(r"").setName("HTML comment") -"Comment of the form C{}" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form C{// ... (to end of line)}" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" - -javaStyleComment = cppStyleComment -"Same as C{L{cppStyleComment}}" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form C{# ... (to end of line)}" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
- This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """ - Here are some common low-level expressions that may be useful in jump-starting parser development: - - numeric forms (L{integers}, L{reals}, L{scientific notation}) - - common L{programming identifiers} - - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - - ISO8601 L{dates} and L{datetime} - - L{UUID} - - L{comma-separated list} - Parse actions: - - C{L{convertToInteger}} - - C{L{convertToFloat}} - - C{L{convertToDate}} - - C{L{convertToDatetime}} - - C{L{stripHTMLTags}} - - C{L{upcaseTokens}} - - C{L{downcaseTokens}} - - Example:: - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - prints:: - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) - """expression that parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating point number and returns a 
float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) - - Example:: - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - prints:: - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """ - Helper to create a parse action for converting parsed datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) - - Example:: - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - prints:: - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (C{yyyy-mm-dd})" - - iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """ - Parse action to remove HTML tags from web page HTML source - - Example:: - # strip HTML links from normal text - text = 'More info at the
pyparsing</a> wiki page</td>' - td,td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - - print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec = ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/INSTALLER b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/INSTALLER similarity index 100% rename from pkg_resources/_vendor/pyparsing-2.2.1.dist-info/INSTALLER rename to pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/INSTALLER diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE new file mode 100644 index 0000000000..1df6b3b8de --- /dev/null +++ b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. 
Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. 
This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. 
+Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA
new file mode 100644
index 0000000000..1ed963a12c
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA
@@ -0,0 +1,189 @@
+Metadata-Version: 2.1
+Name: typing_extensions
+Version: 4.4.0
+Summary: Backported and Experimental Type Hints for Python 3.7+
+Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
+Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee"
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Topic :: Software Development
+Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
+Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://typing.readthedocs.io/
+Project-URL: Home, https://github.com/python/typing_extensions
+Project-URL: Q & A, https://github.com/python/typing/discussions
+Project-URL: Repository, https://github.com/python/typing_extensions
+
+# Typing Extensions
+
+[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+  `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+  users on previous Python versions to use it too.
+- Enable experimentation with new type system PEPs before they are accepted and
+  added to the `typing` module.
+
+New features may be added to `typing_extensions` as soon as they are specified
+in a PEP that has been added to the [python/peps](https://github.com/python/peps)
+repository. If the PEP is accepted, the feature will then be added to `typing`
+for the next CPython release. No typing PEP has been rejected so far, so we
+haven't yet figured out how to deal with that possibility.
+
+Starting with version 4.0.0, `typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The major version is incremented
+for all backwards-incompatible changes. Therefore, it's safe to depend on
+`typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`, where `x.y`
+is the first version that includes all features you need.
+
+`typing_extensions` supports Python versions 3.7 and higher. In the future,
+support for older Python versions will be dropped some time after that version
+reaches end of life.
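+
+For example, a project that supports Python 3.7+ but wants `TypeGuard` (added
+to `typing` only in Python 3.10) would conventionally fall back to the
+backport. A minimal sketch of that import pattern:
+
+```python
+import sys
+
+if sys.version_info >= (3, 10):
+    from typing import TypeGuard  # provided by the standard library
+else:
+    from typing_extensions import TypeGuard  # same feature, backported
+```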
+
+## Included items
+
+This module currently contains the following:
+
+- Experimental features
+
+  - `override` (see PEP 698)
+  - The `default=` argument to `TypeVar`, `ParamSpec`, and `TypeVarTuple` (see PEP 696)
+  - The `infer_variance=` argument to `TypeVar` (see PEP 695)
+
+- In `typing` since Python 3.11
+
+  - `assert_never`
+  - `assert_type`
+  - `clear_overloads`
+  - `@dataclass_transform()` (see PEP 681)
+  - `get_overloads`
+  - `LiteralString` (see PEP 675)
+  - `Never`
+  - `NotRequired` (see PEP 655)
+  - `reveal_type`
+  - `Required` (see PEP 655)
+  - `Self` (see PEP 673)
+  - `TypeVarTuple` (see PEP 646; the `typing_extensions` version supports the `default=` argument from PEP 696)
+  - `Unpack` (see PEP 646)
+
+- In `typing` since Python 3.10
+
+  - `Concatenate` (see PEP 612)
+  - `ParamSpec` (see PEP 612; the `typing_extensions` version supports the `default=` argument from PEP 696)
+  - `ParamSpecArgs` (see PEP 612)
+  - `ParamSpecKwargs` (see PEP 612)
+  - `TypeAlias` (see PEP 613)
+  - `TypeGuard` (see PEP 647)
+  - `is_typeddict`
+
+- In `typing` since Python 3.9
+
+  - `Annotated` (see PEP 593)
+
+- In `typing` since Python 3.8
+
+  - `final` (see PEP 591)
+  - `Final` (see PEP 591)
+  - `Literal` (see PEP 586)
+  - `Protocol` (see PEP 544)
+  - `runtime_checkable` (see PEP 544)
+  - `TypedDict` (see PEP 589)
+  - `get_origin` (`typing_extensions` provides this function only in Python 3.7+)
+  - `get_args` (`typing_extensions` provides this function only in Python 3.7+)
+
+- In `typing` since Python 3.7
+
+  - `OrderedDict`
+
+- In `typing` since Python 3.5 or 3.6 (see [the typing documentation](https://docs.python.org/3.10/library/typing.html) for details)
+
+  - `AsyncContextManager`
+  - `AsyncGenerator`
+  - `AsyncIterable`
+  - `AsyncIterator`
+  - `Awaitable`
+  - `ChainMap`
+  - `ClassVar` (see PEP 526)
+  - `ContextManager`
+  - `Coroutine`
+  - `Counter`
+  - `DefaultDict`
+  - `Deque`
+  - `NewType`
+  - `NoReturn`
+  - `overload`
+  - `Text`
+  - `Type`
+  - `TYPE_CHECKING`
+  - `get_type_hints`
+
+- The following have always been present in `typing`, but the `typing_extensions` versions provide
+  additional features:
+
+  - `Any` (supports inheritance since Python 3.11)
+  - `NamedTuple` (supports multiple inheritance with `Generic` since Python 3.11)
+  - `TypeVar` (see PEPs 695 and 696)
+
+## Other Notes and Limitations
+
+Certain objects were changed after they were added to `typing`, and
+`typing_extensions` provides a backport even on newer Python versions:
+
+- `TypedDict` does not store runtime information
+  about which (if any) keys are non-required in Python 3.8, and does not
+  honor the `total` keyword with old-style `TypedDict()` in Python
+  3.9.0 and 3.9.1. `TypedDict` also does not support multiple inheritance
+  with `typing.Generic` on Python <3.11.
+- `get_origin` and `get_args` lack support for `Annotated` in
+  Python 3.8 and lack support for `ParamSpecArgs` and `ParamSpecKwargs`
+  in 3.9.
+- `@final` was changed in Python 3.11 to set the `.__final__` attribute.
+- `@overload` was changed in Python 3.11 to make function overloads
+  introspectable at runtime. In order to access overloads with
+  `typing_extensions.get_overloads()`, you must use
+  `@typing_extensions.overload`.
+- `NamedTuple` was changed in Python 3.11 to allow for multiple inheritance
+  with `typing.Generic`.
+- Since Python 3.11, it has been possible to inherit from `Any` at
+  runtime. `typing_extensions.Any` also provides this capability.
+- `TypeVar` gains two additional parameters, `default=` and `infer_variance=`, + in the draft PEPs 695 and 696, which are being considered for inclusion + in Python 3.12. + +There are a few types whose interface was modified between different +versions of typing. For example, `typing.Sequence` was modified to +subclass `typing.Reversible` as of Python 3.5.3. + +These changes are _not_ backported to prevent subtle compatibility +issues when mixing the differing implementations of modified classes. + +Certain types have incorrect runtime behavior due to limitations of older +versions of the typing module: + +- `ParamSpec` and `Concatenate` will not work with `get_args` and + `get_origin`. Certain PEP 612 special cases in user-defined + `Generic`s are also not available. + +These types are only guaranteed to work for static type checking. + +## Running tests + +To run tests, navigate into the appropriate source directory and run +`test_typing_extensions.py`. + diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD new file mode 100644 index 0000000000..b9e1bb0391 --- /dev/null +++ b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/typing_extensions.cpython-311.pyc,, +typing_extensions-4.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.4.0.dist-info/LICENSE,sha256=x6-2XnVXB7n7kEhziaF20-09ADHVExr95FwjcV_16JE,12787 +typing_extensions-4.4.0.dist-info/METADATA,sha256=1zSh1eMLnLkLMMC6aZSGRKx3eRnivEGDFWGSVD1zqhA,7249 +typing_extensions-4.4.0.dist-info/RECORD,, +typing_extensions-4.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +typing_extensions-4.4.0.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 +typing_extensions.py,sha256=ipqWiq5AHzrwczt6c26AP05Llh6a5_GaXRpOBqbogHA,80078 diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/REQUESTED b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/REQUESTED similarity index 100% rename from setuptools/_vendor/importlib_metadata-4.11.1.dist-info/REQUESTED rename to pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL new file mode 100644 index 0000000000..668ba4d015 --- /dev/null +++ b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.7.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/pkg_resources/_vendor/typing_extensions.py b/pkg_resources/_vendor/typing_extensions.py new file mode 100644 index 0000000000..ef42417c20 --- /dev/null +++ b/pkg_resources/_vendor/typing_extensions.py @@ -0,0 +1,2209 @@ +import abc +import collections +import collections.abc +import functools +import operator +import sys +import types as _types +import typing + + +__all__ = [ + # Super-special typing primitives. + 'Any', + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVar', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'ChainMap', + + # Concrete collection types. 
+ 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'NamedTuple', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsIndex', + + # One-off things. + 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'get_overloads', + 'final', + 'get_args', + 'get_origin', + 'get_type_hints', + 'IntVar', + 'is_typeddict', + 'Literal', + 'NewType', + 'overload', + 'override', + 'Protocol', + 'reveal_type', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeGuard', + 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'Required', + 'NotRequired', +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. + +_marker = object() + + +def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}") + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. 
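+
+# Illustrative sketch of the two helpers above (not used by the code below;
+# `T` and `KT` are the module-level type variables just defined):
+#
+#     _collect_type_vars((T, typing.List[KT]))    # == (T, KT), first-appearance order
+#     _check_generic(typing.List, (int, str), 1)  # raises TypeError (too many parameters)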
+ + +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError("typing_extensions.Any cannot be used with isinstance()") + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + +ClassVar = typing.ClassVar + +# On older versions of typing there is an internal class named "Final". +# 3.8+ +if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): + Final = typing.Final +# 3.7 +else: + class _FinalForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Final = _FinalForm('Final', + doc="""A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. + For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties.""") + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# 3.8+: +if hasattr(typing, 'Literal'): + Literal = typing.Literal +# 3.7: +else: + class _LiteralForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + return typing._GenericAlias(self, parameters) + + Literal = _LiteralForm('Literal', + doc="""A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. 
For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy # noqa + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. + pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. 
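+
+# Illustrative sketch of the overload registry above (mirrors the docstring
+# example; shown as a comment so it is not executed at import time):
+#
+#     @overload
+#     def utf8(value: None) -> None: ...
+#     @overload
+#     def utf8(value: str) -> bytes: ...
+#     def utf8(value):
+#         return None if value is None else value.encode()
+#
+#     get_overloads(utf8)  # returns the two stubs, in definition order
+#     clear_overloads()    # empties the registry again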
+
+
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+ContextManager = typing.ContextManager
+AsyncContextManager = typing.AsyncContextManager
+DefaultDict = typing.DefaultDict
+
+# 3.7.2+
+if hasattr(typing, 'OrderedDict'):
+    OrderedDict = typing.OrderedDict
+# 3.7.0-3.7.2
+else:
+    OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
+
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+AsyncGenerator = typing.AsyncGenerator
+NewType = typing.NewType
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+_PROTO_WHITELIST = ['Callable', 'Awaitable',
+                    'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
+                    'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
+                    'ContextManager', 'AsyncContextManager']
+
+
+def _get_protocol_attrs(cls):
+    attrs = set()
+    for base in cls.__mro__[:-1]:  # without object
+        if base.__name__ in ('Protocol', 'Generic'):
+            continue
+        annotations = getattr(base, '__annotations__', {})
+        for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+            if (not attr.startswith('_abc_') and attr not in (
+                    '__abstractmethods__', '__annotations__', '__weakref__',
+                    '_is_protocol', '_is_runtime_protocol', '__dict__',
+                    '__args__', '__slots__',
+                    '__next_in_mro__', '__parameters__', '__origin__',
+                    '__orig_bases__', '__extra__', '__tree_hash__',
+                    '__doc__', '__subclasshook__', '__init__', '__new__',
+                    '__module__', '_MutableMapping__marker', '_gorg')):
+                attrs.add(attr)
+    return attrs
+
+
+def _is_callable_members_only(cls):
+    return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
+
+
+def _maybe_adjust_parameters(cls):
+    """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__.
+
+    The contents of this function are very similar
+    to logic found in typing.Generic.__init_subclass__
+    on the CPython main branch.
+    """
+    tvars = []
+    if '__orig_bases__' in cls.__dict__:
+        tvars = typing._collect_type_vars(cls.__orig_bases__)
+        # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
+        # If found, tvars must be a subset of it.
+        # If not found, tvars is it.
+        # Also check for and reject plain Generic,
+        # and reject multiple Generic[...] and/or Protocol[...].
+        gvars = None
+        for base in cls.__orig_bases__:
+            if (isinstance(base, typing._GenericAlias) and
+                    base.__origin__ in (typing.Generic, Protocol)):
+                # for error messages
+                the_base = base.__origin__.__name__
+                if gvars is not None:
+                    raise TypeError(
+                        "Cannot inherit from Generic[...]"
+                        " and/or Protocol[...] multiple times.")
+                gvars = base.__parameters__
+        if gvars is None:
+            gvars = tvars
+        else:
+            tvarset = set(tvars)
+            gvarset = set(gvars)
+            if not tvarset <= gvarset:
+                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
+                s_args = ', '.join(str(g) for g in gvars)
+                raise TypeError(f"Some type variables ({s_vars}) are"
+                                f" not listed in {the_base}[{s_args}]")
+            tvars = gvars
+    cls.__parameters__ = tuple(tvars)
+
+
+# 3.8+
+if hasattr(typing, 'Protocol'):
+    Protocol = typing.Protocol
+# 3.7
+else:
+
+    def _no_init(self, *args, **kwargs):
+        if type(self)._is_protocol:
+            raise TypeError('Protocols cannot be instantiated')
+
+    class _ProtocolMeta(abc.ABCMeta):  # noqa: B024
+        # This metaclass is a bit unfortunate and exists only because of the lack
+        # of __instancehook__.
+        def __instancecheck__(cls, instance):
+            # We need this method for situations where attributes are
+            # assigned in __init__.
+ if ((not getattr(cls, '_is_protocol', False) or + _is_callable_members_only(cls)) and + issubclass(instance.__class__, cls)): + return True + if cls._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): + return True + return super().__instancecheck__(instance) + + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this while these live in two different + # modules. The duplicated code will be removed when Protocol is moved to typing. + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... + """ + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) # noqa + if cls is Protocol: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. 
+ def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. + for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init + + +# 3.8+ +if hasattr(typing, 'runtime_checkable'): + runtime_checkable = typing.runtime_checkable +# 3.7 +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. + + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. + """ + if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: + raise TypeError('@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + return cls + + +# Exists for backwards compatibility. +runtime = runtime_checkable + + +# 3.8+ +if hasattr(typing, 'SupportsIndex'): + SupportsIndex = typing.SupportsIndex +# 3.7 +else: + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + +if hasattr(typing, "Required"): + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals['__name__'] not in ['abc', + 'functools', + 'typing']: + # Typed dicts are only for static structural subtyping. 
+ raise TypeError('TypedDict does not support instance and class checks') + except (AttributeError, ValueError): + pass + return False + + def _dict_new(*args, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + return dict(*args, **kwargs) + + _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' + + def _typeddict_new(*args, total=True, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + if args: + typename, args = args[0], args[1:] # allow the "_typename" keyword be passed + elif '_typename' in kwargs: + typename = kwargs.pop('_typename') + import warnings + warnings.warn("Passing '_typename' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError("TypedDict.__new__() missing 1 required positional " + "argument: '_typename'") + if args: + try: + fields, = args # allow the "_fields" keyword be passed + except ValueError: + raise TypeError('TypedDict.__new__() takes from 2 to 3 ' + f'positional arguments but {len(args) + 2} ' + 'were given') + elif '_fields' in kwargs and len(kwargs) == 1: + fields = kwargs.pop('_fields') + import warnings + warnings.warn("Passing '_fields' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + fields = None + + if fields is None: + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + + ns = {'__annotations__': dict(fields)} + try: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(typename, (), ns, total=total) + + _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' + ' /, *, total=True, **kwargs)') + + class _TypedDictMeta(type): + def __init__(cls, name, bases, ns, total=True): + super().__init__(name, bases, ns) + + def __new__(cls, name, bases, ns, total=True): + # Create new typed dict class object. + # This method is called directly when TypedDict is subclassed, + # or via _typeddict_new when TypedDict is instantiated. This way + # TypedDict supports all three syntaxes described in its docstring. + # Subclasses and instances of TypedDict return actual dictionaries + # via _dict_new. + ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + # Don't insert typing.Generic into __bases__ here, + # or Generic.__init_subclass__ will raise TypeError + # in the super().__new__() call. + # Instead, monkey-patch __bases__ onto the class after it's been created. 
+ tp_dict = super().__new__(cls, name, (dict,), ns) + + if any(issubclass(base, typing.Generic) for base in bases): + tp_dict.__bases__ = (typing.Generic, dict) + _maybe_adjust_parameters(tp_dict) + + annotations = {} + own_annotations = ns.get('__annotations__', {}) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + own_annotations = { + n: typing._type_check(tp, msg) for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + + for base in bases: + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) + + annotations.update(own_annotations) + for annotation_key, annotation_type in own_annotations.items(): + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(annotation_key) + elif annotation_origin is NotRequired: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __instancecheck__ = __subclasscheck__ = _check_fails + + TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) + TypedDict.__module__ = __name__ + TypedDict.__doc__ = \ + """A simple typed name space. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type that expects all of its + instances to have a certain set of keys, with each key + associated with a value of a consistent type. This expectation + is not checked at runtime but is only enforced by type checkers. + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. + TypedDict supports two additional equivalent forms:: + + Point2D = TypedDict('Point2D', x=int, y=int, label=str) + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + The class syntax is only supported in Python 3.6+, while two other + syntax forms work for Python 2.7 and 3.2+ + """ + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(__val, __typ): + """Assert (to the type checker) that the value is of the given type. 
+ + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + return __val + + +if hasattr(typing, "Required"): + get_type_hints = typing.get_type_hints +else: + import functools + import types + + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return types.GenericAlias(t.__origin__, stripped_args) + if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 +else: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. 
+ + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. + """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__,) + self.__metadata__ + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. + - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. 
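+
+# Illustrative sketch (assuming the Annotated backport above): the
+# Annotated-aware helpers defined below behave as follows:
+#
+#     get_origin(Annotated[int, "unit"]) is Annotated   # True
+#     get_args(Annotated[int, "unit"])                  # == (int, "unit")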
+if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.7-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeAliasForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 +else: + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + TypeAlias = _TypeAliasForm('TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. 
+ + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""") + + +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + + def __init__(self, default): + if isinstance(default, (tuple, list)): + self.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default: + self.__default__ = typing._type_check(default, "Default must be a type") + else: + self.__default__ = None + + +# Add default and infer_variance parameters from PEP 696 and 695 +class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): + """Type variable.""" + + __module__ = 'typing' + + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=None, infer_variance=False): + super().__init__(name, *constraints, bound=bound, covariant=covariant, + contravariant=contravariant) + _DefaultMixin.__init__(self, default) + self.__infer_variance__ = infer_variance + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.7-3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. 
+ """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.kwargs" + + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + +# 3.10+ +if hasattr(typing, 'ParamSpec'): + + # Add default Parameter - PEP 696 + class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): + """Parameter specification variable.""" + + __module__ = 'typing' + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + default=None): + super().__init__(name, bound=bound, covariant=covariant, + contravariant=contravariant) + _DefaultMixin.__init__(self, default) + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + +# 3.7-3.9 +else: + + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class ParamSpec(list, _DefaultMixin): + """Parameter specification variable. + + Usage:: + + P = ParamSpec('P') + + Parameter specification variables exist primarily for the benefit of static + type checkers. They are used to forward the parameter types of one + callable to another callable, a pattern commonly found in higher order + functions and decorators. They are only valid when used in ``Concatenate``, + or s the first argument to ``Callable``. In Python 3.10 and higher, + they are also supported in user-defined Generics at runtime. + See class Generic for more information on generic types. An + example for annotating a decorator:: + + T = TypeVar('T') + P = ParamSpec('P') + + def add_logging(f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner + + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y + + Parameter specification variables defined with covariant=True or + contravariant=True can be used to declare covariant or contravariant + generic types. These keyword arguments are valid, but their actual semantics + are yet to be decided. See PEP 612 for details. + + Parameter specification variables can be introspected. e.g.: + + P.__name__ == 'T' + P.__bound__ == None + P.__covariant__ == False + P.__contravariant__ == False + + Note that only parameter specification variables defined in global scope can + be pickled. + """ + + # Trick Generic __parameters__. 
+ __class__ = typing.TypeVar + + @property + def args(self): + return ParamSpecArgs(self) + + @property + def kwargs(self): + return ParamSpecKwargs(self) + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + default=None): + super().__init__([self]) + self.__name__ = name + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if bound: + self.__bound__ = typing._type_check(bound, 'Bound must be a type.') + else: + self.__bound__ = None + _DefaultMixin.__init__(self, default) + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + # Hack to get typing._type_check to pass. + def __call__(self, *args, **kwargs): + pass + + +# 3.7-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + __class__ = typing._GenericAlias + + # Flag in 3.8. + _special = False + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + +# 3.7-3.9 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not isinstance(parameters[-1], ParamSpec): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, 'Concatenate'): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_TypeAliasForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) +# 3.7-8 +else: + class _ConcatenateForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' 
+ self._name + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeGuardForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeGuardForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 +else: + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. 
The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard. Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+           is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results. The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant. The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables. For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """)
+
+
+# Vendored from cpython typing._SpecialForm
+class _SpecialForm(typing._Final, _root=True):
+    __slots__ = ('_name', '__doc__', '_getitem')
+
+    def __init__(self, getitem):
+        self._getitem = getitem
+        self._name = getitem.__name__
+        self.__doc__ = getitem.__doc__
+
+    def __getattr__(self, item):
+        if item in {'__name__', '__qualname__'}:
+            return self._name
+
+        raise AttributeError(item)
+
+    def __mro_entries__(self, bases):
+        raise TypeError(f"Cannot subclass {self!r}")
+
+    def __repr__(self):
+        return f'typing_extensions.{self._name}'
+
+    def __reduce__(self):
+        return self._name
+
+    def __call__(self, *args, **kwds):
+        raise TypeError(f"Cannot instantiate {self!r}")
+
+    def __or__(self, other):
+        return typing.Union[self, other]
+
+    def __ror__(self, other):
+        return typing.Union[other, self]
+
+    def __instancecheck__(self, obj):
+        raise TypeError(f"{self} cannot be used with isinstance()")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError(f"{self} cannot be used with issubclass()")
+
+    @typing._tp_cache
+    def __getitem__(self, parameters):
+        return self._getitem(self, parameters)
+
+
+if hasattr(typing, "LiteralString"):
+    LiteralString = typing.LiteralString
+else:
+    @_SpecialForm
+    def LiteralString(self, params):
+        """Represents an arbitrary literal string.
+
+        Example::
+
+          from typing_extensions import LiteralString
+
+          def query(sql: LiteralString) -> ...:
+              ...
+
+          query("SELECT * FROM table") # ok
+          query(f"SELECT * FROM {input()}") # not ok
+
+        See PEP 675 for details.
+
+        """
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Self"):
+    Self = typing.Self
+else:
+    @_SpecialForm
+    def Self(self, params):
+        """Used to spell the type of "self" in classes.
+
+        Example::
+
+          from typing import Self
+
+          class ReturnsSelf:
+              def parse(self, data: bytes) -> Self:
+                  ...
+                  return self
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Never"):
+    Never = typing.Never
+else:
+    @_SpecialForm
+    def Never(self, params):
+        """The bottom type, a type that has no members.
+ + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): + class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) + + +if hasattr(typing, "Unpack"): # 3.11+ + Unpack = typing.Unpack +elif sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @_UnpackSpecialForm + def Unpack(self, parameters): + """A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... 
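+
+        ``Unpack[Shape]`` plays the role of the ``*Shape`` star syntax that
+        Python 3.11 can spell natively (see PEP 646).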
+
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return _UnpackAlias(self, (item,))
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+else:
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        __class__ = typing.TypeVar
+
+    class _UnpackForm(typing._SpecialForm, _root=True):
+        def __repr__(self):
+            return 'typing_extensions.' + self._name
+
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return _UnpackAlias(self, (item,))
+
+    Unpack = _UnpackForm(
+        'Unpack',
+        doc="""A special typing construct to unpack a variadic type. For example:
+
+            Shape = TypeVarTuple('Shape')
+            Batch = NewType('Batch', int)
+
+            def add_batch_axis(
+                x: Array[Unpack[Shape]]
+            ) -> Array[Batch, Unpack[Shape]]: ...
+
+        """)
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+
+if hasattr(typing, "TypeVarTuple"):  # 3.11+
+
+    # Add default Parameter - PEP 696
+    class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True):
+        """Type variable tuple."""
+
+        def __init__(self, name, *, default=None):
+            super().__init__(name)
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            try:
+                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
+            except (AttributeError, ValueError):
+                def_mod = None
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+else:
+    class TypeVarTuple(_DefaultMixin):
+        """Type variable tuple.
+
+        Usage::
+
+            Ts = TypeVarTuple('Ts')
+
+        In the same way that a normal type variable is a stand-in for a single
+        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
+        type such as ``Tuple[int, str]``.
+
+        Type variable tuples can be used in ``Generic`` declarations.
+        Consider the following example::
+
+            class Array(Generic[*Ts]): ...
+
+        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
+        where ``T1`` and ``T2`` are type variables. To use these type variables
+        as type parameters of ``Array``, we must *unpack* the type variable tuple using
+        the star operator: ``*Ts``. The signature of ``Array`` then behaves
+        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
+        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
+        us to parameterise the class with an *arbitrary* number of type parameters.
+
+        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
+        This includes class definitions, as shown above, as well as function
+        signatures and variable annotations::
+
+            class Array(Generic[*Ts]):
+
+                def __init__(self, shape: Tuple[*Ts]):
+                    self._shape: Tuple[*Ts] = shape
+
+                def get_shape(self) -> Tuple[*Ts]:
+                    return self._shape
+
+            shape = (Height(480), Width(640))
+            x: Array[Height, Width] = Array(shape)
+            y = abs(x)  # Inferred type is Array[Height, Width]
+            z = x + x   # ...    is Array[Height, Width]
+            x.get_shape()  # ...    is tuple[Height, Width]
+
+        """
+
+        # Trick Generic __parameters__.
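+        # Same impersonation trick as in ParamSpec above, so that Generic
+        # accepts a TypeVarTuple as one of its type parameters.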
+ __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name, *, default=None): + self.__name__ = name + _DefaultMixin.__init__(self, default) + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): + reveal_type = typing.reveal_type +else: + def reveal_type(__obj: T) -> T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) + return __obj + + +if hasattr(typing, "assert_never"): + assert_never = typing.assert_never +else: + def assert_never(__arg: Never) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + raise AssertionError("Expected code to be unreachable") + + +if hasattr(typing, 'dataclass_transform'): + dataclass_transform = typing.dataclass_transform +else: + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. 
+    - ``order_default`` indicates whether the ``order`` parameter is
+      assumed to be True or False if it is omitted by the caller.
+    - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+      assumed to be True or False if it is omitted by the caller.
+    - ``field_specifiers`` specifies a static list of supported classes
+      or functions that describe fields, similar to ``dataclasses.field()``.
+
+    At runtime, this decorator records its arguments in the
+    ``__dataclass_transform__`` attribute on the decorated object.
+
+    See PEP 681 for details.
+
+    """
+    def decorator(cls_or_fn):
+        cls_or_fn.__dataclass_transform__ = {
+            "eq_default": eq_default,
+            "order_default": order_default,
+            "kw_only_default": kw_only_default,
+            "field_specifiers": field_specifiers,
+            "kwargs": kwargs,
+        }
+        return cls_or_fn
+    return decorator
+
+
+if hasattr(typing, "override"):
+    override = typing.override
+else:
+    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
+
+    def override(__arg: _F) -> _F:
+        """Indicate that a method is intended to override a method in a base class.
+
+        Usage:
+
+            class Base:
+                def method(self) -> None: ...
+
+            class Child(Base):
+                @override
+                def method(self) -> None:
+                    super().method()
+
+        When this decorator is applied to a method, the type checker will
+        validate that it overrides a method with the same name on a base class.
+        This helps prevent bugs that may occur when a base class is changed
+        without an equivalent change to a child class.
+
+        See PEP 698 for details.
+
+        """
+        return __arg
+
+
+# We have to do some monkey patching to deal with the dual nature of
+# Unpack/TypeVarTuple:
+# - We want Unpack to be a kind of TypeVar so it gets accepted in
+#   Generic[Unpack[Ts]]
+# - We want it to *not* be treated as a TypeVar for the purposes of
+#   counting generic parameters, so that when we subscript a generic,
+#   the runtime doesn't try to substitute the Unpack with the subscripted type.
+if not hasattr(typing, "TypeVarTuple"):
+    typing._collect_type_vars = _collect_type_vars
+    typing._check_generic = _check_generic
+
+
+# Backport typing.NamedTuple as it exists in Python 3.11.
+# In 3.11, the ability to define generic `NamedTuple`s was added.
+# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
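+#
+# As a rough sketch (`T` and `Group` are illustrative names only), this
+# backport is what makes a definition like the following behave uniformly
+# on the older versions:
+#
+#     T = TypeVar('T')
+#
+#     class Group(NamedTuple, Generic[T]):
+#         key: str
+#         members: List[T]
+#
+#     Group[int](key='ids', members=[1, 2])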
+if sys.version_info >= (3, 11): + NamedTuple = typing.NamedTuple +else: + def _caller(): + try: + return sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + types = ns.get('__annotations__', {}) + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + def NamedTuple(__typename, __fields=None, **kwargs): + if __fields is None: + __fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(__typename, __fields, module=_caller()) + + NamedTuple.__doc__ = typing.NamedTuple.__doc__ + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + # On 3.8+, alter the signature so that it matches typing.NamedTuple. + # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, + # so just leave the signature as it is on 3.7. 
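+    # In other words, both `NamedTuple('Point', [('x', int)])` and
+    # `NamedTuple('Point', x=int)` are accepted, with `typename` and `fields`
+    # being positional-only.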
+ if sys.version_info >= (3, 8): + NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + NamedTuple.__mro_entries__ = _namedtuple_mro_entries diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt index d5dbe736d6..6fafd437ac 100644 --- a/pkg_resources/_vendor/vendored.txt +++ b/pkg_resources/_vendor/vendored.txt @@ -1,8 +1,11 @@ -packaging==21.3 -pyparsing==2.2.1 -appdirs==1.4.3 +packaging==23.0 + +platformdirs==2.6.2 +# required for platformdirs on Python < 3.8 +typing_extensions==4.4.0 + jaraco.text==3.7.0 # required for jaraco.text on older Pythons -importlib_resources==5.4.0 +importlib_resources==5.10.2 # required for importlib_resources on older Pythons zipp==3.7.0 diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD index 38d0b21ad9..0a88551ce0 100644 --- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD @@ -1,4 +1,4 @@ -__pycache__/zipp.cpython-310.pyc,, +__pycache__/zipp.cpython-311.pyc,, zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261 diff --git a/pkg_resources/api_tests.txt b/pkg_resources/api_tests.txt index ded18800fe..d72b85aa37 100644 --- a/pkg_resources/api_tests.txt +++ b/pkg_resources/api_tests.txt @@ -338,49 +338,72 @@ Environment Markers >>> import os >>> print(im("sys_platform")) - Invalid marker: 'sys_platform', parse error at '' + Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in + sys_platform + ^ >>> print(im("sys_platform==")) - Invalid marker: 'sys_platform==', parse error at '' + Expected a marker variable or quoted string + sys_platform== + ^ >>> print(im("sys_platform=='win32'")) False >>> print(im("sys=='x'")) - Invalid marker: "sys=='x'", parse error at "sys=='x'" + Expected a marker variable or quoted string + sys=='x' + ^ >>> print(im("(extra)")) - Invalid marker: '(extra)', parse error at ')' + Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in + (extra) + ^ >>> print(im("(extra")) - Invalid marker: '(extra', parse error at '' + Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in + (extra + ^ >>> print(im("os.open('foo')=='y'")) - Invalid marker: "os.open('foo')=='y'", parse error at 'os.open(' + Expected a marker variable or quoted string + os.open('foo')=='y' + ^ >>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit! - Invalid marker: "'x'=='y' and os.open('foo')=='y'", parse error at 'and os.o' + Expected a marker variable or quoted string + 'x'=='y' and os.open('foo')=='y' + ^ >>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit! 
- Invalid marker: "'x'=='x' or os.open('foo')=='y'", parse error at 'or os.op' - - >>> print(im("'x' < 'y' < 'z'")) - Invalid marker: "'x' < 'y' < 'z'", parse error at "< 'z'" + Expected a marker variable or quoted string + 'x'=='x' or os.open('foo')=='y' + ^ >>> print(im("r'x'=='x'")) - Invalid marker: "r'x'=='x'", parse error at "r'x'=='x" + Expected a marker variable or quoted string + r'x'=='x' + ^ >>> print(im("'''x'''=='x'")) - Invalid marker: "'''x'''=='x'", parse error at "'x'''=='" + Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in + '''x'''=='x' + ^ >>> print(im('"""x"""=="x"')) - Invalid marker: '"""x"""=="x"', parse error at '"x"""=="' + Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in + """x"""=="x" + ^ >>> print(im(r"x\n=='x'")) - Invalid marker: "x\\n=='x'", parse error at "x\\n=='x'" + Expected a marker variable or quoted string + x\n=='x' + ^ >>> print(im("os.open=='y'")) - Invalid marker: "os.open=='y'", parse error at 'os.open=' + Expected a marker variable or quoted string + os.open=='y' + ^ >>> em("sys_platform=='win32'") == (sys.platform=='win32') True diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index 70897eea62..948bcc6094 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -58,7 +58,8 @@ def find_spec(self, fullname, path=None, target=None): """Return a module spec for vendored names.""" return ( importlib.util.spec_from_loader(fullname, self) - if self._module_matches_namespace(fullname) else None + if self._module_matches_namespace(fullname) + else None ) def install(self): @@ -70,7 +71,10 @@ def install(self): names = ( - 'packaging', 'pyparsing', 'appdirs', 'jaraco', 'importlib_resources', + 'packaging', + 'platformdirs', + 'jaraco', + 'importlib_resources', 'more_itertools', ) VendorImporter(__name__, names).install() diff --git a/pkg_resources/tests/test_markers.py b/pkg_resources/tests/test_markers.py index 15a3b499a6..9306d5b348 100644 --- a/pkg_resources/tests/test_markers.py +++ b/pkg_resources/tests/test_markers.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock from pkg_resources import evaluate_marker diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index 6518820e6f..fd5cc8ceb1 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -9,10 +9,7 @@ import distutils.dist import distutils.command.install_egg_info -try: - from unittest import mock -except ImportError: - import mock +from unittest import mock from pkg_resources import ( DistInfoDistribution, Distribution, EggInfoDistribution, @@ -259,6 +256,10 @@ def make_distribution_no_version(tmpdir, basename): ('dist-info', 'METADATA', DistInfoDistribution), ], ) +@pytest.mark.xfail( + sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', + reason="https://github.com/python/cpython/issues/103632", +) def test_distribution_version_missing( tmpdir, suffix, expected_filename, expected_dist_type): """ @@ -289,6 +290,10 @@ def test_distribution_version_missing( assert type(dist) == expected_dist_type +@pytest.mark.xfail( + sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', + reason="https://github.com/python/cpython/issues/103632", +) def test_distribution_version_missing_undetected_path(): """ Test Distribution.version when the "Version" header is missing and diff --git a/pkg_resources/tests/test_resources.py 
b/pkg_resources/tests/test_resources.py index 107dda7bab..baf477dba4 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -319,6 +319,10 @@ def test_marker_evaluation_with_extras_loop(self): res = list(ws.resolve(parse_requirements("a"), ad)) assert res == [a, c, b, foo] + @pytest.mark.xfail( + sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', + reason="https://github.com/python/cpython/issues/103632", + ) def testDistroDependsOptions(self): d = self.distRequires(""" Twisted>=1.5 @@ -830,10 +834,12 @@ def test_two_levels_deep(self, symlinked_tmpdir): pkg2.ensure_dir() (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8') (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8') - import pkg1 + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import pkg1 assert "pkg1" in pkg_resources._namespace_packages # attempt to import pkg2 from site-pkgs2 - import pkg1.pkg2 + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import pkg1.pkg2 # check the _namespace_packages dict assert "pkg1.pkg2" in pkg_resources._namespace_packages assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"] @@ -874,8 +880,9 @@ def test_path_order(self, symlinked_tmpdir): (subpkg / '__init__.py').write_text( vers_str % number, encoding='utf-8') - import nspkg.subpkg - import nspkg + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import nspkg.subpkg + import nspkg expected = [ str(site.realpath() / 'nspkg') for site in site_dirs diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py index db13c7149b..575656ee8f 100644 --- a/pkg_resources/tests/test_working_set.py +++ b/pkg_resources/tests/test_working_set.py @@ -42,7 +42,7 @@ def parse_distributions(s): continue fields = spec.split('\n', 1) assert 1 <= len(fields) <= 2 - name, version = fields.pop(0).split('-') + name, version = fields.pop(0).rsplit('-', 1) if fields: requires = textwrap.dedent(fields.pop(0)) metadata = Metadata(('requires.txt', requires)) @@ -465,6 +465,25 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] VersionConflict ''', + + ''' + # id + wanted_normalized_name_installed_canonical + + # installed + foo.bar-3.6 + + # installable + + # wanted + foo-bar==3.6 + + # resolved + foo.bar-3.6 + + # resolved [replace conflicting] + foo.bar-3.6 + ''', ) def test_working_set_resolve(installed_dists, installable_dists, requirements, replace_conflicting, resolved_dists_or_exception): diff --git a/pyproject.toml b/pyproject.toml index 6b426a3755..6b5fbf7177 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,19 +8,19 @@ skip-string-normalization = true [tool.setuptools_scm] -[pytest.enabler.black] +[tool.pytest-enabler.black] #addopts = "--black" -[pytest.enabler.mypy] +[tool.pytest-enabler.mypy] #addopts = "--mypy" -[pytest.enabler.flake8] -addopts = "--flake8" - -[pytest.enabler.cov] +[tool.pytest-enabler.cov] addopts = "--cov" -[pytest.enabler.xdist] +[tool.pytest-enabler.ruff] +addopts = "--ruff" + +[tool.pytest-enabler.xdist] addopts = "-n auto" [tool.towncrier] diff --git a/pytest.ini b/pytest.ini index 4a5ad50d30..19043d3f18 100644 --- a/pytest.ini +++ b/pytest.ini @@ -10,21 +10,27 @@ filterwarnings= error ## upstream - # Suppress deprecation warning in flake8 - ignore:SelectableGroups dict interface is deprecated::flake8 + + # Ensure ResourceWarnings are emitted + 
default::ResourceWarning # shopkeep/pytest-black#55 ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning ignore:The \(fspath. py.path.local\) argument to BlackItem is deprecated.:pytest.PytestDeprecationWarning ignore:BlackItem is an Item subclass and should not be a collector:pytest.PytestWarning - # tholo/pytest-flake8#83 - ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning - ignore:The \(fspath. py.path.local\) argument to Flake8Item is deprecated.:pytest.PytestDeprecationWarning - ignore:Flake8Item is an Item subclass and should not be a collector:pytest.PytestWarning + # shopkeep/pytest-black#67 + ignore:'encoding' argument not specified::pytest_black + + # realpython/pytest-mypy#152 + ignore:'encoding' argument not specified::pytest_mypy + + # python/cpython#100750 + ignore:'encoding' argument not specified::platform + + # Dependencies might not have been updated yet + default:onerror argument is deprecated, use onexc instead - # dbader/pytest-mypy#131 - ignore:The \(fspath. py.path.local\) argument to MypyFile is deprecated.:pytest.PytestDeprecationWarning ## end upstream # https://github.com/pypa/setuptools/issues/1823 @@ -37,7 +43,7 @@ filterwarnings= ignore:The Windows bytes API has been deprecated:DeprecationWarning # https://github.com/pypa/setuptools/issues/2823 - ignore:setuptools.installer is deprecated. + ignore:setuptools.installer and fetch_build_eggs are deprecated. # https://github.com/pypa/setuptools/issues/917 ignore:setup.py install is deprecated. @@ -58,3 +64,23 @@ filterwarnings= # https://github.com/pytest-dev/pytest/discussions/9296 ignore:Distutils was imported before setuptools ignore:Setuptools is replacing distutils + + # suppress warnings in deprecated msvc compilers + ignore:(bcpp|msvc9?)compiler is deprecated + + ignore:Support for .* in .pyproject.toml. is still .beta. + ignore::setuptools.command.editable_wheel.InformationOnly + + # https://github.com/pypa/setuptools/issues/3655 + ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning + + # Workarounds for pypa/setuptools#3810 + # Can't use EncodingWarning as it doesn't exist on Python 3.9 + default:'encoding' argument not specified + default:UTF-8 Mode affects locale.getpreferredencoding(). 
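+
+    # (filterwarnings entries use the "action:message:category:module:lineno"
+    # filter syntax; a "default" action reports matching warnings instead of
+    # promoting them to errors via the blanket "error" entry above)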
+ + # Avoid errors when testing pkg_resources.declare_namespace + ignore:.*pkg_resources\.declare_namespace.*:DeprecationWarning + + # suppress known deprecation + ignore:pkg_resources is deprecated:DeprecationWarning diff --git a/setup.cfg b/setup.cfg index 6171f62419..18ae634e14 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 60.9.3 +version = 67.7.2 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages @@ -19,6 +19,7 @@ classifiers = keywords = CPAN PyPI distutils eggs package management project_urls = Documentation = https://setuptools.pypa.io/ + Changelog = https://setuptools.pypa.io/en/stable/history.html [options] packages = find_namespace: @@ -42,7 +43,6 @@ testing = # upstream pytest >= 6 pytest-checkdocs >= 2.4 - pytest-flake8 pytest-black >= 0.3.7; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" @@ -52,11 +52,11 @@ testing = pytest-mypy >= 0.9.1; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" - pytest-enabler >= 1.0.1 - pytest-perf + pytest-enabler >= 1.3 + # workaround for pypa/setuptools#3921 + pytest-ruff; sys_platform != "cygwin" # local - mock flake8-2020 virtualenv>=13.0.0 wheel @@ -67,6 +67,10 @@ testing = build[virtualenv] filelock>=3.4.0 pip_run>=8.8 + ini2toml[lite]>=0.9 + tomli-w>=1.0.0 + pytest-timeout + pytest-perf testing-integration = pytest @@ -80,20 +84,25 @@ testing-integration = build[virtualenv] filelock>=3.4.0 - docs = # upstream - sphinx + sphinx >= 3.5 jaraco.packaging >= 9 rst.linker >= 1.9 + furo + sphinx-lint + + # tidelift jaraco.tidelift >= 1.4 # local pygments-github-lexers==0.0.5 sphinx-favicon sphinx-inline-tabs + sphinx-reredirects sphinxcontrib-towncrier - furo + sphinx-notfound-page == 0.8.3 + sphinx-hoverxref < 2 ssl = @@ -104,12 +113,14 @@ distutils.commands = alias = setuptools.command.alias:alias bdist_egg = setuptools.command.bdist_egg:bdist_egg bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm + build = setuptools.command.build:build build_clib = setuptools.command.build_clib:build_clib build_ext = setuptools.command.build_ext:build_ext build_py = setuptools.command.build_py:build_py develop = setuptools.command.develop:develop dist_info = setuptools.command.dist_info:dist_info easy_install = setuptools.command.easy_install:easy_install + editable_wheel = setuptools.command.editable_wheel:editable_wheel egg_info = setuptools.command.egg_info:egg_info install = setuptools.command.install:install install_egg_info = setuptools.command.install_egg_info:install_egg_info diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 06991b65d7..35d7bd1c4a 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -1,6 +1,5 @@ """Extensions to the 'distutils' for large or complex distributions""" -from fnmatch import fnmatchcase import functools import os import re @@ -9,14 +8,15 @@ import distutils.core from distutils.errors import DistutilsOptionError -from distutils.util import convert_path +from distutils.util import convert_path as _convert_path -from ._deprecation_warning import SetuptoolsDeprecationWarning +from .warnings import SetuptoolsDeprecationWarning import setuptools.version from setuptools.extension import Extension from setuptools.dist import Distribution from setuptools.depends import Require +from setuptools.discovery import PackageFinder, PEP420PackageFinder from . import monkey from . 
import logging @@ -37,85 +37,6 @@ bootstrap_install_from = None -class PackageFinder: - """ - Generate a list of all Python packages found within a directory - """ - - @classmethod - def find(cls, where='.', exclude=(), include=('*',)): - """Return a list all Python packages found within directory 'where' - - 'where' is the root directory which will be searched for packages. It - should be supplied as a "cross-platform" (i.e. URL-style) path; it will - be converted to the appropriate local path syntax. - - 'exclude' is a sequence of package names to exclude; '*' can be used - as a wildcard in the names, such that 'foo.*' will exclude all - subpackages of 'foo' (but not 'foo' itself). - - 'include' is a sequence of package names to include. If it's - specified, only the named packages will be included. If it's not - specified, all found packages will be included. 'include' can contain - shell style wildcard patterns just like 'exclude'. - """ - - return list( - cls._find_packages_iter( - convert_path(where), - cls._build_filter('ez_setup', '*__pycache__', *exclude), - cls._build_filter(*include), - ) - ) - - @classmethod - def _find_packages_iter(cls, where, exclude, include): - """ - All the packages found in 'where' that pass the 'include' filter, but - not the 'exclude' filter. - """ - for root, dirs, files in os.walk(where, followlinks=True): - # Copy dirs to iterate over it, then empty dirs. - all_dirs = dirs[:] - dirs[:] = [] - - for dir in all_dirs: - full_path = os.path.join(root, dir) - rel_path = os.path.relpath(full_path, where) - package = rel_path.replace(os.path.sep, '.') - - # Skip directory trees that are not valid packages - if '.' in dir or not cls._looks_like_package(full_path): - continue - - # Should this package be included? - if include(package) and not exclude(package): - yield package - - # Keep searching subdirectories, as there may be more packages - # down there, even if the parent was excluded. - dirs.append(dir) - - @staticmethod - def _looks_like_package(path): - """Does a directory look like a package?""" - return os.path.isfile(os.path.join(path, '__init__.py')) - - @staticmethod - def _build_filter(*patterns): - """ - Given a list of patterns, return a callable that will be true only if - the input matches at least one of the patterns. - """ - return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) - - -class PEP420PackageFinder(PackageFinder): - @staticmethod - def _looks_like_package(path): - return True - - find_packages = PackageFinder.find find_namespace_packages = PEP420PackageFinder.find @@ -133,6 +54,16 @@ def __init__(self, attrs): _incl = 'dependency_links', 'setup_requires' filtered = {k: attrs[k] for k in set(_incl) & set(attrs)} super().__init__(filtered) + # Prevent accidentally triggering discovery with incomplete set of attrs + self.set_defaults._disable() + + def _get_project_config_files(self, filenames=None): + """Ignore ``pyproject.toml``, they are not related to setup_requires""" + try: + cfg, toml = super()._split_standard_project_metadata(filenames) + return cfg, () + except Exception: + return filenames, () def finalize_options(self): """ @@ -145,7 +76,28 @@ def finalize_options(self): # Honor setup.cfg's options. 
     dist.parse_config_files(ignore_option_errors=True)
     if dist.setup_requires:
+        _fetch_build_eggs(dist)
+
+
+def _fetch_build_eggs(dist):
+    try:
         dist.fetch_build_eggs(dist.setup_requires)
+    except Exception as ex:
+        msg = """
+        It is possible a package already installed in your system
+        contains a version that is invalid according to PEP 440.
+        You can try `pip install --use-pep517` as a workaround for this problem,
+        or rely on a new virtual environment.
+
+        If the problem refers to a package that is not installed yet,
+        please contact that package's maintainers or distributors.
+        """
+        if "InvalidVersion" in ex.__class__.__name__:
+            if hasattr(ex, "add_note"):
+                ex.add_note(msg)  # PEP 678
+            else:
+                dist.announce(f"\n{msg}\n")
+        raise
 
 
 def setup(**attrs):
@@ -162,7 +114,59 @@
 
 
 class Command(_Command):
-    __doc__ = _Command.__doc__
+    """
+    Setuptools internal actions are organized using a *command design pattern*.
+    This means that each action (or group of closely related actions) executed during
+    the build should be implemented as a ``Command`` subclass.
+
+    These commands are abstractions and do not necessarily correspond to a command that
+    can (or should) be executed via a terminal, in a CLI fashion (although historically
+    they would).
+
+    When creating a new command from scratch, custom-defined classes **SHOULD** inherit
+    from ``setuptools.Command`` and implement a few mandatory methods.
+    Among these mandatory methods are:
+
+    .. method:: initialize_options(self)
+
+        Set (or reset) all options/attributes/caches used by the command
+        to their default values. Note that these values may be overwritten during
+        the build.
+
+    .. method:: finalize_options(self)
+
+        Set final values for all options/attributes used by the command.
+        Most of the time, each option/attribute/cache should only be set if it does not
+        have any value yet (e.g. ``if self.attr is None: self.attr = val``).
+
+    .. method:: run(self)
+
+        Execute the actions intended by the command.
+        (Side effects **SHOULD** only take place when ``run`` is executed,
+        for example, creating new files or writing to the terminal output).
+
+    A useful analogy for command classes is to think of them as subroutines with local
+    variables called "options". The options are "declared" in ``initialize_options()``
+    and "defined" (given their final values, aka "finalized") in ``finalize_options()``,
+    both of which must be defined by every command class. The "body" of the subroutine
+    (where it does all the work) is the ``run()`` method.
+    Between ``initialize_options()`` and ``finalize_options()``, ``setuptools`` may set
+    the values for options/attributes based on the user's input (or circumstance),
+    which means that the implementation should be careful to not overwrite values in
+    ``finalize_options`` unless necessary.
+
+    Please note that other commands (or other parts of setuptools) may also overwrite
+    the values of the command's options/attributes multiple times during the build
+    process.
+    Therefore it is important to consistently implement ``initialize_options()`` and
+    ``finalize_options()``. For example, all derived attributes (or attributes that
+    depend on the value of other attributes) **SHOULD** be recomputed in
+    ``finalize_options``.
+
+    When overwriting existing commands, custom-defined classes **MUST** abide by the
+    same APIs implemented by the original class. They also **SHOULD** inherit from the
+    original class.
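+
+    A minimal sketch of a custom command (``audit`` is a hypothetical name,
+    not one of the built-in commands)::
+
+        import setuptools
+
+        class audit(setuptools.Command):
+            user_options = []  # no command-line options in this sketch
+
+            def initialize_options(self):
+                self.report_file = None  # reset cached options
+
+            def finalize_options(self):
+                if self.report_file is None:
+                    self.report_file = "audit.txt"
+
+            def run(self):
+                ...  # side effects belong here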
+ """ command_consumes_arguments = False @@ -190,6 +194,12 @@ def ensure_string_list(self, option): currently a string, we split it either on /,\s*/ or /\s+/, so "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become ["foo", "bar", "baz"]. + + .. + TODO: This method seems to be similar to the one in ``distutils.cmd`` + Probably it is just here for backward compatibility with old Python versions? + + :meta private: """ val = getattr(self, option) if val is None: @@ -236,6 +246,22 @@ def findall(dir=os.curdir): return list(files) +@functools.wraps(_convert_path) +def convert_path(pathname): + SetuptoolsDeprecationWarning.emit( + "Access to implementation detail", + """ + The function `convert_path` is not provided by setuptools itself, + and therefore not part of the public API. + + Its direct usage by 3rd-party packages is considered improper and the function + may be removed in the future. + """, + due_date=(2023, 12, 13) # initial deprecation 2022-03-25, see #3201 + ) + return _convert_path(pathname) + + class sic(str): """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)""" diff --git a/setuptools/_deprecation_warning.py b/setuptools/_deprecation_warning.py deleted file mode 100644 index 086b64dd38..0000000000 --- a/setuptools/_deprecation_warning.py +++ /dev/null @@ -1,7 +0,0 @@ -class SetuptoolsDeprecationWarning(Warning): - """ - Base class for warning deprecations in ``setuptools`` - - This class is not derived from ``DeprecationWarning``, and as such is - visible by default. - """ diff --git a/setuptools/_distutils/README b/setuptools/_distutils/README deleted file mode 100644 index 23f488506f..0000000000 --- a/setuptools/_distutils/README +++ /dev/null @@ -1,11 +0,0 @@ -This directory contains the Distutils package. - -There's a full documentation available at: - - http://docs.python.org/distutils/ - -The Distutils-SIG web page is also a good starting point: - - http://www.python.org/sigs/distutils-sig/ - -$Id$ diff --git a/setuptools/_distutils/__init__.py b/setuptools/_distutils/__init__.py index 8fd493b42c..1a188c35cb 100644 --- a/setuptools/_distutils/__init__.py +++ b/setuptools/_distutils/__init__.py @@ -1,17 +1,7 @@ -"""distutils - -The main package for the Python Module Distribution Utilities. Normally -used from a setup script as - - from distutils.core import setup - - setup (...) -""" - import sys import importlib -__version__ = sys.version[:sys.version.index(' ')] +__version__, _, _ = sys.version.partition(' ') try: diff --git a/setuptools/_distutils/_collections.py b/setuptools/_distutils/_collections.py index 98fce8008d..5ad21cc7c9 100644 --- a/setuptools/_distutils/_collections.py +++ b/setuptools/_distutils/_collections.py @@ -1,5 +1,7 @@ import collections +import functools import itertools +import operator # from jaraco.collections 3.5.1 @@ -54,3 +56,139 @@ def __contains__(self, other): def __len__(self): return len(list(iter(self))) + + +# from jaraco.collections 3.7 +class RangeMap(dict): + """ + A dictionary-like object that uses the keys as bounds for a range. + Inclusion of the value for that range is determined by the + key_match_comparator, which defaults to less-than-or-equal. + A value is returned for a key if it is the first key that matches in + the sorted list of keys. + + One may supply keyword parameters to be passed to the sort function used + to sort keys (i.e. key, reverse) as sort_params. 
+ + Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b' + + >>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy + >>> r[1], r[2], r[3], r[4], r[5], r[6] + ('a', 'a', 'a', 'b', 'b', 'b') + + Even float values should work so long as the comparison operator + supports it. + + >>> r[4.5] + 'b' + + But you'll notice that the way rangemap is defined, it must be open-ended + on one side. + + >>> r[0] + 'a' + >>> r[-1] + 'a' + + One can close the open-end of the RangeMap by using undefined_value + + >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'}) + >>> r[0] + Traceback (most recent call last): + ... + KeyError: 0 + + One can get the first or last elements in the range by using RangeMap.Item + + >>> last_item = RangeMap.Item(-1) + >>> r[last_item] + 'b' + + .last_item is a shortcut for Item(-1) + + >>> r[RangeMap.last_item] + 'b' + + Sometimes it's useful to find the bounds for a RangeMap + + >>> r.bounds() + (0, 6) + + RangeMap supports .get(key, default) + + >>> r.get(0, 'not found') + 'not found' + + >>> r.get(7, 'not found') + 'not found' + + One often wishes to define the ranges by their left-most values, + which requires use of sort params and a key_match_comparator. + + >>> r = RangeMap({1: 'a', 4: 'b'}, + ... sort_params=dict(reverse=True), + ... key_match_comparator=operator.ge) + >>> r[1], r[2], r[3], r[4], r[5], r[6] + ('a', 'a', 'a', 'b', 'b', 'b') + + That wasn't nearly as easy as before, so an alternate constructor + is provided: + + >>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value}) + >>> r[1], r[2], r[3], r[4], r[5], r[6] + ('a', 'a', 'a', 'b', 'b', 'b') + + """ + + def __init__(self, source, sort_params={}, key_match_comparator=operator.le): + dict.__init__(self, source) + self.sort_params = sort_params + self.match = key_match_comparator + + @classmethod + def left(cls, source): + return cls( + source, sort_params=dict(reverse=True), key_match_comparator=operator.ge + ) + + def __getitem__(self, item): + sorted_keys = sorted(self.keys(), **self.sort_params) + if isinstance(item, RangeMap.Item): + result = self.__getitem__(sorted_keys[item]) + else: + key = self._find_first_match_(sorted_keys, item) + result = dict.__getitem__(self, key) + if result is RangeMap.undefined_value: + raise KeyError(key) + return result + + def get(self, key, default=None): + """ + Return the value for key if key is in the dictionary, else default. + If default is not given, it defaults to None, so that this method + never raises a KeyError. 
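+
+        For example:
+
+        >>> r = RangeMap({3: 'a', 6: 'b'})
+        >>> r.get(4, 'not found')
+        'b'
+        >>> r.get(7, 'not found')
+        'not found'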
+ """ + try: + return self[key] + except KeyError: + return default + + def _find_first_match_(self, keys, item): + is_match = functools.partial(self.match, item) + matches = list(filter(is_match, keys)) + if matches: + return matches[0] + raise KeyError(item) + + def bounds(self): + sorted_keys = sorted(self.keys(), **self.sort_params) + return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item]) + + # some special values for the RangeMap + undefined_value = type('RangeValueUndefined', (), {})() + + class Item(int): + "RangeMap Item" + + first_item = Item(0) + last_item = Item(-1) diff --git a/setuptools/_distutils/_functools.py b/setuptools/_distutils/_functools.py new file mode 100644 index 0000000000..e7053bac12 --- /dev/null +++ b/setuptools/_distutils/_functools.py @@ -0,0 +1,20 @@ +import functools + + +# from jaraco.functools 3.5 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/setuptools/_distutils/_log.py b/setuptools/_distutils/_log.py new file mode 100644 index 0000000000..4a2ae0acb8 --- /dev/null +++ b/setuptools/_distutils/_log.py @@ -0,0 +1,4 @@ +import logging + + +log = logging.getLogger() diff --git a/setuptools/_distutils/_macos_compat.py b/setuptools/_distutils/_macos_compat.py new file mode 100644 index 0000000000..17769e9154 --- /dev/null +++ b/setuptools/_distutils/_macos_compat.py @@ -0,0 +1,12 @@ +import sys +import importlib + + +def bypass_compiler_fixup(cmd, args): + return cmd + + +if sys.platform == 'darwin': + compiler_fixup = importlib.import_module('_osx_support').compiler_fixup +else: + compiler_fixup = bypass_compiler_fixup diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py index f2f801c552..4f081c7e92 100644 --- a/setuptools/_distutils/_msvccompiler.py +++ b/setuptools/_distutils/_msvccompiler.py @@ -17,24 +17,31 @@ import subprocess import contextlib import warnings -import unittest.mock +import unittest.mock as mock + with contextlib.suppress(ImportError): import winreg -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform +from .errors import ( + DistutilsExecError, + DistutilsPlatformError, + CompileError, + LibError, + LinkError, +) +from .ccompiler import CCompiler, gen_lib_options +from ._log import log +from .util import get_platform from itertools import count + def _find_vc2015(): try: key = winreg.OpenKeyEx( winreg.HKEY_LOCAL_MACHINE, r"Software\Microsoft\VisualStudio\SxS\VC7", - access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY + access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY, ) except OSError: log.debug("Visual C++ is not registered") @@ -57,6 +64,7 @@ def _find_vc2015(): best_version, best_dir = version, vc_dir return best_version, best_dir + def _find_vc2017(): """Returns "15, path" based on the result of invoking vswhere.exe If no install is found, returns "None, None" @@ -72,14 +80,23 @@ def _find_vc2017(): return None, None try: - path = subprocess.check_output([ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requires", 
"Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", "installationPath", - "-products", "*", - ], encoding="mbcs", errors="strict").strip() + path = subprocess.check_output( + [ + os.path.join( + root, "Microsoft Visual Studio", "Installer", "vswhere.exe" + ), + "-latest", + "-prerelease", + "-requires", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", + "installationPath", + "-products", + "*", + ], + encoding="mbcs", + errors="strict", + ).strip() except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): return None, None @@ -89,13 +106,15 @@ def _find_vc2017(): return None, None + PLAT_SPEC_TO_RUNTIME = { - 'x86' : 'x86', - 'x86_amd64' : 'x64', - 'x86_arm' : 'arm', - 'x86_arm64' : 'arm64' + 'x86': 'x86', + 'x86_amd64': 'x64', + 'x86_arm': 'arm', + 'x86_arm64': 'arm64', } + def _find_vcvarsall(plat_spec): # bpo-38597: Removed vcruntime return value _, best_dir = _find_vc2017() @@ -114,12 +133,10 @@ def _find_vcvarsall(plat_spec): return vcvarsall, None + def _get_vc_env(plat_spec): if os.getenv("DISTUTILS_USE_SDK"): - return { - key.lower(): value - for key, value in os.environ.items() - } + return {key.lower(): value for key, value in os.environ.items()} vcvarsall, _ = _find_vcvarsall(plat_spec) if not vcvarsall: @@ -127,23 +144,22 @@ def _get_vc_env(plat_spec): try: out = subprocess.check_output( - 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), + f'cmd /u /c "{vcvarsall}" {plat_spec} && set', stderr=subprocess.STDOUT, ).decode('utf-16le', errors='replace') except subprocess.CalledProcessError as exc: log.error(exc.output) - raise DistutilsPlatformError("Error executing {}" - .format(exc.cmd)) + raise DistutilsPlatformError(f"Error executing {exc.cmd}") env = { key.lower(): value - for key, _, value in - (line.partition('=') for line in out.splitlines()) + for key, _, value in (line.partition('=') for line in out.splitlines()) if key and value } return env + def _find_exe(exe, paths=None): """Return path to an MSVC executable program. @@ -161,19 +177,21 @@ def _find_exe(exe, paths=None): return fn return exe + # A map keyed by get_platform() return values to values accepted by # 'vcvarsall.bat'. Always cross-compile from x86 to work with the # lighter-weight MSVC installs that do not include native 64-bit tools. PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'x86_amd64', - 'win-arm32' : 'x86_arm', - 'win-arm64' : 'x86_arm64' + 'win32': 'x86', + 'win-amd64': 'x86_amd64', + 'win-arm32': 'x86_arm', + 'win-arm64': 'x86_arm64', } -class MSVCCompiler(CCompiler) : + +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" + as defined by the CCompiler abstract class.""" compiler_type = 'msvc' @@ -192,8 +210,7 @@ class MSVCCompiler(CCompiler) : # Needed for the filename generation methods provided by the # base class, CCompiler. 
- src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) + src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' @@ -201,13 +218,24 @@ class MSVCCompiler(CCompiler) : static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - def __init__(self, verbose=0, dry_run=0, force=0): super().__init__(verbose, dry_run, force) # target platform (.plat_name is consistent with 'bdist') self.plat_name = None self.initialized = False + @classmethod + def _configure(cls, vc_env): + """ + Set class-level include/lib dirs. + """ + cls.include_dirs = cls._parse_path(vc_env.get('include', '')) + cls.library_dirs = cls._parse_path(vc_env.get('lib', '')) + + @staticmethod + def _parse_path(val): + return [dir.rstrip(os.sep) for dir in val.split(os.pathsep) if dir] + def initialize(self, plat_name=None): # multi-init means we would need to check platform same each time... assert not self.initialized, "don't init multiple times" @@ -215,58 +243,62 @@ def initialize(self, plat_name=None): plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. if plat_name not in PLAT_TO_VCVARS: - raise DistutilsPlatformError("--plat-name must be one of {}" - .format(tuple(PLAT_TO_VCVARS))) + raise DistutilsPlatformError( + f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}" + ) # Get the vcvarsall.bat spec for the requested platform. plat_spec = PLAT_TO_VCVARS[plat_name] vc_env = _get_vc_env(plat_spec) if not vc_env: - raise DistutilsPlatformError("Unable to find a compatible " - "Visual Studio installation.") + raise DistutilsPlatformError( + "Unable to find a compatible " "Visual Studio installation." + ) + self._configure(vc_env) self._paths = vc_env.get('path', '') paths = self._paths.split(os.pathsep) self.cc = _find_exe("cl.exe", paths) self.linker = _find_exe("link.exe", paths) self.lib = _find_exe("lib.exe", paths) - self.rc = _find_exe("rc.exe", paths) # resource compiler - self.mc = _find_exe("mc.exe", paths) # message compiler - self.mt = _find_exe("mt.exe", paths) # message compiler - - for dir in vc_env.get('include', '').split(os.pathsep): - if dir: - self.add_include_dir(dir.rstrip(os.sep)) - - for dir in vc_env.get('lib', '').split(os.pathsep): - if dir: - self.add_library_dir(dir.rstrip(os.sep)) + self.rc = _find_exe("rc.exe", paths) # resource compiler + self.mc = _find_exe("mc.exe", paths) # message compiler + self.mt = _find_exe("mt.exe", paths) # message compiler self.preprocess_options = None # bpo-38597: Always compile with dynamic linking # Future releases of Python 3.x will include all past # versions of vcruntime*.dll for compatibility. 
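
The new _configure/_parse_path pair above replaces the old per-instance add_include_dir/add_library_dir loops with class-level lists parsed once from the vcvarsall environment. A small sketch of the splitting, using invented directory names:

    import os

    # Mirror of MSVCCompiler._parse_path: split a PATH-like value on
    # os.pathsep, drop empty entries, strip trailing separators.
    def _parse_path(val):
        return [entry.rstrip(os.sep) for entry in val.split(os.pathsep) if entry]

    # On Windows (pathsep ';', sep '\\') this prints
    # ['C:\\VC\\include', 'C:\\SDK\\include'].
    print(_parse_path('C:\\VC\\include\\;C:\\SDK\\include;'))
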
- self.compile_options = [ - '/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD' - ] + self.compile_options = ['/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD'] self.compile_options_debug = [ - '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' + '/nologo', + '/Od', + '/MDd', + '/Zi', + '/W3', + '/D_DEBUG', ] - ldflags = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG' - ] + ldflags = ['/nologo', '/INCREMENTAL:NO', '/LTCG'] - ldflags_debug = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' - ] + ldflags_debug = ['/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'] self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] - self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_shared = [ + *ldflags, + '/DLL', + '/MANIFEST:EMBED,ID=2', + '/MANIFESTUAC:NO', + ] + self.ldflags_shared_debug = [ + *ldflags_debug, + '/DLL', + '/MANIFEST:EMBED,ID=2', + '/MANIFESTUAC:NO', + ] self.ldflags_static = [*ldflags] self.ldflags_static_debug = [*ldflags_debug] @@ -286,47 +318,32 @@ def initialize(self, plat_name=None): # -- Worker methods ------------------------------------------------ - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - ext_map = { - **{ext: self.obj_extension for ext in self.src_extensions}, - **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, + @property + def out_extensions(self): + return { + **super().out_extensions, + **{ + ext: self.res_extension + for ext in self._rc_extensions + self._mc_extensions + }, } - output_dir = output_dir or '' - - def make_out_path(p): - base, ext = os.path.splitext(p) - if strip_dir: - base = os.path.basename(base) - else: - _, base = os.path.splitdrive(base) - if base.startswith((os.path.sep, os.path.altsep)): - base = base[1:] - try: - # XXX: This may produce absurdly long paths. We should check - # the length of the result and trim base until we fit within - # 260 characters. 
- return os.path.join(output_dir, base + ext_map[ext]) - except LookupError: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError("Don't know how to compile {}".format(p)) - - return list(map(make_out_path, source_filenames)) - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - + def compile( # noqa: C901 + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): if not self.initialized: self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) + compile_info = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] @@ -336,7 +353,6 @@ def compile(self, sources, else: compile_opts.extend(self.compile_options) - add_cpp_opts = False for obj in objects: @@ -381,7 +397,7 @@ def compile(self, sources, try: # first compile .MC to .RC and .H file self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) - base, _ = os.path.splitext(os.path.basename (src)) + base, _ = os.path.splitext(os.path.basename(src)) rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file self.spawn([self.rc, "/fo" + obj, rc_file]) @@ -391,14 +407,12 @@ def compile(self, sources, continue else: # how to handle this file? - raise CompileError("Don't know how to compile {} to {}" - .format(src, obj)) + raise CompileError(f"Don't know how to compile {src} to {obj}") args = [self.cc] + compile_opts + pp_opts if add_cpp_opts: args.append('/EHsc') - args.append(input_opt) - args.append("/Fo" + obj) + args.extend((input_opt, "/Fo" + obj)) args.extend(extra_postargs) try: @@ -408,24 +422,18 @@ def compile(self, sources, return objects - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): if not self.initialized: self.initialize() objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: - pass # XXX what goes here? + pass # XXX what goes here? 
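
The removed object_filenames/make_out_path code above gives way to an out_extensions property: the base class maps every source extension to obj_extension, and MSVCCompiler layers the .rc/.mc-to-.res mapping on top via super(). A condensed sketch of that layering (class names invented):

    class Base:
        src_extensions = ['.c', '.cc']
        obj_extension = '.obj'

        @property
        def out_extensions(self):
            # Every known source extension compiles to an object file.
            return dict.fromkeys(self.src_extensions, self.obj_extension)

    class Windowsish(Base):
        _rc_extensions = ['.rc']
        _mc_extensions = ['.mc']
        res_extension = '.res'

        @property
        def out_extensions(self):
            # Layer resource/message mappings over the base mapping.
            return {
                **super().out_extensions,
                **{ext: self.res_extension
                   for ext in self._rc_extensions + self._mc_extensions},
            }

    assert Windowsish().out_extensions == {
        '.c': '.obj', '.cc': '.obj', '.rc': '.res', '.mc': '.res'}
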
try: log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) self.spawn([self.lib] + lib_args) @@ -434,36 +442,35 @@ def create_static_lib(self, else: log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): if not self.initialized: self.initialize() objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) libraries, library_dirs, runtime_library_dirs = fixed_args if runtime_library_dirs: - self.warn("I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs)) + self.warn( + "I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs) + ) - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -472,8 +479,9 @@ def link(self, export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) + ld_args = ( + ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] + ) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. The .lib files may even be @@ -483,11 +491,10 @@ def link(self, build_temp = os.path.dirname(objects[0]) if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + os.path.basename(output_filename) + ) + implib_file = os.path.join(build_temp, self.library_filename(dll_name)) + ld_args.append('/IMPLIB:' + implib_file) if extra_preargs: ld_args[:0] = extra_preargs @@ -525,9 +532,8 @@ def _fallback_spawn(self, cmd, env): raise else: return - warnings.warn( - "Fallback spawn triggered. Please update distutils monkeypatch.") - with unittest.mock.patch.dict('os.environ', env): + warnings.warn("Fallback spawn triggered. 
Please update distutils monkeypatch.") + with mock.patch.dict('os.environ', env): bag.value = super().spawn(cmd) # -- Miscellaneous methods ----------------------------------------- @@ -539,7 +545,8 @@ def library_dir_option(self, dir): def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC") + "don't know how to set runtime library search path for MSVC" + ) def library_option(self, lib): return self.library_filename(lib) diff --git a/setuptools/_distutils/archive_util.py b/setuptools/_distutils/archive_util.py index 565a3117b4..7f9e1e00cc 100644 --- a/setuptools/_distutils/archive_util.py +++ b/setuptools/_distutils/archive_util.py @@ -13,10 +13,10 @@ zipfile = None -from distutils.errors import DistutilsExecError -from distutils.spawn import spawn -from distutils.dir_util import mkpath -from distutils import log +from .errors import DistutilsExecError +from .spawn import spawn +from .dir_util import mkpath +from ._log import log try: from pwd import getpwnam @@ -28,6 +28,7 @@ except ImportError: getgrnam = None + def _get_gid(name): """Returns a gid, given a group name.""" if getgrnam is None or name is None: @@ -40,6 +41,7 @@ def _get_gid(name): return result[2] return None + def _get_uid(name): """Returns an uid, given a user name.""" if getpwnam is None or name is None: @@ -52,8 +54,10 @@ def _get_uid(name): return result[2] return None -def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None): + +def make_tarball( + base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None +): """Create a (possibly compressed) tar file from all the files under 'base_dir'. @@ -69,16 +73,21 @@ def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, Returns the output filename. """ - tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', - 'compress': ''} - compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', - 'compress': '.Z'} + tar_compression = { + 'gzip': 'gz', + 'bzip2': 'bz2', + 'xz': 'xz', + None: '', + 'compress': '', + } + compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'} # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext.keys(): raise ValueError( - "bad value for 'compress': must be None, 'gzip', 'bzip2', " - "'xz' or 'compress'") + "bad value for 'compress': must be None, 'gzip', 'bzip2', " + "'xz' or 'compress'" + ) archive_name = base_name + '.tar' if compress != 'compress': @@ -112,7 +121,7 @@ def _set_uid_gid(tarinfo): # compression using `compress` if compress == 'compress': - warn("'compress' will be deprecated.", PendingDeprecationWarning) + warn("'compress' is deprecated.", DeprecationWarning) # the option varies depending on the platform compressed_name = archive_name + compress_ext[compress] if sys.platform == 'win32': @@ -124,7 +133,8 @@ def _set_uid_gid(tarinfo): return archive_name -def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): + +def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): # noqa: C901 """Create a zip file from all the files under 'base_dir'. The output zip file will be named 'base_name' + ".zip". 
Uses either the @@ -145,26 +155,29 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): zipoptions = "-rq" try: - spawn(["zip", zipoptions, zip_filename, base_dir], - dry_run=dry_run) + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) except DistutilsExecError: # XXX really should distinguish between "couldn't find # external 'zip' command" and "zip failed". - raise DistutilsExecError(("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename) + raise DistutilsExecError( + ( + "unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility" + ) + % zip_filename + ) else: - log.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) + log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) if not dry_run: try: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) + zip = zipfile.ZipFile( + zip_filename, "w", compression=zipfile.ZIP_DEFLATED + ) except RuntimeError: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_STORED) + zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED) with zip: if base_dir != os.curdir: @@ -184,14 +197,16 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): return zip_filename + ARCHIVE_FORMATS = { 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), - 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), - 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (make_zipfile, [],"ZIP file") - } + 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), + 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (make_zipfile, [], "ZIP file"), +} + def check_archive_formats(formats): """Returns the first format from the 'format' list that is unknown. @@ -203,8 +218,17 @@ def check_archive_formats(formats): return format return None -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None): + +def make_archive( + base_name, + format, + root_dir=None, + base_dir=None, + verbose=0, + dry_run=0, + owner=None, + group=None, +): """Create an archive file (eg. zip or tar). 'base_name' is the name of the file to create, minus any format-specific diff --git a/setuptools/_distutils/bcppcompiler.py b/setuptools/_distutils/bcppcompiler.py index 2eb6d2e956..ba45ea2b95 100644 --- a/setuptools/_distutils/bcppcompiler.py +++ b/setuptools/_distutils/bcppcompiler.py @@ -13,16 +13,30 @@ import os -from distutils.errors import \ - DistutilsExecError, \ - CompileError, LibError, LinkError, UnknownFileError -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options -from distutils.file_util import write_file -from distutils.dep_util import newer -from distutils import log - -class BCPPCompiler(CCompiler) : +import warnings + +from .errors import ( + DistutilsExecError, + CompileError, + LibError, + LinkError, + UnknownFileError, +) +from .ccompiler import CCompiler, gen_preprocess_options +from .file_util import write_file +from .dep_util import newer +from ._log import log + + +warnings.warn( + "bcppcompiler is deprecated and slated to be removed " + "in the future. 
Please discontinue use or file an issue " + "with pypa/distutils describing your use case.", + DeprecationWarning, +) + + +class BCPPCompiler(CCompiler): """Concrete class that implements an interface to the Borland C/C++ compiler, as defined by the CCompiler abstract class. """ @@ -49,12 +63,7 @@ class BCPPCompiler(CCompiler) : static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - - def __init__ (self, - verbose=0, - dry_run=0, - force=0): - + def __init__(self, verbose=0, dry_run=0, force=0): super().__init__(verbose, dry_run, force) # These executables are assumed to all be in the path. @@ -73,24 +82,30 @@ def __init__ (self, self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] self.ldflags_static = [] self.ldflags_exe = ['/Gn', '/q', '/x'] - self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] - + self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r'] # -- Worker methods ------------------------------------------------ - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) + def compile( # noqa: C901 + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): + macros, objects, extra_postargs, pp_opts, build = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) compile_opts = extra_preargs or [] - compile_opts.append ('-c') + compile_opts.append('-c') if debug: - compile_opts.extend (self.compile_options_debug) + compile_opts.extend(self.compile_options_debug) else: - compile_opts.extend (self.compile_options) + compile_opts.extend(self.compile_options) for obj in objects: try: @@ -106,14 +121,14 @@ def compile(self, sources, if ext == '.res': # This is already a binary file -- skip it. - continue # the 'for' loop + continue # the 'for' loop if ext == '.rc': # This needs to be compiled to a .res file -- do it now. try: - self.spawn (["brcc32", "-fo", obj, src]) + self.spawn(["brcc32", "-fo", obj, src]) except DistutilsExecError as msg: raise CompileError(msg) - continue # the 'for' loop + continue # the 'for' loop # The next two are both for the real compiler. if ext in self._c_extensions: @@ -132,9 +147,14 @@ def compile(self, sources, # Note that the source file names must appear at the end of # the command line. 
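
Because the warnings.warn call added at the top of bcppcompiler.py runs at module level, the DeprecationWarning fires as a side effect of merely importing the module. A tiny sketch of observing such a warning (message abbreviated):

    import warnings

    # Import-time warnings can be trapped with catch_warnings rather
    # than letting them reach the console.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        warnings.warn("bcppcompiler is deprecated ...", DeprecationWarning)

    assert caught[0].category is DeprecationWarning
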
try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) + self.spawn( + [self.cc] + + compile_opts + + pp_opts + + [input_opt, output_opt] + + extra_postargs + + [src] + ) except DistutilsExecError as msg: raise CompileError(msg) @@ -142,24 +162,18 @@ def compile(self, sources, # compile () + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): + (objects, output_dir) = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) - - if self._need_link (objects, output_filename): + if self._need_link(objects, output_filename): lib_args = [output_filename, '/u'] + objects if debug: - pass # XXX what goes here? + pass # XXX what goes here? try: - self.spawn ([self.lib] + lib_args) + self.spawn([self.lib] + lib_args) except DistutilsExecError as msg: raise LibError(msg) else: @@ -167,38 +181,40 @@ def create_static_lib (self, # create_static_lib () - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - + def link( # noqa: C901 + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): # XXX this ignores 'build_temp'! should follow the lead of # msvccompiler.py - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) + (objects, output_dir) = self._fix_object_args(objects, output_dir) + (libraries, library_dirs, runtime_library_dirs) = self._fix_lib_args( + libraries, library_dirs, runtime_library_dirs + ) if runtime_library_dirs: - log.warn("I don't know what to do with 'runtime_library_dirs': %s", - str(runtime_library_dirs)) + log.warning( + "I don't know what to do with 'runtime_library_dirs': %s", + str(runtime_library_dirs), + ) if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) - - if self._need_link (objects, output_filename): + output_filename = os.path.join(output_dir, output_filename) + if self._need_link(objects, output_filename): # Figure out linker args based on type of target. 
if target_desc == CCompiler.EXECUTABLE: startup_obj = 'c0w32' @@ -213,20 +229,18 @@ def link (self, else: ld_args = self.ldflags_shared[:] - # Create a temporary exports file for use by the linker if export_symbols is None: def_file = '' else: - head, tail = os.path.split (output_filename) - modname, ext = os.path.splitext (tail) - temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join (temp_dir, '%s.def' % modname) + head, tail = os.path.split(output_filename) + modname, ext = os.path.splitext(tail) + temp_dir = os.path.dirname(objects[0]) # preserve tree structure + def_file = os.path.join(temp_dir, '%s.def' % modname) contents = ['EXPORTS'] - for sym in (export_symbols or []): - contents.append(' %s=_%s' % (sym, sym)) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) + for sym in export_symbols or []: + contents.append(' {}=_{}'.format(sym, sym)) + self.execute(write_file, (def_file, contents), "writing %s" % def_file) # Borland C++ has problems with '/' in paths objects2 = map(os.path.normpath, objects) @@ -241,10 +255,9 @@ def link (self, else: objects.append(file) - - for l in library_dirs: - ld_args.append("/L%s" % os.path.normpath(l)) - ld_args.append("/L.") # we sometimes use relative paths + for ell in library_dirs: + ld_args.append("/L%s" % os.path.normpath(ell)) + ld_args.append("/L.") # we sometimes use relative paths # list of object files ld_args.extend(objects) @@ -260,7 +273,7 @@ def link (self, # them. Arghghh!. Apparently it works fine as coded... # name of dll/exe file - ld_args.extend([',',output_filename]) + ld_args.extend([',', output_filename]) # no map file and start libraries ld_args.append(',,') @@ -276,24 +289,22 @@ def link (self, ld_args.append(libfile) # some default libraries - ld_args.append ('import32') - ld_args.append ('cw32mt') + ld_args.extend(('import32', 'cw32mt')) # def file for export symbols - ld_args.extend([',',def_file]) + ld_args.extend([',', def_file]) # add resource files ld_args.append(',') ld_args.extend(resources) - if extra_preargs: ld_args[:0] = extra_preargs if extra_postargs: ld_args.extend(extra_postargs) - self.mkpath (os.path.dirname (output_filename)) + self.mkpath(os.path.dirname(output_filename)) try: - self.spawn ([self.linker] + ld_args) + self.spawn([self.linker] + ld_args) except DistutilsExecError as msg: raise LinkError(msg) @@ -304,8 +315,7 @@ def link (self, # -- Miscellaneous methods ----------------------------------------- - - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=0): # List of effective library names to try, in order of preference: # xxx_bcpp.lib is better than xxx.lib # and xxx_d.lib is better than xxx.lib if debug is set @@ -316,7 +326,7 @@ def find_library_file (self, dirs, lib, debug=0): # compiler they care about, since (almost?) every Windows compiler # seems to have a different format for static libraries. 
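
The temporary exports file built in link() above writes each exported symbol as name=_name under an EXPORTS header before handing the .def file to the Borland linker. A standalone sketch with invented symbol names:

    # Build the .def-file contents the way BCPPCompiler.link does.
    export_symbols = ['initfoo', 'do_bar']
    contents = ['EXPORTS']
    for sym in export_symbols or []:
        contents.append('  {}=_{}'.format(sym, sym))

    print('\n'.join(contents))
    # EXPORTS
    #   initfoo=_initfoo
    #   do_bar=_do_bar
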
if debug: - dlib = (lib + "_d") + dlib = lib + "_d" try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) else: try_names = (lib + "_bcpp", lib) @@ -331,43 +341,41 @@ def find_library_file (self, dirs, lib, debug=0): return None # overwrite the one from CCompiler to support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) + (base, ext) = os.path.splitext(os.path.normcase(src_name)) + if ext not in (self.src_extensions + ['.rc', '.res']): + raise UnknownFileError( + "unknown file type '{}' (from '{}')".format(ext, src_name) + ) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext == '.res': # these can go unchanged - obj_names.append (os.path.join (output_dir, base + ext)) + obj_names.append(os.path.join(output_dir, base + ext)) elif ext == '.rc': # these need to be compiled to .res-files - obj_names.append (os.path.join (output_dir, base + '.res')) + obj_names.append(os.path.join(output_dir, base + '.res')) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names # object_filenames () - def preprocess (self, - source, - output_file=None, - macros=None, - include_dirs=None, - extra_preargs=None, - extra_postargs=None): - - (_, macros, include_dirs) = \ - self._fix_compile_args(None, macros, include_dirs) + def preprocess( + self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None, + ): + (_, macros, include_dirs) = self._fix_compile_args(None, macros, include_dirs) pp_opts = gen_preprocess_options(macros, include_dirs) pp_args = ['cpp32.exe'] + pp_opts if output_file is not None: diff --git a/setuptools/_distutils/ccompiler.py b/setuptools/_distutils/ccompiler.py index 777fc661ea..1818fce901 100644 --- a/setuptools/_distutils/ccompiler.py +++ b/setuptools/_distutils/ccompiler.py @@ -3,14 +3,25 @@ Contains CCompiler, an abstract base class that defines the interface for the Distutils compiler abstraction model.""" -import sys, os, re -from distutils.errors import * -from distutils.spawn import spawn -from distutils.file_util import move_file -from distutils.dir_util import mkpath -from distutils.dep_util import newer_group -from distutils.util import split_quoted, execute -from distutils import log +import sys +import os +import re +import warnings + +from .errors import ( + CompileError, + LinkError, + UnknownFileError, + DistutilsPlatformError, + DistutilsModuleError, +) +from .spawn import spawn +from .file_util import move_file +from .dir_util import mkpath +from .dep_util import newer_group +from .util import split_quoted, execute +from ._log import log + class CCompiler: """Abstract base class to define the interface that must be implemented @@ -56,17 +67,16 @@ class CCompiler: # think this is useless without the ability to null out the # library search path anyways. 
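
find_library_file above encodes the Borland naming quirks as an ordered candidate list: _bcpp variants beat plain names, and _d (debug) variants beat release ones when debug is set. A sketch of the ordering:

    # Candidate order computed in BCPPCompiler.find_library_file.
    lib, debug = 'foo', 1
    if debug:
        dlib = lib + "_d"
        try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
    else:
        try_names = (lib + "_bcpp", lib)

    assert try_names == ('foo_d_bcpp', 'foo_bcpp', 'foo_d', 'foo')
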
- # Subclasses that rely on the standard filename generation methods # implemented below should override these; see the comment near # those methods ('object_filenames()' et. al.) for details: - src_extensions = None # list of strings - obj_extension = None # string + src_extensions = None # list of strings + obj_extension = None # string static_lib_extension = None - shared_lib_extension = None # string - static_lib_format = None # format string - shared_lib_format = None # prob. same as static_lib_format - exe_extension = None # string + shared_lib_extension = None # string + static_lib_format = None # format string + shared_lib_format = None # prob. same as static_lib_format + exe_extension = None # string # Default language settings. language_map is used to detect a source # file or Extension target language, checking source filenames. @@ -74,14 +84,25 @@ class CCompiler: # what language to use when mixing source types. For example, if some # extension has two files with ".c" extension, and one with ".cpp", it # is still linked as c++. - language_map = {".c" : "c", - ".cc" : "c++", - ".cpp" : "c++", - ".cxx" : "c++", - ".m" : "objc", - } + language_map = { + ".c": "c", + ".cc": "c++", + ".cpp": "c++", + ".cxx": "c++", + ".m": "objc", + } language_order = ["c++", "objc", "c"] + include_dirs = [] + """ + include dirs specific to this compiler class + """ + + library_dirs = [] + """ + library dirs specific to this compiler class + """ + def __init__(self, verbose=0, dry_run=0, force=0): self.dry_run = dry_run self.force = force @@ -146,8 +167,10 @@ class (via the 'executables' class attribute), but most will have: for key in kwargs: if key not in self.executables: - raise ValueError("unknown executable '%s' for class %s" % - (key, self.__class__.__name__)) + raise ValueError( + "unknown executable '%s' for class %s" + % (key, self.__class__.__name__) + ) self.set_executable(key, kwargs[key]) def set_executable(self, key, value): @@ -170,14 +193,19 @@ def _check_macro_definitions(self, definitions): nothing if all definitions are OK, raise TypeError otherwise. """ for defn in definitions: - if not (isinstance(defn, tuple) and - (len(defn) in (1, 2) and - (isinstance (defn[1], str) or defn[1] is None)) and - isinstance (defn[0], str)): - raise TypeError(("invalid macro definition '%s': " % defn) + \ - "must be tuple (string,), (string, string), or " + \ - "(string, None)") - + if not ( + isinstance(defn, tuple) + and ( + len(defn) in (1, 2) + and (isinstance(defn[1], str) or defn[1] is None) + ) + and isinstance(defn[0], str) + ): + raise TypeError( + ("invalid macro definition '%s': " % defn) + + "must be tuple (string,), (string, string), or " + + "(string, None)" + ) # -- Bookkeeping methods ------------------------------------------- @@ -190,7 +218,7 @@ def define_macro(self, name, value=None): """ # Delete from the list of macro definitions/undefinitions if # already there (so that this one will take precedence). - i = self._find_macro (name) + i = self._find_macro(name) if i is not None: del self.macros[i] @@ -207,7 +235,7 @@ def undefine_macro(self, name): """ # Delete from the list of macro definitions/undefinitions if # already there (so that this one will take precedence). 
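
The bookkeeping define_macro and undefine_macro perform on self.macros treats a (name, value) 2-tuple as a definition and a (name,) 1-tuple as an undefinition, deleting any earlier entry for the same name so the latest call wins. A sketch of that protocol:

    # (name, value) defines, (name,) undefines, last call wins.
    macros = [('NDEBUG', '1'), ('TRACE', None)]

    def _find_macro(name):
        for i, defn in enumerate(macros):
            if defn[0] == name:
                return i
        return None

    # undefine_macro('NDEBUG'): drop the prior entry, append a 1-tuple.
    i = _find_macro('NDEBUG')
    if i is not None:
        del macros[i]
    macros.append(('NDEBUG',))

    assert macros == [('TRACE', None), ('NDEBUG',)]
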
- i = self._find_macro (name) + i = self._find_macro(name) if i is not None: del self.macros[i] @@ -301,41 +329,20 @@ def set_link_objects(self, objects): """ self.objects = objects[:] - # -- Private utility methods -------------------------------------- # (here for the convenience of subclasses) # Helper method to prep compiler in subclass compile() methods - def _setup_compile(self, outdir, macros, incdirs, sources, depends, - extra): + def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra): """Process arguments and decide which source files to compile.""" - if outdir is None: - outdir = self.output_dir - elif not isinstance(outdir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if incdirs is None: - incdirs = self.include_dirs - elif isinstance(incdirs, (list, tuple)): - incdirs = list(incdirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") + outdir, macros, incdirs = self._fix_compile_args(outdir, macros, incdirs) if extra is None: extra = [] # Get the list of expected output (object) files - objects = self.object_filenames(sources, strip_dir=0, - output_dir=outdir) + objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir) assert len(objects) == len(sources) pp_opts = gen_preprocess_options(macros, incdirs) @@ -382,12 +389,14 @@ def _fix_compile_args(self, output_dir, macros, include_dirs): raise TypeError("'macros' (if supplied) must be a list of tuples") if include_dirs is None: - include_dirs = self.include_dirs + include_dirs = list(self.include_dirs) elif isinstance(include_dirs, (list, tuple)): include_dirs = list(include_dirs) + (self.include_dirs or []) else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") + raise TypeError("'include_dirs' (if supplied) must be a list of strings") + + # add include dirs for class + include_dirs += self.__class__.include_dirs return output_dir, macros, include_dirs @@ -434,27 +443,30 @@ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): if libraries is None: libraries = self.libraries elif isinstance(libraries, (list, tuple)): - libraries = list (libraries) + (self.libraries or []) + libraries = list(libraries) + (self.libraries or []) else: - raise TypeError( - "'libraries' (if supplied) must be a list of strings") + raise TypeError("'libraries' (if supplied) must be a list of strings") if library_dirs is None: library_dirs = self.library_dirs elif isinstance(library_dirs, (list, tuple)): - library_dirs = list (library_dirs) + (self.library_dirs or []) + library_dirs = list(library_dirs) + (self.library_dirs or []) else: - raise TypeError( - "'library_dirs' (if supplied) must be a list of strings") + raise TypeError("'library_dirs' (if supplied) must be a list of strings") + + # add library dirs for class + library_dirs += self.__class__.library_dirs if runtime_library_dirs is None: runtime_library_dirs = self.runtime_library_dirs elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list(runtime_library_dirs) + - (self.runtime_library_dirs or [])) + runtime_library_dirs = list(runtime_library_dirs) + ( + self.runtime_library_dirs or [] + ) else: - raise TypeError("'runtime_library_dirs' (if supplied) " - "must be a list of strings") + raise 
TypeError( + "'runtime_library_dirs' (if supplied) " "must be a list of strings" + ) return (libraries, library_dirs, runtime_library_dirs) @@ -466,9 +478,9 @@ def _need_link(self, objects, output_file): return True else: if self.dry_run: - newer = newer_group (objects, output_file, missing='newer') + newer = newer_group(objects, output_file, missing='newer') else: - newer = newer_group (objects, output_file) + newer = newer_group(objects, output_file) return newer def detect_language(self, sources): @@ -491,12 +503,18 @@ def detect_language(self, sources): pass return lang - # -- Worker methods ------------------------------------------------ # (must be implemented by subclasses) - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): + def preprocess( + self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None, + ): """Preprocess a single C/C++ source file, named in 'source'. Output will be written to file named 'output_file', or stdout if 'output_file' not supplied. 'macros' is a list of macro @@ -508,9 +526,17 @@ def preprocess(self, source, output_file=None, macros=None, """ pass - def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, depends=None): + def compile( + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): """Compile one or more source files. 'sources' must be a list of filenames, most likely C/C++ @@ -561,9 +587,9 @@ def compile(self, sources, output_dir=None, macros=None, """ # A concrete compiler class can either override this method # entirely or implement _compile(). - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) for obj in objects: @@ -582,8 +608,9 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): # should implement _compile(). pass - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=0, target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): """Link a bunch of stuff together to create a static library file. 
The "bunch of stuff" consists of the list of object files supplied as 'objects', the extra object files supplied to @@ -608,26 +635,27 @@ def create_static_lib(self, objects, output_libname, output_dir=None, """ pass - # values for target_desc parameter in link() SHARED_OBJECT = "shared_object" SHARED_LIBRARY = "shared_library" EXECUTABLE = "executable" - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): """Link a bunch of stuff together to create an executable or shared library file. @@ -673,66 +701,98 @@ def link(self, """ raise NotImplementedError - # Old 'link_*()' methods, rewritten to use the new 'link()' method. - def link_shared_lib(self, - objects, - output_libname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_LIBRARY, objects, - self.library_filename(output_libname, lib_type='shared'), - output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_shared_object(self, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_OBJECT, objects, - output_filename, output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_executable(self, - objects, - output_progname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - target_lang=None): - self.link(CCompiler.EXECUTABLE, objects, - self.executable_filename(output_progname), output_dir, - libraries, library_dirs, runtime_library_dirs, None, - debug, extra_preargs, extra_postargs, None, target_lang) - + def link_shared_lib( + self, + objects, + output_libname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): + self.link( + CCompiler.SHARED_LIBRARY, + objects, + self.library_filename(output_libname, lib_type='shared'), + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + export_symbols, + debug, + extra_preargs, + extra_postargs, + build_temp, + target_lang, + ) + + def link_shared_object( + self, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): + self.link( + CCompiler.SHARED_OBJECT, + objects, + output_filename, + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + export_symbols, + debug, + 
extra_preargs, + extra_postargs, + build_temp, + target_lang, + ) + + def link_executable( + self, + objects, + output_progname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + target_lang=None, + ): + self.link( + CCompiler.EXECUTABLE, + objects, + self.executable_filename(output_progname), + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + None, + debug, + extra_preargs, + extra_postargs, + None, + target_lang, + ) # -- Miscellaneous methods ----------------------------------------- # These are all used by the 'gen_lib_options() function; there is @@ -757,20 +817,41 @@ def library_option(self, lib): """ raise NotImplementedError - def has_function(self, funcname, includes=None, include_dirs=None, - libraries=None, library_dirs=None): - """Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. + def has_function( # noqa: C901 + self, + funcname, + includes=None, + include_dirs=None, + libraries=None, + library_dirs=None, + ): + """Return a boolean indicating whether funcname is provided as + a symbol on the current platform. The optional arguments can + be used to augment the compilation environment. + + The libraries argument is a list of flags to be passed to the + linker to make additional symbol definitions available for + linking. + + The includes and include_dirs arguments are deprecated. + Usually, supplying include files with function declarations + will cause function detection to fail even in cases where the + symbol is available for linking. + """ # this can't be included at module scope because it tries to # import math which might not be available at that point - maybe # the necessary logic should just be inlined? import tempfile + if includes is None: includes = [] + else: + warnings.warn("includes is deprecated", DeprecationWarning) if include_dirs is None: include_dirs = [] + else: + warnings.warn("include_dirs is deprecated", DeprecationWarning) if libraries is None: libraries = [] if library_dirs is None: @@ -780,12 +861,32 @@ def has_function(self, funcname, includes=None, include_dirs=None, try: for incl in includes: f.write("""#include "%s"\n""" % incl) - f.write("""\ + if not includes: + # Use "char func(void);" as the prototype to follow + # what autoconf does. This prototype does not match + # any well-known function the compiler might recognize + # as a builtin, so this ends up as a true link test. + # Without a fake prototype, the test would need to + # know the exact argument types, and the has_function + # interface does not provide that level of information. 
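
The docstring above explains why includes/include_dirs are now deprecated for has_function; the hunk just below adds the autoconf-style fake prototype that keeps the probe a pure link test. Rendered as a standalone string (symbol name invented):

    # The C stub has_function emits when no includes are passed: the
    # deliberately vague "char pow(void);" prototype matches no builtin,
    # so success means the symbol was found at link time.
    funcname = 'pow'
    source = (
        '#ifdef __cplusplus\n'
        'extern "C"\n'
        '#endif\n'
        'char %s(void);\n'
        'int main (int argc, char **argv) {\n'
        '    %s();\n'
        '    return 0;\n'
        '}\n'
    ) % (funcname, funcname)
    print(source)
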
+ f.write( + """\ +#ifdef __cplusplus +extern "C" +#endif +char %s(void); +""" + % funcname + ) + f.write( + """\ int main (int argc, char **argv) { %s(); return 0; } -""" % funcname) +""" + % funcname + ) finally: f.close() try: @@ -796,19 +897,21 @@ def has_function(self, funcname, includes=None, include_dirs=None, os.remove(fname) try: - self.link_executable(objects, "a.out", - libraries=libraries, - library_dirs=library_dirs) + self.link_executable( + objects, "a.out", libraries=libraries, library_dirs=library_dirs + ) except (LinkError, TypeError): return False else: - os.remove(os.path.join(self.output_dir or '', "a.out")) + os.remove( + self.executable_filename("a.out", output_dir=self.output_dir or '') + ) finally: for fn in objects: os.remove(fn) return True - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=0): """Search the specified list of directories for a static or shared library file 'lib' and return the full path to that file. If 'debug' true, look for a debugging version (if that makes sense on @@ -854,19 +957,39 @@ def find_library_file (self, dirs, lib, debug=0): def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - raise UnknownFileError( - "unknown file type '%s' (from '%s')" % (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names + return list( + self._make_out_path(output_dir, strip_dir, src_name) + for src_name in source_filenames + ) + + @property + def out_extensions(self): + return dict.fromkeys(self.src_extensions, self.obj_extension) + + def _make_out_path(self, output_dir, strip_dir, src_name): + base, ext = os.path.splitext(src_name) + base = self._make_relative(base) + try: + new_ext = self.out_extensions[ext] + except LookupError: + raise UnknownFileError( + "unknown file type '{}' (from '{}')".format(ext, src_name) + ) + if strip_dir: + base = os.path.basename(base) + return os.path.join(output_dir, base + new_ext) + + @staticmethod + def _make_relative(base): + """ + In order to ensure that a filename always honors the + indicated output_dir, make sure it's relative. + Ref python/cpython#37775. 
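
_make_relative above is the crux of the python/cpython#37775 fix: object paths are forced relative so they always land under output_dir. A sketch with invented paths:

    import os

    # Mirror of CCompiler._make_relative: drop the drive, then drop a
    # single leading separator; isabs() returns a bool (0 or 1), so it
    # doubles as the slice offset.
    def _make_relative(base):
        no_drive = os.path.splitdrive(base)[1]
        return no_drive[os.path.isabs(no_drive):]

    print(_make_relative('/src/pkg/mod'))   # 'src/pkg/mod' on POSIX
    # On Windows, r'C:\src\mod' would come back as r'src\mod'.
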
+ """ + # Chop off the drive + no_drive = os.path.splitdrive(base)[1] + # If abs, chop off leading / + return no_drive[os.path.isabs(no_drive) :] def shared_object_filename(self, basename, strip_dir=0, output_dir=''): assert output_dir is not None @@ -880,12 +1003,13 @@ def executable_filename(self, basename, strip_dir=0, output_dir=''): basename = os.path.basename(basename) return os.path.join(output_dir, basename + (self.exe_extension or '')) - def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=0, output_dir=''): + def library_filename( + self, libname, lib_type='static', strip_dir=0, output_dir='' # or 'shared' + ): assert output_dir is not None - if lib_type not in ("static", "shared", "dylib", "xcode_stub"): - raise ValueError( - "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") + expected = '"static", "shared", "dylib", "xcode_stub"' + if lib_type not in eval(expected): + raise ValueError(f"'lib_type' must be {expected}") fmt = getattr(self, lib_type + "_lib_format") ext = getattr(self, lib_type + "_lib_extension") @@ -896,7 +1020,6 @@ def library_filename(self, libname, lib_type='static', # or 'shared' return os.path.join(output_dir, dir, filename) - # -- Utility methods ----------------------------------------------- def announce(self, msg, level=1): @@ -904,6 +1027,7 @@ def announce(self, msg, level=1): def debug_print(self, msg): from distutils.debug import DEBUG + if DEBUG: print(msg) @@ -919,7 +1043,7 @@ def spawn(self, cmd, **kwargs): def move_file(self, src, dst): return move_file(src, dst, dry_run=self.dry_run) - def mkpath (self, name, mode=0o777): + def mkpath(self, name, mode=0o777): mkpath(name, mode, dry_run=self.dry_run) @@ -928,54 +1052,59 @@ def mkpath (self, name, mode=0o777): # patterns. Order is important; platform mappings are preferred over # OS names. _default_compilers = ( - # Platform string mappings - # on a cygwin built python we can use gcc like an ordinary UNIXish # compiler ('cygwin.*', 'unix'), - # OS name mappings ('posix', 'unix'), ('nt', 'msvc'), +) - ) def get_default_compiler(osname=None, platform=None): """Determine the default compiler to use for the given platform. - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. + osname should be one of the standard Python OS names (i.e. the + ones returned by os.name) and platform the common value + returned by sys.platform for the platform in question. - The default values are os.name and sys.platform in case the - parameters are not given. + The default values are os.name and sys.platform in case the + parameters are not given. """ if osname is None: osname = os.name if platform is None: platform = sys.platform for pattern, compiler in _default_compilers: - if re.match(pattern, platform) is not None or \ - re.match(pattern, osname) is not None: + if ( + re.match(pattern, platform) is not None + or re.match(pattern, osname) is not None + ): return compiler # Default to Unix compiler return 'unix' + # Map compiler types to (module_name, class_name) pairs -- ie. where to # find the code that implements an interface to this compiler. (The module # is assumed to be in the 'distutils' package.) 
-compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', - "standard UNIX-style compiler"), - 'msvc': ('_msvccompiler', 'MSVCCompiler', - "Microsoft Visual C++"), - 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', - "Cygwin port of GNU C Compiler for Win32"), - 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', - "Mingw32 port of GNU C Compiler for Win32"), - 'bcpp': ('bcppcompiler', 'BCPPCompiler', - "Borland C++ Compiler"), - } +compiler_class = { + 'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"), + 'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"), + 'cygwin': ( + 'cygwinccompiler', + 'CygwinCCompiler', + "Cygwin port of GNU C Compiler for Win32", + ), + 'mingw32': ( + 'cygwinccompiler', + 'Mingw32CCompiler', + "Mingw32 port of GNU C Compiler for Win32", + ), + 'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"), +} + def show_compilers(): """Print list of available compilers (used by the "--help-compiler" @@ -985,10 +1114,10 @@ def show_compilers(): # "--compiler", which just happens to be the case for the three # commands that use it. from distutils.fancy_getopt import FancyGetopt + compilers = [] for compiler in compiler_class.keys(): - compilers.append(("compiler="+compiler, None, - compiler_class[compiler][2])) + compilers.append(("compiler=" + compiler, None, compiler_class[compiler][2])) compilers.sort() pretty_printer = FancyGetopt(compilers) pretty_printer.print_help("List of available compilers:") @@ -1021,17 +1150,18 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): try: module_name = "distutils." + module_name - __import__ (module_name) + __import__(module_name) module = sys.modules[module_name] klass = vars(module)[class_name] except ImportError: raise DistutilsModuleError( - "can't compile C/C++ code: unable to load module '%s'" % \ - module_name) + "can't compile C/C++ code: unable to load module '%s'" % module_name + ) except KeyError: raise DistutilsModuleError( - "can't compile C/C++ code: unable to find class '%s' " - "in module '%s'" % (class_name, module_name)) + "can't compile C/C++ code: unable to find class '%s' " + "in module '%s'" % (class_name, module_name) + ) # XXX The None is necessary to preserve backwards compatibility # with classes that expect verbose to be the first positional @@ -1064,14 +1194,14 @@ def gen_preprocess_options(macros, include_dirs): for macro in macros: if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): raise TypeError( - "bad macro definition '%s': " - "each element of 'macros' list must be a 1- or 2-tuple" - % macro) + "bad macro definition '%s': " + "each element of 'macros' list must be a 1- or 2-tuple" % macro + ) - if len(macro) == 1: # undefine this macro + if len(macro) == 1: # undefine this macro pp_opts.append("-U%s" % macro[0]) elif len(macro) == 2: - if macro[1] is None: # define with no explicit value + if macro[1] is None: # define with no explicit value pp_opts.append("-D%s" % macro[0]) else: # XXX *don't* need to be clever about quoting the @@ -1084,7 +1214,7 @@ def gen_preprocess_options(macros, include_dirs): return pp_opts -def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): +def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): """Generate linker options for searching library directories and linking with specific libraries. 'libraries' and 'library_dirs' are, respectively, lists of library names (not filenames!) 
and search @@ -1116,8 +1246,9 @@ def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): if lib_file: lib_opts.append(lib_file) else: - compiler.warn("no library file corresponding to " - "'%s' found (skipping)" % lib) + compiler.warn( + "no library file corresponding to " "'%s' found (skipping)" % lib + ) else: - lib_opts.append(compiler.library_option (lib)) + lib_opts.append(compiler.library_option(lib)) return lib_opts diff --git a/setuptools/_distutils/cmd.py b/setuptools/_distutils/cmd.py index dba3191e58..3860c3ff1e 100644 --- a/setuptools/_distutils/cmd.py +++ b/setuptools/_distutils/cmd.py @@ -4,10 +4,15 @@ in the distutils.command package. """ -import sys, os, re -from distutils.errors import DistutilsOptionError -from distutils import util, dir_util, file_util, archive_util, dep_util -from distutils import log +import sys +import os +import re +import logging + +from .errors import DistutilsOptionError +from . import util, dir_util, file_util, archive_util, dep_util +from ._log import log + class Command: """Abstract base class for defining command classes, the "worker bees" @@ -41,7 +46,6 @@ class Command: # defined. The canonical example is the "install" command. sub_commands = [] - # -- Creation/initialization methods ------------------------------- def __init__(self, dist): @@ -130,8 +134,9 @@ def initialize_options(self): This method must be implemented by all command classes. """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) + raise RuntimeError( + "abstract method -- subclass %s must override" % self.__class__ + ) def finalize_options(self): """Set final values for all the options that this command supports. @@ -144,23 +149,23 @@ def finalize_options(self): This method must be implemented by all command classes. """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - + raise RuntimeError( + "abstract method -- subclass %s must override" % self.__class__ + ) def dump_options(self, header=None, indent=""): from distutils.fancy_getopt import longopt_xlate + if header is None: header = "command options for '%s':" % self.get_command_name() - self.announce(indent + header, level=log.INFO) + self.announce(indent + header, level=logging.INFO) indent = indent + " " - for (option, _, _) in self.user_options: + for option, _, _ in self.user_options: option = option.translate(longopt_xlate) if option[-1] == "=": option = option[:-1] value = getattr(self, option) - self.announce(indent + "%s = %s" % (option, value), - level=log.INFO) + self.announce(indent + "{} = {}".format(option, value), level=logging.INFO) def run(self): """A command's raison d'etre: carry out the action it exists to @@ -172,13 +177,11 @@ def run(self): This method must be implemented by all command classes. """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) + raise RuntimeError( + "abstract method -- subclass %s must override" % self.__class__ + ) - def announce(self, msg, level=1): - """If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ + def announce(self, msg, level=logging.DEBUG): log.log(level, msg) def debug_print(self, msg): @@ -186,11 +189,11 @@ def debug_print(self, msg): DISTUTILS_DEBUG environment variable) flag is true. 
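
The cmd.py changes above swap distutils' private log module for stdlib logging: announce() now takes a standard level (defaulting to logging.DEBUG) and dump_options logs at logging.INFO through the shared root logger from _log.py. A runnable sketch of the new calling convention:

    import logging

    log = logging.getLogger()   # mirrors distutils._log.log
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s')

    # The new announce() signature: a stdlib logging level, not a
    # distutils.log constant.
    def announce(msg, level=logging.DEBUG):
        log.log(level, msg)

    announce("command options for 'build':", level=logging.INFO)
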
""" from distutils.debug import DEBUG + if DEBUG: print(msg) sys.stdout.flush() - # -- Option validation methods ------------------------------------- # (these are very handy in writing the 'finalize_options()' method) # @@ -210,8 +213,9 @@ def _ensure_stringlike(self, option, what, default=None): setattr(self, option, default) return default elif not isinstance(val, str): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" - % (option, what, val)) + raise DistutilsOptionError( + "'{}' must be a {} (got `{}`)".format(option, what, val) + ) return val def ensure_string(self, option, default=None): @@ -238,27 +242,29 @@ def ensure_string_list(self, option): ok = False if not ok: raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" - % (option, val)) + "'{}' must be a list of strings (got {!r})".format(option, val) + ) - def _ensure_tested_string(self, option, tester, what, error_fmt, - default=None): + def _ensure_tested_string(self, option, tester, what, error_fmt, default=None): val = self._ensure_stringlike(option, what, default) if val is not None and not tester(val): - raise DistutilsOptionError(("error in '%s' option: " + error_fmt) - % (option, val)) + raise DistutilsOptionError( + ("error in '%s' option: " + error_fmt) % (option, val) + ) def ensure_filename(self, option): """Ensure that 'option' is the name of an existing file.""" - self._ensure_tested_string(option, os.path.isfile, - "filename", - "'%s' does not exist or is not a file") + self._ensure_tested_string( + option, os.path.isfile, "filename", "'%s' does not exist or is not a file" + ) def ensure_dirname(self, option): - self._ensure_tested_string(option, os.path.isdir, - "directory name", - "'%s' does not exist or is not a directory") - + self._ensure_tested_string( + option, + os.path.isdir, + "directory name", + "'%s' does not exist or is not a directory", + ) # -- Convenience methods for commands ------------------------------ @@ -285,7 +291,7 @@ def set_undefined_options(self, src_cmd, *option_pairs): # Option_pairs: list of (src_option, dst_option) tuples src_cmd_obj = self.distribution.get_command_obj(src_cmd) src_cmd_obj.ensure_finalized() - for (src_option, dst_option) in option_pairs: + for src_option, dst_option in option_pairs: if getattr(self, dst_option) is None: setattr(self, dst_option, getattr(src_cmd_obj, src_option)) @@ -302,8 +308,7 @@ def get_finalized_command(self, command, create=1): # XXX rename to 'get_reinitialized_command()'? (should do the # same in dist.py, if so) def reinitialize_command(self, command, reinit_subcommands=0): - return self.distribution.reinitialize_command(command, - reinit_subcommands) + return self.distribution.reinitialize_command(command, reinit_subcommands) def run_command(self, command): """Run some other command: uses the 'run_command()' method of @@ -320,16 +325,15 @@ def get_sub_commands(self): run for the current distribution. Return a list of command names. 
""" commands = [] - for (cmd_name, method) in self.sub_commands: + for cmd_name, method in self.sub_commands: if method is None or method(self): commands.append(cmd_name) return commands - # -- External world manipulation ----------------------------------- def warn(self, msg): - log.warn("warning: %s: %s\n", self.get_command_name(), msg) + log.warning("warning: %s: %s\n", self.get_command_name(), msg) def execute(self, func, args, msg=None, level=1): util.execute(func, args, msg, dry_run=self.dry_run) @@ -337,41 +341,70 @@ def execute(self, func, args, msg=None, level=1): def mkpath(self, name, mode=0o777): dir_util.mkpath(name, mode, dry_run=self.dry_run) - def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, - link=None, level=1): + def copy_file( + self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1 + ): """Copy a file respecting verbose, dry-run and force flags. (The former two default to whatever is in the Distribution object, and the latter defaults to false for commands that don't define it.)""" - return file_util.copy_file(infile, outfile, preserve_mode, - preserve_times, not self.force, link, - dry_run=self.dry_run) - - def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, level=1): + return file_util.copy_file( + infile, + outfile, + preserve_mode, + preserve_times, + not self.force, + link, + dry_run=self.dry_run, + ) + + def copy_tree( + self, + infile, + outfile, + preserve_mode=1, + preserve_times=1, + preserve_symlinks=0, + level=1, + ): """Copy an entire directory tree respecting verbose, dry-run, and force flags. """ - return dir_util.copy_tree(infile, outfile, preserve_mode, - preserve_times, preserve_symlinks, - not self.force, dry_run=self.dry_run) - - def move_file (self, src, dst, level=1): + return dir_util.copy_tree( + infile, + outfile, + preserve_mode, + preserve_times, + preserve_symlinks, + not self.force, + dry_run=self.dry_run, + ) + + def move_file(self, src, dst, level=1): """Move a file respecting dry-run flag.""" return file_util.move_file(src, dst, dry_run=self.dry_run) def spawn(self, cmd, search_path=1, level=1): """Spawn an external command respecting dry-run flag.""" from distutils.spawn import spawn - spawn(cmd, search_path, dry_run=self.dry_run) - def make_archive(self, base_name, format, root_dir=None, base_dir=None, - owner=None, group=None): - return archive_util.make_archive(base_name, format, root_dir, base_dir, - dry_run=self.dry_run, - owner=owner, group=group) + spawn(cmd, search_path, dry_run=self.dry_run) - def make_file(self, infiles, outfile, func, args, - exec_msg=None, skip_msg=None, level=1): + def make_archive( + self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None + ): + return archive_util.make_archive( + base_name, + format, + root_dir, + base_dir, + dry_run=self.dry_run, + owner=owner, + group=group, + ) + + def make_file( + self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1 + ): """Special case of 'execute()' for operations that process one or more input files and generate one output file. 
Works just like 'execute()', except the operation is skipped and a different @@ -387,11 +420,10 @@ def make_file(self, infiles, outfile, func, args, if isinstance(infiles, str): infiles = (infiles,) elif not isinstance(infiles, (list, tuple)): - raise TypeError( - "'infiles' must be a string, or a list or tuple of strings") + raise TypeError("'infiles' must be a string, or a list or tuple of strings") if exec_msg is None: - exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) + exec_msg = "generating {} from {}".format(outfile, ', '.join(infiles)) # If 'outfile' must be regenerated (either because it doesn't # exist, is out-of-date, or the 'force' flag is true) then diff --git a/setuptools/_distutils/command/__init__.py b/setuptools/_distutils/command/__init__.py index 481eea9fd4..028dcfa0fc 100644 --- a/setuptools/_distutils/command/__init__.py +++ b/setuptools/_distutils/command/__init__.py @@ -3,29 +3,23 @@ Package containing implementation of all the standard Distutils commands.""" -__all__ = ['build', - 'build_py', - 'build_ext', - 'build_clib', - 'build_scripts', - 'clean', - 'install', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - 'sdist', - 'register', - 'bdist', - 'bdist_dumb', - 'bdist_rpm', - 'bdist_wininst', - 'check', - 'upload', - # These two are reserved for future use: - #'bdist_sdux', - #'bdist_pkgtool', - # Note: - # bdist_packager is not included because it only provides - # an abstract base class - ] +__all__ = [ # noqa: F822 + 'build', + 'build_py', + 'build_ext', + 'build_clib', + 'build_scripts', + 'clean', + 'install', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + 'sdist', + 'register', + 'bdist', + 'bdist_dumb', + 'bdist_rpm', + 'check', + 'upload', +] diff --git a/setuptools/_distutils/command/_framework_compat.py b/setuptools/_distutils/command/_framework_compat.py new file mode 100644 index 0000000000..cffa27cb08 --- /dev/null +++ b/setuptools/_distutils/command/_framework_compat.py @@ -0,0 +1,55 @@ +""" +Backward compatibility for homebrew builds on macOS. +""" + + +import sys +import os +import functools +import subprocess +import sysconfig + + +@functools.lru_cache() +def enabled(): + """ + Only enabled for Python 3.9 framework homebrew builds + except ensurepip and venv. + """ + PY39 = (3, 9) < sys.version_info < (3, 10) + framework = sys.platform == 'darwin' and sys._framework + homebrew = "Cellar" in sysconfig.get_config_var('projectbase') + venv = sys.prefix != sys.base_prefix + ensurepip = os.environ.get("ENSUREPIP_OPTIONS") + return PY39 and framework and homebrew and not venv and not ensurepip + + +schemes = dict( + osx_framework_library=dict( + stdlib='{installed_base}/{platlibdir}/python{py_version_short}', + platstdlib='{platbase}/{platlibdir}/python{py_version_short}', + purelib='{homebrew_prefix}/lib/python{py_version_short}/site-packages', + platlib='{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages', + include='{installed_base}/include/python{py_version_short}{abiflags}', + platinclude='{installed_platbase}/include/python{py_version_short}{abiflags}', + scripts='{homebrew_prefix}/bin', + data='{homebrew_prefix}', + ) +) + + +@functools.lru_cache() +def vars(): + if not enabled(): + return {} + homebrew_prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip() + return locals() + + +def scheme(name): + """ + Override the selected scheme for posix_prefix. 
+ """ + if not enabled() or not name.endswith('_prefix'): + return name + return 'osx_framework_library' diff --git a/setuptools/_distutils/command/bdist.py b/setuptools/_distutils/command/bdist.py index 014871d280..6329039ce4 100644 --- a/setuptools/_distutils/command/bdist.py +++ b/setuptools/_distutils/command/bdist.py @@ -4,79 +4,92 @@ distribution).""" import os -from distutils.core import Command -from distutils.errors import * -from distutils.util import get_platform +import warnings + +from ..core import Command +from ..errors import DistutilsPlatformError, DistutilsOptionError +from ..util import get_platform def show_formats(): - """Print list of available formats (arguments to "--format" option). - """ - from distutils.fancy_getopt import FancyGetopt + """Print list of available formats (arguments to "--format" option).""" + from ..fancy_getopt import FancyGetopt + formats = [] for format in bdist.format_commands: - formats.append(("formats=" + format, None, - bdist.format_command[format][1])) + formats.append(("formats=" + format, None, bdist.format_commands[format][1])) pretty_printer = FancyGetopt(formats) pretty_printer.print_help("List of available distribution formats:") -class bdist(Command): +class ListCompat(dict): + # adapter to allow for Setuptools compatibility in format_commands + def append(self, item): + warnings.warn( + """format_commands is now a dict. append is deprecated.""", + DeprecationWarning, + stacklevel=2, + ) + +class bdist(Command): description = "create a built (binary) distribution" - user_options = [('bdist-base=', 'b', - "temporary directory for creating built distributions"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('formats=', None, - "formats for distribution (comma-separated list)"), - ('dist-dir=', 'd', - "directory to put final built distributions in " - "[default: dist]"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] + user_options = [ + ('bdist-base=', 'b', "temporary directory for creating built distributions"), + ( + 'plat-name=', + 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(), + ), + ('formats=', None, "formats for distribution (comma-separated list)"), + ( + 'dist-dir=', + 'd', + "directory to put final built distributions in " "[default: dist]", + ), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), + ( + 'owner=', + 'u', + "Owner name used when creating a tar file" " [default: current user]", + ), + ( + 'group=', + 'g', + "Group name used when creating a tar file" " [default: current group]", + ), + ] boolean_options = ['skip-build'] help_options = [ - ('help-formats', None, - "lists available distribution formats", show_formats), - ] + ('help-formats', None, "lists available distribution formats", show_formats), + ] # The following commands do not take a format option from bdist no_format_option = ('bdist_rpm',) # This won't do in reality: will need to distinguish RPM-ish Linux, # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. - default_format = {'posix': 'gztar', - 'nt': 'zip'} - - # Establish the preferred order (for the --help-formats option). 
- format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', - 'wininst', 'zip', 'msi'] - - # And the real information. - format_command = {'rpm': ('bdist_rpm', "RPM distribution"), - 'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'xztar': ('bdist_dumb', "xz'ed tar file"), - 'ztar': ('bdist_dumb', "compressed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'wininst': ('bdist_wininst', - "Windows executable installer"), - 'zip': ('bdist_dumb', "ZIP file"), - 'msi': ('bdist_msi', "Microsoft Installer") - } - + default_format = {'posix': 'gztar', 'nt': 'zip'} + + # Define commands in preferred order for the --help-formats option + format_commands = ListCompat( + { + 'rpm': ('bdist_rpm', "RPM distribution"), + 'gztar': ('bdist_dumb', "gzip'ed tar file"), + 'bztar': ('bdist_dumb', "bzip2'ed tar file"), + 'xztar': ('bdist_dumb', "xz'ed tar file"), + 'ztar': ('bdist_dumb', "compressed tar file"), + 'tar': ('bdist_dumb', "tar file"), + 'zip': ('bdist_dumb', "ZIP file"), + } + ) + + # for compatibility until consumers only reference format_commands + format_command = format_commands def initialize_options(self): self.bdist_base = None @@ -100,8 +113,7 @@ def finalize_options(self): # "build/bdist./dumb", "build/bdist./rpm", etc.) if self.bdist_base is None: build_base = self.get_finalized_command('build').build_base - self.bdist_base = os.path.join(build_base, - 'bdist.' + self.plat_name) + self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name) self.ensure_string_list('formats') if self.formats is None: @@ -109,8 +121,9 @@ def finalize_options(self): self.formats = [self.default_format[os.name]] except KeyError: raise DistutilsPlatformError( - "don't know how to create built distributions " - "on platform %s" % os.name) + "don't know how to create built distributions " + "on platform %s" % os.name + ) if self.dist_dir is None: self.dist_dir = "dist" @@ -120,7 +133,7 @@ def run(self): commands = [] for format in self.formats: try: - commands.append(self.format_command[format][0]) + commands.append(self.format_commands[format][0]) except KeyError: raise DistutilsOptionError("invalid format '%s'" % format) @@ -138,6 +151,6 @@ def run(self): # If we're going to need to run this command again, tell it to # keep its temporary files around so subsequent runs go faster. 
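# For context on the slice change below: `bdist.run()` resolves one
# sub-command per requested format, and when the same sub-command
# appears again later in the list it sets `keep_temp` so the build
# tree is reused.  A self-contained sketch of that loop shape (the
# format list is invented for illustration):

commands = ['bdist_dumb', 'bdist_rpm', 'bdist_dumb']
for i, cmd_name in enumerate(commands):
    # keep temporary files if this command will run again later
    keep_temp = cmd_name in commands[i + 1 :]
    print(cmd_name, 'keep_temp =', keep_temp)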
- if cmd_name in commands[i+1:]: + if cmd_name in commands[i + 1 :]: sub_cmd.keep_temp = 1 self.run_command(cmd_name) diff --git a/setuptools/_distutils/command/bdist_dumb.py b/setuptools/_distutils/command/bdist_dumb.py index f0d6b5b8cd..01dd79079b 100644 --- a/setuptools/_distutils/command/bdist_dumb.py +++ b/setuptools/_distutils/command/bdist_dumb.py @@ -5,47 +5,58 @@ $exec_prefix).""" import os -from distutils.core import Command -from distutils.util import get_platform -from distutils.dir_util import remove_tree, ensure_relative -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log +from ..core import Command +from ..util import get_platform +from ..dir_util import remove_tree, ensure_relative +from ..errors import DistutilsPlatformError +from ..sysconfig import get_python_version +from distutils._log import log -class bdist_dumb(Command): +class bdist_dumb(Command): description = "create a \"dumb\" built distribution" - user_options = [('bdist-dir=', 'd', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('format=', 'f', - "archive format to create (tar, gztar, bztar, xztar, " - "ztar, zip)"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths " - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] + user_options = [ + ('bdist-dir=', 'd', "temporary directory for creating the distribution"), + ( + 'plat-name=', + 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(), + ), + ( + 'format=', + 'f', + "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)", + ), + ( + 'keep-temp', + 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive", + ), + ('dist-dir=', 'd', "directory to put final built distributions in"), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), + ( + 'relative', + None, + "build the archive using relative paths " "(default: false)", + ), + ( + 'owner=', + 'u', + "Owner name used when creating a tar file" " [default: current user]", + ), + ( + 'group=', + 'g', + "Group name used when creating a tar file" " [default: current group]", + ), + ] boolean_options = ['keep-temp', 'skip-build', 'relative'] - default_format = { 'posix': 'gztar', - 'nt': 'zip' } + default_format = {'posix': 'gztar', 'nt': 'zip'} def initialize_options(self): self.bdist_dir = None @@ -68,13 +79,16 @@ def finalize_options(self): self.format = self.default_format[os.name] except KeyError: raise DistutilsPlatformError( - "don't know how to create dumb built distributions " - "on platform %s" % os.name) + "don't know how to create dumb built distributions " + "on platform %s" % os.name + ) - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ('skip_build', 'skip_build')) + self.set_undefined_options( + 'bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ('skip_build', 'skip_build'), + ) def run(self): if not 
self.skip_build: @@ -90,34 +104,40 @@ def run(self): # And make an archive relative to the root of the # pseudo-installation tree. - archive_basename = "%s.%s" % (self.distribution.get_fullname(), - self.plat_name) + archive_basename = "{}.{}".format( + self.distribution.get_fullname(), self.plat_name + ) pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) if not self.relative: archive_root = self.bdist_dir else: - if (self.distribution.has_ext_modules() and - (install.install_base != install.install_platbase)): + if self.distribution.has_ext_modules() and ( + install.install_base != install.install_platbase + ): raise DistutilsPlatformError( - "can't make a dumb built distribution where " - "base and platbase are different (%s, %s)" - % (repr(install.install_base), - repr(install.install_platbase))) + "can't make a dumb built distribution where " + "base and platbase are different (%s, %s)" + % (repr(install.install_base), repr(install.install_platbase)) + ) else: - archive_root = os.path.join(self.bdist_dir, - ensure_relative(install.install_base)) + archive_root = os.path.join( + self.bdist_dir, ensure_relative(install.install_base) + ) # Make the archive - filename = self.make_archive(pseudoinstall_root, - self.format, root_dir=archive_root, - owner=self.owner, group=self.group) + filename = self.make_archive( + pseudoinstall_root, + self.format, + root_dir=archive_root, + owner=self.owner, + group=self.group, + ) if self.distribution.has_ext_modules(): pyversion = get_python_version() else: pyversion = 'any' - self.distribution.dist_files.append(('bdist_dumb', pyversion, - filename)) + self.distribution.dist_files.append(('bdist_dumb', pyversion, filename)) if not self.keep_temp: remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/setuptools/_distutils/command/bdist_msi.py b/setuptools/_distutils/command/bdist_msi.py deleted file mode 100644 index 1525953241..0000000000 --- a/setuptools/_distutils/command/bdist_msi.py +++ /dev/null @@ -1,749 +0,0 @@ -# Copyright (C) 2005, 2006 Martin von Löwis -# Licensed to PSF under a Contributor Agreement. -# The bdist_wininst command proper -# based on bdist_wininst -""" -Implements the bdist_msi command. -""" - -import os -import sys -import warnings -from distutils.core import Command -from distutils.dir_util import remove_tree -from distutils.sysconfig import get_python_version -from distutils.version import StrictVersion -from distutils.errors import DistutilsOptionError -from distutils.util import get_platform -from distutils import log -import msilib -from msilib import schema, sequence, text -from msilib import Directory, Feature, Dialog, add_data - -class PyDialog(Dialog): - """Dialog class with a fixed layout: controls at the top, then a ruler, - then a list of buttons: back, next, cancel. Optionally a bitmap at the - left.""" - def __init__(self, *args, **kw): - """Dialog(database, name, x, y, w, h, attributes, title, first, - default, cancel, bitmap=true)""" - super().__init__(*args) - ruler = self.h - 36 - bmwidth = 152*ruler/328 - #if kw.get("bitmap", True): - # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin") - self.line("BottomLine", 0, ruler, self.w, 0) - - def title(self, title): - "Set the title text of the dialog at the top." 
- # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix, - # text, in VerdanaBold10 - self.text("Title", 15, 10, 320, 60, 0x30003, - r"{\VerdanaBold10}%s" % title) - - def back(self, title, next, name = "Back", active = 1): - """Add a back button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next) - - def cancel(self, title, next, name = "Cancel", active = 1): - """Add a cancel button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next) - - def next(self, title, next, name = "Next", active = 1): - """Add a Next button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next) - - def xbutton(self, name, title, next, xpos): - """Add a button with a given title, the tab-next button, - its name in the Control table, giving its x position; the - y-position is aligned with the other buttons. - - Return the button, so that events can be associated""" - return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next) - -class bdist_msi(Command): - - description = "create a Microsoft Installer (.msi) binary distribution" - - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized) " - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after " - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. 
This script need not be in the " - "distribution"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4', - '2.5', '2.6', '2.7', '2.8', '2.9', - '3.0', '3.1', '3.2', '3.3', '3.4', - '3.5', '3.6', '3.7', '3.8', '3.9'] - other_version = 'X' - - def __init__(self, *args, **kw): - super().__init__(*args, **kw) - warnings.warn("bdist_msi command is deprecated since Python 3.9, " - "use bdist_wheel (wheel packages) instead", - DeprecationWarning, 2) - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.no_target_compile = 0 - self.no_target_optimize = 0 - self.target_version = None - self.dist_dir = None - self.skip_build = None - self.install_script = None - self.pre_install_script = None - self.versions = None - - def finalize_options(self): - self.set_undefined_options('bdist', ('skip_build', 'skip_build')) - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'msi') - - short_version = get_python_version() - if (not self.target_version) and self.distribution.has_ext_modules(): - self.target_version = short_version - - if self.target_version: - self.versions = [self.target_version] - if not self.skip_build and self.distribution.has_ext_modules()\ - and self.target_version != short_version: - raise DistutilsOptionError( - "target version can only be %s, or the '--skip-build'" - " option must be specified" % (short_version,)) - else: - self.versions = list(self.all_versions) - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ) - - if self.pre_install_script: - raise DistutilsOptionError( - "the pre-install-script feature is not yet implemented") - - if self.install_script: - for script in self.distribution.scripts: - if self.install_script == os.path.basename(script): - break - else: - raise DistutilsOptionError( - "install_script '%s' not found in scripts" - % self.install_script) - self.install_script_key = None - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.prefix = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - - install_lib = self.reinitialize_command('install_lib') - # we do not want to include pyc or pyo files - install_lib.compile = 0 - install_lib.optimize = 0 - - if self.distribution.has_ext_modules(): - # If we are building an installer for a Python version other - # than the one we are currently running, then we need to ensure - # our build_lib reflects the other Python version rather than ours. - # Note that for target_version!=sys.version, we must have skipped the - # build step, so there is no issue with enforcing the build of this - # version. 
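# The deleted block below derived bdist_msi's build directory from the
# target platform/version pair so that it matched the build command's
# naming.  A minimal sketch of that scheme (values invented for
# illustration):

plat_name, target_version = 'win-amd64', '3.9'
plat_specifier = '.{}-{}'.format(plat_name, target_version)
build_lib = 'build/lib' + plat_specifier
print(build_lib)  # -> build/lib.win-amd64-3.9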
- target_version = self.target_version - if not target_version: - assert self.skip_build, "Should have already checked this" - target_version = '%d.%d' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, target_version) - build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) - - log.info("installing to %s", self.bdist_dir) - install.ensure_finalized() - - # avoid warning of 'install_lib' about installing - # into a directory not in sys.path - sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) - - install.run() - - del sys.path[0] - - self.mkpath(self.dist_dir) - fullname = self.distribution.get_fullname() - installer_name = self.get_installer_filename(fullname) - installer_name = os.path.abspath(installer_name) - if os.path.exists(installer_name): os.unlink(installer_name) - - metadata = self.distribution.metadata - author = metadata.author - if not author: - author = metadata.maintainer - if not author: - author = "UNKNOWN" - version = metadata.get_version() - # ProductVersion must be strictly numeric - # XXX need to deal with prerelease versions - sversion = "%d.%d.%d" % StrictVersion(version).version - # Prefix ProductName with Python x.y, so that - # it sorts together with the other Python packages - # in Add-Remove-Programs (APR) - fullname = self.distribution.get_fullname() - if self.target_version: - product_name = "Python %s %s" % (self.target_version, fullname) - else: - product_name = "Python %s" % (fullname) - self.db = msilib.init_database(installer_name, schema, - product_name, msilib.gen_uuid(), - sversion, author) - msilib.add_tables(self.db, sequence) - props = [('DistVersion', version)] - email = metadata.author_email or metadata.maintainer_email - if email: - props.append(("ARPCONTACT", email)) - if metadata.url: - props.append(("ARPURLINFOABOUT", metadata.url)) - if props: - add_data(self.db, 'Property', props) - - self.add_find_python() - self.add_files() - self.add_scripts() - self.add_ui() - self.db.Commit() - - if hasattr(self.distribution, 'dist_files'): - tup = 'bdist_msi', self.target_version or 'any', fullname - self.distribution.dist_files.append(tup) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - def add_files(self): - db = self.db - cab = msilib.CAB("distfiles") - rootdir = os.path.abspath(self.bdist_dir) - - root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir") - f = Feature(db, "Python", "Python", "Everything", - 0, 1, directory="TARGETDIR") - - items = [(f, root, '')] - for version in self.versions + [self.other_version]: - target = "TARGETDIR" + version - name = default = "Python" + version - desc = "Everything" - if version is self.other_version: - title = "Python from another location" - level = 2 - else: - title = "Python %s from registry" % version - level = 1 - f = Feature(db, name, title, desc, 1, level, directory=target) - dir = Directory(db, cab, root, rootdir, target, default) - items.append((f, dir, version)) - db.Commit() - - seen = {} - for feature, dir, version in items: - todo = [dir] - while todo: - dir = todo.pop() - for file in os.listdir(dir.absolute): - afile = os.path.join(dir.absolute, file) - if os.path.isdir(afile): - short = "%s|%s" % (dir.make_short(file), file) - default = file + version - newdir = Directory(db, cab, dir, file, default, short) - todo.append(newdir) - else: - if not dir.component: - dir.start_component(dir.logical, feature, 0) - if afile not in seen: - key = seen[afile] = 
dir.add_file(file) - if file==self.install_script: - if self.install_script_key: - raise DistutilsOptionError( - "Multiple files with name %s" % file) - self.install_script_key = '[#%s]' % key - else: - key = seen[afile] - add_data(self.db, "DuplicateFile", - [(key + version, dir.component, key, None, dir.logical)]) - db.Commit() - cab.commit(db) - - def add_find_python(self): - """Adds code to the installer to compute the location of Python. - - Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the - registry for each version of Python. - - Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, - else from PYTHON.MACHINE.X.Y. - - Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe""" - - start = 402 - for ver in self.versions: - install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver - machine_reg = "python.machine." + ver - user_reg = "python.user." + ver - machine_prop = "PYTHON.MACHINE." + ver - user_prop = "PYTHON.USER." + ver - machine_action = "PythonFromMachine" + ver - user_action = "PythonFromUser" + ver - exe_action = "PythonExe" + ver - target_dir_prop = "TARGETDIR" + ver - exe_prop = "PYTHON" + ver - if msilib.Win64: - # type: msidbLocatorTypeRawValue + msidbLocatorType64bit - Type = 2+16 - else: - Type = 2 - add_data(self.db, "RegLocator", - [(machine_reg, 2, install_path, None, Type), - (user_reg, 1, install_path, None, Type)]) - add_data(self.db, "AppSearch", - [(machine_prop, machine_reg), - (user_prop, user_reg)]) - add_data(self.db, "CustomAction", - [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"), - (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"), - (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"), - ]) - add_data(self.db, "InstallExecuteSequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "InstallUISequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "Condition", - [("Python" + ver, 0, "NOT TARGETDIR" + ver)]) - start += 4 - assert start < 500 - - def add_scripts(self): - if self.install_script: - start = 6800 - for ver in self.versions + [self.other_version]: - install_action = "install_script." + ver - exe_prop = "PYTHON" + ver - add_data(self.db, "CustomAction", - [(install_action, 50, exe_prop, self.install_script_key)]) - add_data(self.db, "InstallExecuteSequence", - [(install_action, "&Python%s=3" % ver, start)]) - start += 1 - # XXX pre-install scripts are currently refused in finalize_options() - # but if this feature is completed, it will also need to add - # entries for each version as the above code does - if self.pre_install_script: - scriptfn = os.path.join(self.bdist_dir, "preinstall.bat") - with open(scriptfn, "w") as f: - # The batch file will be executed with [PYTHON], so that %1 - # is the path to the Python interpreter; %0 will be the path - # of the batch file. 
- # rem =""" - # %1 %0 - # exit - # """ - # - f.write('rem ="""\n%1 %0\nexit\n"""\n') - with open(self.pre_install_script) as fin: - f.write(fin.read()) - add_data(self.db, "Binary", - [("PreInstall", msilib.Binary(scriptfn)) - ]) - add_data(self.db, "CustomAction", - [("PreInstall", 2, "PreInstall", None) - ]) - add_data(self.db, "InstallExecuteSequence", - [("PreInstall", "NOT Installed", 450)]) - - - def add_ui(self): - db = self.db - x = y = 50 - w = 370 - h = 300 - title = "[ProductName] Setup" - - # see "Dialog Style Bits" - modal = 3 # visible | modal - modeless = 1 # visible - track_disk_space = 32 - - # UI customization properties - add_data(db, "Property", - # See "DefaultUIFont Property" - [("DefaultUIFont", "DlgFont8"), - # See "ErrorDialog Style Bit" - ("ErrorDialog", "ErrorDlg"), - ("Progress1", "Install"), # modified in maintenance type dlg - ("Progress2", "installs"), - ("MaintenanceForm_Action", "Repair"), - # possible values: ALL, JUSTME - ("WhichUsers", "ALL") - ]) - - # Fonts, see "TextStyle Table" - add_data(db, "TextStyle", - [("DlgFont8", "Tahoma", 9, None, 0), - ("DlgFontBold8", "Tahoma", 8, None, 1), #bold - ("VerdanaBold10", "Verdana", 10, None, 1), - ("VerdanaRed9", "Verdana", 9, 255, 0), - ]) - - # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table" - # Numbers indicate sequence; see sequence.py for how these action integrate - add_data(db, "InstallUISequence", - [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140), - ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141), - # In the user interface, assume all-users installation if privileged. - ("SelectFeaturesDlg", "Not Installed", 1230), - # XXX no support for resume installations yet - #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), - ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250), - ("ProgressDlg", None, 1280)]) - - add_data(db, 'ActionText', text.ActionText) - add_data(db, 'UIText', text.UIText) - ##################################################################### - # Standard dialogs: FatalError, UserExit, ExitDialog - fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - fatal.title("[ProductName] Installer ended prematurely") - fatal.back("< Back", "Finish", active = 0) - fatal.cancel("Cancel", "Back", active = 0) - fatal.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.") - fatal.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c=fatal.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Exit") - - user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - user_exit.title("[ProductName] Installer was interrupted") - user_exit.back("< Back", "Finish", active = 0) - user_exit.cancel("Cancel", "Back", active = 0) - user_exit.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup was interrupted. Your system has not been modified. 
" - "To install this program at a later time, please run the installation again.") - user_exit.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c = user_exit.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Exit") - - exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - exit_dialog.title("Completing the [ProductName] Installer") - exit_dialog.back("< Back", "Finish", active = 0) - exit_dialog.cancel("Cancel", "Back", active = 0) - exit_dialog.text("Description", 15, 235, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c = exit_dialog.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Return") - - ##################################################################### - # Required dialog: FilesInUse, ErrorDlg - inuse = PyDialog(db, "FilesInUse", - x, y, w, h, - 19, # KeepModeless|Modal|Visible - title, - "Retry", "Retry", "Retry", bitmap=False) - inuse.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Files in Use") - inuse.text("Description", 20, 23, 280, 20, 0x30003, - "Some files that need to be updated are currently in use.") - inuse.text("Text", 20, 55, 330, 50, 3, - "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.") - inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess", - None, None, None) - c=inuse.back("Exit", "Ignore", name="Exit") - c.event("EndDialog", "Exit") - c=inuse.next("Ignore", "Retry", name="Ignore") - c.event("EndDialog", "Ignore") - c=inuse.cancel("Retry", "Exit", name="Retry") - c.event("EndDialog","Retry") - - # See "Error Dialog". See "ICE20" for the required names of the controls. 
- error = Dialog(db, "ErrorDlg", - 50, 10, 330, 101, - 65543, # Error|Minimize|Modal|Visible - title, - "ErrorText", None, None) - error.text("ErrorText", 50,9,280,48,3, "") - #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None) - error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo") - error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes") - error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort") - error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel") - error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore") - error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk") - error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry") - - ##################################################################### - # Global "Query Cancel" dialog - cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title, - "No", "No", "No") - cancel.text("Text", 48, 15, 194, 30, 3, - "Are you sure you want to cancel [ProductName] installation?") - #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None, - # "py.ico", None, None) - c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No") - c.event("EndDialog", "Exit") - - c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes") - c.event("EndDialog", "Return") - - ##################################################################### - # Global "Wait for costing" dialog - costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title, - "Return", "Return", "Return") - costing.text("Text", 48, 15, 194, 30, 3, - "Please wait while the installer finishes determining your disk space requirements.") - c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None) - c.event("EndDialog", "Exit") - - ##################################################################### - # Preparation dialog: no user input except cancellation - prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel") - prep.text("Description", 15, 70, 320, 40, 0x30003, - "Please wait while the Installer prepares to guide you through the installation.") - prep.title("Welcome to the [ProductName] Installer") - c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...") - c.mapping("ActionText", "Text") - c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None) - c.mapping("ActionData", "Text") - prep.back("Back", None, active=0) - prep.next("Next", None, active=0) - c=prep.cancel("Cancel", None) - c.event("SpawnDialog", "CancelDlg") - - ##################################################################### - # Feature (Python directory) selection - seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") - seldlg.title("Select Python Installations") - - seldlg.text("Hint", 15, 30, 300, 20, 3, - "Select the Python locations where %s should be installed." 
- % self.distribution.get_fullname()) - - seldlg.back("< Back", None, active=0) - c = seldlg.next("Next >", "Cancel") - order = 1 - c.event("[TARGETDIR]", "[SourceDir]", ordering=order) - for version in self.versions + [self.other_version]: - order += 1 - c.event("[TARGETDIR]", "[TARGETDIR%s]" % version, - "FEATURE_SELECTED AND &Python%s=3" % version, - ordering=order) - c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1) - c.event("EndDialog", "Return", ordering=order + 2) - c = seldlg.cancel("Cancel", "Features") - c.event("SpawnDialog", "CancelDlg") - - c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3, - "FEATURE", None, "PathEdit", None) - c.event("[FEATURE_SELECTED]", "1") - ver = self.other_version - install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver - dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver - - c = seldlg.text("Other", 15, 200, 300, 15, 3, - "Provide an alternate Python location") - c.condition("Enable", install_other_cond) - c.condition("Show", install_other_cond) - c.condition("Disable", dont_install_other_cond) - c.condition("Hide", dont_install_other_cond) - - c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1, - "TARGETDIR" + ver, None, "Next", None) - c.condition("Enable", install_other_cond) - c.condition("Show", install_other_cond) - c.condition("Disable", dont_install_other_cond) - c.condition("Hide", dont_install_other_cond) - - ##################################################################### - # Disk cost - cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title, - "OK", "OK", "OK", bitmap=False) - cost.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Disk Space Requirements") - cost.text("Description", 20, 20, 280, 20, 0x30003, - "The disk space required for the installation of the selected features.") - cost.text("Text", 20, 53, 330, 60, 3, - "The highlighted volumes (if any) do not have enough disk space " - "available for the currently selected features. You can either " - "remove some files from the highlighted volumes, or choose to " - "install less features onto local drive(s), or select different " - "destination drive(s).") - cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223, - None, "{120}{70}{70}{70}{70}", None, None) - cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return") - - ##################################################################### - # WhichUsers Dialog. Only available on NT, and for privileged users. - # This must be run before FindRelatedProducts, because that will - # take into account whether the previous installation was per-user - # or per-machine. We currently don't support going back to this - # dialog after "Next" was selected; to support this, we would need to - # find how to reset the ALLUSERS property, and how to re-run - # FindRelatedProducts. - # On Windows9x, the ALLUSERS property is ignored on the command line - # and in the Property table, but installer fails according to the documentation - # if a dialog attempts to set ALLUSERS. 
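# All of the dialog machinery below disappears with the rest of
# bdist_msi; the command's own deprecation warning (earlier in this
# file) points to wheels instead.  One hedged way to produce the
# replacement artifact, assuming the third-party `build` frontend is
# installed:

import subprocess
import sys

# builds a .whl into ./dist, roughly standing in for `setup.py bdist_msi`
subprocess.run([sys.executable, '-m', 'build', '--wheel'], check=True)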
- whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title, - "AdminInstall", "Next", "Cancel") - whichusers.title("Select whether to install [ProductName] for all users of this computer.") - # A radio group with two options: allusers, justme - g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3, - "WhichUsers", "", "Next") - g.add("ALL", 0, 5, 150, 20, "Install for all users") - g.add("JUSTME", 0, 25, 150, 20, "Install just for me") - - whichusers.back("Back", None, active=0) - - c = whichusers.next("Next >", "Cancel") - c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1) - c.event("EndDialog", "Return", ordering = 2) - - c = whichusers.cancel("Cancel", "AdminInstall") - c.event("SpawnDialog", "CancelDlg") - - ##################################################################### - # Installation Progress dialog (modeless) - progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel", bitmap=False) - progress.text("Title", 20, 15, 200, 15, 0x30003, - r"{\DlgFontBold8}[Progress1] [ProductName]") - progress.text("Text", 35, 65, 300, 30, 3, - "Please wait while the Installer [Progress2] [ProductName]. " - "This may take several minutes.") - progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:") - - c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...") - c.mapping("ActionText", "Text") - - #c=progress.text("ActionData", 35, 140, 300, 20, 3, None) - #c.mapping("ActionData", "Text") - - c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537, - None, "Progress done", None, None) - c.mapping("SetProgress", "Progress") - - progress.back("< Back", "Next", active=False) - progress.next("Next >", "Cancel", active=False) - progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg") - - ################################################################### - # Maintenance type: repair/uninstall - maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") - maint.title("Welcome to the [ProductName] Setup Wizard") - maint.text("BodyText", 15, 63, 330, 42, 3, - "Select whether you want to repair or remove [ProductName].") - g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3, - "MaintenanceForm_Action", "", "Next") - #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]") - g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]") - g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]") - - maint.back("< Back", None, active=False) - c=maint.next("Finish", "Cancel") - # Change installation: Change progress dialog to "Change", then ask - # for feature selection - #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1) - #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2) - - # Reinstall: Change progress dialog to "Repair", then invoke reinstall - # Also set list of reinstalled features to "ALL" - c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5) - c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6) - c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7) - c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8) - - # Uninstall: Change progress to "Remove", then invoke uninstall - # Also set list of removed features to "ALL" - c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11) - c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12) - c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13) - c.event("Remove", 
"ALL", 'MaintenanceForm_Action="Remove"', 14) - - # Close dialog when maintenance action scheduled - c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20) - #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21) - - maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg") - - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name, - self.target_version) - else: - base_name = "%s.%s.msi" % (fullname, self.plat_name) - installer_name = os.path.join(self.dist_dir, base_name) - return installer_name diff --git a/setuptools/_distutils/command/bdist_rpm.py b/setuptools/_distutils/command/bdist_rpm.py index 550cbfa1e2..3ed608b479 100644 --- a/setuptools/_distutils/command/bdist_rpm.py +++ b/setuptools/_distutils/command/bdist_rpm.py @@ -3,134 +3,152 @@ Implements the Distutils 'bdist_rpm' command (create RPM source and binary distributions).""" -import subprocess, sys, os -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.file_util import write_file -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log +import subprocess +import sys +import os + +from ..core import Command +from ..debug import DEBUG +from ..file_util import write_file +from ..errors import ( + DistutilsOptionError, + DistutilsPlatformError, + DistutilsFileError, + DistutilsExecError, +) +from ..sysconfig import get_python_version +from distutils._log import log -class bdist_rpm(Command): +class bdist_rpm(Command): description = "create an RPM distribution" user_options = [ - ('bdist-base=', None, - "base directory for creating built distributions"), - ('rpm-base=', None, - "base directory for creating RPMs (defaults to \"rpm\" under " - "--bdist-base; must be specified for RPM 2)"), - ('dist-dir=', 'd', - "directory to put final RPM files in " - "(and .spec files if --spec-only)"), - ('python=', None, - "path to Python interpreter to hard-code in the .spec file " - "(default: \"python\")"), - ('fix-python', None, - "hard-code the exact path to the current Python interpreter in " - "the .spec file"), - ('spec-only', None, - "only regenerate spec file"), - ('source-only', None, - "only generate source RPM"), - ('binary-only', None, - "only generate binary RPM"), - ('use-bzip2', None, - "use bzip2 instead of gzip to create source distribution"), - + ('bdist-base=', None, "base directory for creating built distributions"), + ( + 'rpm-base=', + None, + "base directory for creating RPMs (defaults to \"rpm\" under " + "--bdist-base; must be specified for RPM 2)", + ), + ( + 'dist-dir=', + 'd', + "directory to put final RPM files in " "(and .spec files if --spec-only)", + ), + ( + 'python=', + None, + "path to Python interpreter to hard-code in the .spec file " + "(default: \"python\")", + ), + ( + 'fix-python', + None, + "hard-code the exact path to the current Python interpreter in " + "the .spec file", + ), + ('spec-only', None, "only regenerate spec file"), + ('source-only', None, "only generate source RPM"), + ('binary-only', None, "only generate binary RPM"), + ('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"), # More meta-data: too RPM-specific to put in the setup script, # but needs to go in the .spec file -- so we make these options # to "bdist_rpm". 
The idea is that packagers would put this # info in setup.cfg, although they are of course free to # supply it on the command line. - ('distribution-name=', None, - "name of the (Linux) distribution to which this " - "RPM applies (*not* the name of the module distribution!)"), - ('group=', None, - "package classification [default: \"Development/Libraries\"]"), - ('release=', None, - "RPM release number"), - ('serial=', None, - "RPM serial number"), - ('vendor=', None, - "RPM \"vendor\" (eg. \"Joe Blow \") " - "[default: maintainer or author from setup script]"), - ('packager=', None, - "RPM packager (eg. \"Jane Doe \") " - "[default: vendor]"), - ('doc-files=', None, - "list of documentation files (space or comma-separated)"), - ('changelog=', None, - "RPM changelog"), - ('icon=', None, - "name of icon file"), - ('provides=', None, - "capabilities provided by this package"), - ('requires=', None, - "capabilities required by this package"), - ('conflicts=', None, - "capabilities which conflict with this package"), - ('build-requires=', None, - "capabilities required to build this package"), - ('obsoletes=', None, - "capabilities made obsolete by this package"), - ('no-autoreq', None, - "do not automatically calculate dependencies"), - + ( + 'distribution-name=', + None, + "name of the (Linux) distribution to which this " + "RPM applies (*not* the name of the module distribution!)", + ), + ('group=', None, "package classification [default: \"Development/Libraries\"]"), + ('release=', None, "RPM release number"), + ('serial=', None, "RPM serial number"), + ( + 'vendor=', + None, + "RPM \"vendor\" (eg. \"Joe Blow \") " + "[default: maintainer or author from setup script]", + ), + ( + 'packager=', + None, + "RPM packager (eg. \"Jane Doe \") " "[default: vendor]", + ), + ('doc-files=', None, "list of documentation files (space or comma-separated)"), + ('changelog=', None, "RPM changelog"), + ('icon=', None, "name of icon file"), + ('provides=', None, "capabilities provided by this package"), + ('requires=', None, "capabilities required by this package"), + ('conflicts=', None, "capabilities which conflict with this package"), + ('build-requires=', None, "capabilities required to build this package"), + ('obsoletes=', None, "capabilities made obsolete by this package"), + ('no-autoreq', None, "do not automatically calculate dependencies"), # Actions to take when building RPM - ('keep-temp', 'k', - "don't clean up RPM build directory"), - ('no-keep-temp', None, - "clean up RPM build directory [default]"), - ('use-rpm-opt-flags', None, - "compile with RPM_OPT_FLAGS when building from source RPM"), - ('no-rpm-opt-flags', None, - "do not pass any RPM CFLAGS to compiler"), - ('rpm3-mode', None, - "RPM 3 compatibility mode (default)"), - ('rpm2-mode', None, - "RPM 2 compatibility mode"), - + ('keep-temp', 'k', "don't clean up RPM build directory"), + ('no-keep-temp', None, "clean up RPM build directory [default]"), + ( + 'use-rpm-opt-flags', + None, + "compile with RPM_OPT_FLAGS when building from source RPM", + ), + ('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"), + ('rpm3-mode', None, "RPM 3 compatibility mode (default)"), + ('rpm2-mode', None, "RPM 2 compatibility mode"), # Add the hooks necessary for specifying custom scripts - ('prep-script=', None, - "Specify a script for the PREP phase of RPM building"), - ('build-script=', None, - "Specify a script for the BUILD phase of RPM building"), - - ('pre-install=', None, - "Specify a script for the pre-INSTALL phase of RPM 
building"), - ('install-script=', None, - "Specify a script for the INSTALL phase of RPM building"), - ('post-install=', None, - "Specify a script for the post-INSTALL phase of RPM building"), - - ('pre-uninstall=', None, - "Specify a script for the pre-UNINSTALL phase of RPM building"), - ('post-uninstall=', None, - "Specify a script for the post-UNINSTALL phase of RPM building"), - - ('clean-script=', None, - "Specify a script for the CLEAN phase of RPM building"), - - ('verify-script=', None, - "Specify a script for the VERIFY phase of the RPM build"), - + ('prep-script=', None, "Specify a script for the PREP phase of RPM building"), + ('build-script=', None, "Specify a script for the BUILD phase of RPM building"), + ( + 'pre-install=', + None, + "Specify a script for the pre-INSTALL phase of RPM building", + ), + ( + 'install-script=', + None, + "Specify a script for the INSTALL phase of RPM building", + ), + ( + 'post-install=', + None, + "Specify a script for the post-INSTALL phase of RPM building", + ), + ( + 'pre-uninstall=', + None, + "Specify a script for the pre-UNINSTALL phase of RPM building", + ), + ( + 'post-uninstall=', + None, + "Specify a script for the post-UNINSTALL phase of RPM building", + ), + ('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"), + ( + 'verify-script=', + None, + "Specify a script for the VERIFY phase of the RPM build", + ), # Allow a packager to explicitly force an architecture - ('force-arch=', None, - "Force an architecture onto the RPM build process"), - - ('quiet', 'q', - "Run the INSTALL phase of RPM building in quiet mode"), - ] - - boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', - 'no-autoreq', 'quiet'] - - negative_opt = {'no-keep-temp': 'keep-temp', - 'no-rpm-opt-flags': 'use-rpm-opt-flags', - 'rpm2-mode': 'rpm3-mode'} - + ('force-arch=', None, "Force an architecture onto the RPM build process"), + ('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"), + ] + + boolean_options = [ + 'keep-temp', + 'use-rpm-opt-flags', + 'rpm3-mode', + 'no-autoreq', + 'quiet', + ] + + negative_opt = { + 'no-keep-temp': 'keep-temp', + 'no-rpm-opt-flags': 'use-rpm-opt-flags', + 'rpm2-mode': 'rpm3-mode', + } def initialize_options(self): self.bdist_base = None @@ -181,8 +199,7 @@ def finalize_options(self): self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) if self.rpm_base is None: if not self.rpm3_mode: - raise DistutilsOptionError( - "you must specify --rpm-base in RPM 2 mode") + raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode") self.rpm_base = os.path.join(self.bdist_base, "rpm") if self.python is None: @@ -192,14 +209,17 @@ def finalize_options(self): self.python = "python3" elif self.fix_python: raise DistutilsOptionError( - "--python and --fix-python are mutually exclusive options") + "--python and --fix-python are mutually exclusive options" + ) if os.name != 'posix': - raise DistutilsPlatformError("don't know how to create RPM " - "distributions on platform %s" % os.name) + raise DistutilsPlatformError( + "don't know how to create RPM " "distributions on platform %s" % os.name + ) if self.binary_only and self.source_only: raise DistutilsOptionError( - "cannot supply both '--source-only' and '--binary-only'") + "cannot supply both '--source-only' and '--binary-only'" + ) # don't pass CFLAGS to pure python distributions if not self.distribution.has_ext_modules(): @@ -210,9 +230,11 @@ def finalize_options(self): def finalize_package_data(self): 
self.ensure_string('group', "Development/Libraries") - self.ensure_string('vendor', - "%s <%s>" % (self.distribution.get_contact(), - self.distribution.get_contact_email())) + self.ensure_string( + 'vendor', + "%s <%s>" + % (self.distribution.get_contact(), self.distribution.get_contact_email()), + ) self.ensure_string('packager') self.ensure_string_list('doc_files') if isinstance(self.doc_files, list): @@ -221,12 +243,12 @@ def finalize_package_data(self): self.doc_files.append(readme) self.ensure_string('release', "1") - self.ensure_string('serial') # should it be an int? + self.ensure_string('serial') # should it be an int? self.ensure_string('distribution_name') self.ensure_string('changelog') - # Format changelog correctly + # Format changelog correctly self.changelog = self._format_changelog(self.changelog) self.ensure_filename('icon') @@ -253,7 +275,7 @@ def finalize_package_data(self): self.ensure_string('force_arch') - def run(self): + def run(self): # noqa: C901 if DEBUG: print("before _get_package_data():") print("vendor =", self.vendor) @@ -274,14 +296,12 @@ def run(self): # Spec file goes into 'dist_dir' if '--spec-only specified', # build/rpm. otherwise. - spec_path = os.path.join(spec_dir, - "%s.spec" % self.distribution.get_name()) - self.execute(write_file, - (spec_path, - self._make_spec_file()), - "writing '%s'" % spec_path) - - if self.spec_only: # stop if requested + spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name()) + self.execute( + write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path + ) + + if self.spec_only: # stop if requested return # Make a source distribution and copy to SOURCES directory with @@ -303,14 +323,13 @@ def run(self): if os.path.exists(self.icon): self.copy_file(self.icon, source_dir) else: - raise DistutilsFileError( - "icon file '%s' does not exist" % self.icon) + raise DistutilsFileError("icon file '%s' does not exist" % self.icon) # build package log.info("building RPMs") rpm_cmd = ['rpmbuild'] - if self.source_only: # what kind of RPMs? + if self.source_only: # what kind of RPMs? 
rpm_cmd.append('-bs') elif self.binary_only: rpm_cmd.append('-bb') @@ -318,8 +337,7 @@ def run(self): rpm_cmd.append('-ba') rpm_cmd.extend(['--define', '__python %s' % self.python]) if self.rpm3_mode: - rpm_cmd.extend(['--define', - '_topdir %s' % os.path.abspath(self.rpm_base)]) + rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)]) if not self.keep_temp: rpm_cmd.append('--clean') @@ -334,8 +352,11 @@ def run(self): nvr_string = "%{name}-%{version}-%{release}" src_rpm = nvr_string + ".src.rpm" non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" - q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( - src_rpm, non_src_rpm, spec_path) + q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format( + src_rpm, + non_src_rpm, + spec_path, + ) out = os.popen(q_cmd) try: @@ -345,12 +366,12 @@ def run(self): line = out.readline() if not line: break - l = line.strip().split() - assert(len(l) == 2) - binary_rpms.append(l[1]) + ell = line.strip().split() + assert len(ell) == 2 + binary_rpms.append(ell[1]) # The source rpm is named after the first entry in the spec file if source_rpm is None: - source_rpm = l[0] + source_rpm = ell[0] status = out.close() if status: @@ -369,38 +390,37 @@ def run(self): if not self.binary_only: srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) - assert(os.path.exists(srpm)) + assert os.path.exists(srpm) self.move_file(srpm, self.dist_dir) filename = os.path.join(self.dist_dir, source_rpm) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) + self.distribution.dist_files.append(('bdist_rpm', pyversion, filename)) if not self.source_only: for rpm in binary_rpms: rpm = os.path.join(rpm_dir['RPMS'], rpm) if os.path.exists(rpm): self.move_file(rpm, self.dist_dir) - filename = os.path.join(self.dist_dir, - os.path.basename(rpm)) + filename = os.path.join(self.dist_dir, os.path.basename(rpm)) self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) + ('bdist_rpm', pyversion, filename) + ) def _dist_path(self, path): return os.path.join(self.dist_dir, os.path.basename(path)) - def _make_spec_file(self): + def _make_spec_file(self): # noqa: C901 """Generate the text of an RPM spec file and return it as a list of strings (one per line). 
""" # definitions and headers spec_file = [ '%define name ' + self.distribution.get_name(), - '%define version ' + self.distribution.get_version().replace('-','_'), + '%define version ' + self.distribution.get_version().replace('-', '_'), '%define unmangled_version ' + self.distribution.get_version(), - '%define release ' + self.release.replace('-','_'), + '%define release ' + self.release.replace('-', '_'), '', - 'Summary: ' + self.distribution.get_description(), - ] + 'Summary: ' + (self.distribution.get_description() or "UNKNOWN"), + ] # Workaround for #14443 which affects some RPM based systems such as # RHEL6 (and probably derivatives) @@ -408,8 +428,9 @@ def _make_spec_file(self): # Generate a potential replacement value for __os_install_post (whilst # normalizing the whitespace to simplify the test for whether the # invocation of brp-python-bytecompile passes in __python): - vendor_hook = '\n'.join([' %s \\' % line.strip() - for line in vendor_hook.splitlines()]) + vendor_hook = '\n'.join( + [' %s \\' % line.strip() for line in vendor_hook.splitlines()] + ) problem = "brp-python-bytecompile \\\n" fixed = "brp-python-bytecompile %{__python} \\\n" fixed_hook = vendor_hook.replace(problem, fixed) @@ -420,14 +441,17 @@ def _make_spec_file(self): # put locale summaries into spec file # XXX not supported for now (hard to put a dictionary # in a config file -- arg!) - #for locale in self.summaries.keys(): + # for locale in self.summaries.keys(): # spec_file.append('Summary(%s): %s' % (locale, # self.summaries[locale])) - spec_file.extend([ - 'Name: %{name}', - 'Version: %{version}', - 'Release: %{release}',]) + spec_file.extend( + [ + 'Name: %{name}', + 'Version: %{version}', + 'Release: %{release}', + ] + ) # XXX yuck! this filename is available from the "sdist" command, # but only after it has run: and we create the spec file before @@ -437,42 +461,44 @@ def _make_spec_file(self): else: spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') - spec_file.extend([ - 'License: ' + self.distribution.get_license(), - 'Group: ' + self.group, - 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', - 'Prefix: %{_prefix}', ]) + spec_file.extend( + [ + 'License: ' + (self.distribution.get_license() or "UNKNOWN"), + 'Group: ' + self.group, + 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', + 'Prefix: %{_prefix}', + ] + ) if not self.force_arch: # noarch if no extension modules if not self.distribution.has_ext_modules(): spec_file.append('BuildArch: noarch') else: - spec_file.append( 'BuildArch: %s' % self.force_arch ) - - for field in ('Vendor', - 'Packager', - 'Provides', - 'Requires', - 'Conflicts', - 'Obsoletes', - ): + spec_file.append('BuildArch: %s' % self.force_arch) + + for field in ( + 'Vendor', + 'Packager', + 'Provides', + 'Requires', + 'Conflicts', + 'Obsoletes', + ): val = getattr(self, field.lower()) if isinstance(val, list): - spec_file.append('%s: %s' % (field, ' '.join(val))) + spec_file.append('{}: {}'.format(field, ' '.join(val))) elif val is not None: - spec_file.append('%s: %s' % (field, val)) + spec_file.append('{}: {}'.format(field, val)) - - if self.distribution.get_url() != 'UNKNOWN': + if self.distribution.get_url(): spec_file.append('Url: ' + self.distribution.get_url()) if self.distribution_name: spec_file.append('Distribution: ' + self.distribution_name) if self.build_requires: - spec_file.append('BuildRequires: ' + - ' '.join(self.build_requires)) + spec_file.append('BuildRequires: ' + ' '.join(self.build_requires)) if self.icon: 
spec_file.append('Icon: ' + os.path.basename(self.icon)) @@ -480,16 +506,18 @@ def _make_spec_file(self): if self.no_autoreq: spec_file.append('AutoReq: 0') - spec_file.extend([ - '', - '%description', - self.distribution.get_long_description() - ]) + spec_file.extend( + [ + '', + '%description', + self.distribution.get_long_description() or "", + ] + ) # put locale descriptions into spec file # XXX again, suppressed because config file syntax doesn't # easily support this ;-( - #for locale in self.descriptions.keys(): + # for locale in self.descriptions.keys(): # spec_file.extend([ # '', # '%description -l ' + locale, @@ -498,7 +526,7 @@ def _make_spec_file(self): # rpm scripts # figure out default build script - def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) + def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0])) def_build = "%s build" % def_setup_call if self.use_rpm_opt_flags: def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build @@ -509,8 +537,9 @@ def _make_spec_file(self): # that we open and interpolate into the spec file, but the defaults # are just text that we drop in as-is. Hmmm. - install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' - '--record=INSTALLED_FILES') % def_setup_call + install_cmd = ( + '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES' + ) % def_setup_call script_options = [ ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), @@ -524,42 +553,48 @@ def _make_spec_file(self): ('postun', 'post_uninstall', None), ] - for (rpm_opt, attr, default) in script_options: + for rpm_opt, attr, default in script_options: # Insert contents of file referred to, if no file is referred to # use 'default' as contents of script val = getattr(self, attr) if val or default: - spec_file.extend([ - '', - '%' + rpm_opt,]) + spec_file.extend( + [ + '', + '%' + rpm_opt, + ] + ) if val: with open(val) as f: spec_file.extend(f.read().split('\n')) else: spec_file.append(default) - # files section - spec_file.extend([ - '', - '%files -f INSTALLED_FILES', - '%defattr(-,root,root)', - ]) + spec_file.extend( + [ + '', + '%files -f INSTALLED_FILES', + '%defattr(-,root,root)', + ] + ) if self.doc_files: spec_file.append('%doc ' + ' '.join(self.doc_files)) if self.changelog: - spec_file.extend([ - '', - '%changelog',]) + spec_file.extend( + [ + '', + '%changelog', + ] + ) spec_file.extend(self.changelog) return spec_file def _format_changelog(self, changelog): - """Format the changelog correctly and convert it to a list of strings - """ + """Format the changelog correctly and convert it to a list of strings""" if not changelog: return changelog new_changelog = [] diff --git a/setuptools/_distutils/command/bdist_wininst.py b/setuptools/_distutils/command/bdist_wininst.py deleted file mode 100644 index 0e9ddaa214..0000000000 --- a/setuptools/_distutils/command/bdist_wininst.py +++ /dev/null @@ -1,377 +0,0 @@ -"""distutils.command.bdist_wininst - -Implements the Distutils 'bdist_wininst' command: create a windows installer -exe-program.""" - -import os -import sys -import warnings -from distutils.core import Command -from distutils.util import get_platform -from distutils.dir_util import remove_tree -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_wininst(Command): - - description = "create an executable installer for MS Windows" - - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - 
"platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized) " - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('bitmap=', 'b', - "bitmap to use for the installer instead of python-powered logo"), - ('title=', 't', - "title to display on the installer background instead of default"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after " - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. This script need not be in the " - "distribution"), - ('user-access-control=', None, - "specify Vista's UAC handling - 'none'/default=no " - "handling, 'auto'=use UAC if target Python installed for " - "all users, 'force'=always use UAC"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows - _unsupported = (sys.platform != "win32") - - def __init__(self, *args, **kw): - super().__init__(*args, **kw) - warnings.warn("bdist_wininst command is deprecated since Python 3.8, " - "use bdist_wheel (wheel packages) instead", - DeprecationWarning, 2) - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.no_target_compile = 0 - self.no_target_optimize = 0 - self.target_version = None - self.dist_dir = None - self.bitmap = None - self.title = None - self.skip_build = None - self.install_script = None - self.pre_install_script = None - self.user_access_control = None - - - def finalize_options(self): - self.set_undefined_options('bdist', ('skip_build', 'skip_build')) - - if self.bdist_dir is None: - if self.skip_build and self.plat_name: - # If build is skipped and plat_name is overridden, bdist will - # not see the correct 'plat_name' - so set that up manually. 
- bdist = self.distribution.get_command_obj('bdist') - bdist.plat_name = self.plat_name - # next the command will be initialized using that name - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'wininst') - - if not self.target_version: - self.target_version = "" - - if not self.skip_build and self.distribution.has_ext_modules(): - short_version = get_python_version() - if self.target_version and self.target_version != short_version: - raise DistutilsOptionError( - "target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,)) - self.target_version = short_version - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ) - - if self.install_script: - for script in self.distribution.scripts: - if self.install_script == os.path.basename(script): - break - else: - raise DistutilsOptionError( - "install_script '%s' not found in scripts" - % self.install_script) - - def run(self): - if (sys.platform != "win32" and - (self.distribution.has_ext_modules() or - self.distribution.has_c_libraries())): - raise DistutilsPlatformError \ - ("distribution contains extensions and/or C libraries; " - "must be compiled on a Windows 32 platform") - - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - install.plat_name = self.plat_name - - install_lib = self.reinitialize_command('install_lib') - # we do not want to include pyc or pyo files - install_lib.compile = 0 - install_lib.optimize = 0 - - if self.distribution.has_ext_modules(): - # If we are building an installer for a Python version other - # than the one we are currently running, then we need to ensure - # our build_lib reflects the other Python version rather than ours. - # Note that for target_version!=sys.version, we must have skipped the - # build step, so there is no issue with enforcing the build of this - # version. - target_version = self.target_version - if not target_version: - assert self.skip_build, "Should have already checked this" - target_version = '%d.%d' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, target_version) - build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) - - # Use a custom scheme for the zip-file, because we have to decide - # at installation time which scheme to use. - for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'): - value = key.upper() - if key == 'headers': - value = value + '/Include/$dist_name' - setattr(install, - 'install_' + key, - value) - - log.info("installing to %s", self.bdist_dir) - install.ensure_finalized() - - # avoid warning of 'install_lib' about installing - # into a directory not in sys.path - sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) - - install.run() - - del sys.path[0] - - # And make an archive relative to the root of the - # pseudo-installation tree. 
- from tempfile import mktemp - archive_basename = mktemp() - fullname = self.distribution.get_fullname() - arcname = self.make_archive(archive_basename, "zip", - root_dir=self.bdist_dir) - # create an exe containing the zip-file - self.create_exe(arcname, fullname, self.bitmap) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_wininst', pyversion, - self.get_installer_filename(fullname))) - # remove the zip-file again - log.debug("removing temporary file '%s'", arcname) - os.remove(arcname) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - def get_inidata(self): - # Return data describing the installation. - lines = [] - metadata = self.distribution.metadata - - # Write the [metadata] section. - lines.append("[metadata]") - - # 'info' will be displayed in the installer's dialog box, - # describing the items to be installed. - info = (metadata.long_description or '') + '\n' - - # Escape newline characters - def escape(s): - return s.replace("\n", "\\n") - - for name in ["author", "author_email", "description", "maintainer", - "maintainer_email", "name", "url", "version"]: - data = getattr(metadata, name, "") - if data: - info = info + ("\n %s: %s" % \ - (name.capitalize(), escape(data))) - lines.append("%s=%s" % (name, escape(data))) - - # The [setup] section contains entries controlling - # the installer runtime. - lines.append("\n[Setup]") - if self.install_script: - lines.append("install_script=%s" % self.install_script) - lines.append("info=%s" % escape(info)) - lines.append("target_compile=%d" % (not self.no_target_compile)) - lines.append("target_optimize=%d" % (not self.no_target_optimize)) - if self.target_version: - lines.append("target_version=%s" % self.target_version) - if self.user_access_control: - lines.append("user_access_control=%s" % self.user_access_control) - - title = self.title or self.distribution.get_fullname() - lines.append("title=%s" % escape(title)) - import time - import distutils - build_info = "Built %s with distutils-%s" % \ - (time.ctime(time.time()), distutils.__version__) - lines.append("build_info=%s" % build_info) - return "\n".join(lines) - - def create_exe(self, arcname, fullname, bitmap=None): - import struct - - self.mkpath(self.dist_dir) - - cfgdata = self.get_inidata() - - installer_name = self.get_installer_filename(fullname) - self.announce("creating %s" % installer_name) - - if bitmap: - with open(bitmap, "rb") as f: - bitmapdata = f.read() - bitmaplen = len(bitmapdata) - else: - bitmaplen = 0 - - with open(installer_name, "wb") as file: - file.write(self.get_exe_bytes()) - if bitmap: - file.write(bitmapdata) - - # Convert cfgdata from unicode to ascii, mbcs encoded - if isinstance(cfgdata, str): - cfgdata = cfgdata.encode("mbcs") - - # Append the pre-install script - cfgdata = cfgdata + b"\0" - if self.pre_install_script: - # We need to normalize newlines, so we open in text mode and - # convert back to bytes. "latin-1" simply avoids any possible - # failures. - with open(self.pre_install_script, "r", - encoding="latin-1") as script: - script_data = script.read().encode("latin-1") - cfgdata = cfgdata + script_data + b"\n\0" - else: - # empty pre-install script - cfgdata = cfgdata + b"\0" - file.write(cfgdata) - - # The 'magic number' 0x1234567B is used to make sure that the - # binary layout of 'cfgdata' is what the wininst.exe binary - # expects. 
If the layout changes, increment that number, make - # the corresponding changes to the wininst.exe sources, and - # recompile them. - header = struct.pack("" if self.build_temp is None: - self.build_temp = os.path.join(self.build_base, - 'temp' + plat_specifier) + self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier) if self.build_scripts is None: - self.build_scripts = os.path.join(self.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) + self.build_scripts = os.path.join( + self.build_base, 'scripts-%d.%d' % sys.version_info[:2] + ) if self.executable is None and sys.executable: self.executable = os.path.normpath(sys.executable) @@ -134,7 +130,6 @@ def run(self): for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) - # -- Predicates for the sub-command list --------------------------- def has_pure_modules(self): @@ -149,9 +144,9 @@ def has_ext_modules(self): def has_scripts(self): return self.distribution.has_scripts() - - sub_commands = [('build_py', has_pure_modules), - ('build_clib', has_c_libraries), - ('build_ext', has_ext_modules), - ('build_scripts', has_scripts), - ] + sub_commands = [ + ('build_py', has_pure_modules), + ('build_clib', has_c_libraries), + ('build_ext', has_ext_modules), + ('build_scripts', has_scripts), + ] diff --git a/setuptools/_distutils/command/build_clib.py b/setuptools/_distutils/command/build_clib.py index 3e20ef23cd..b3f679b67d 100644 --- a/setuptools/_distutils/command/build_clib.py +++ b/setuptools/_distutils/command/build_clib.py @@ -15,39 +15,34 @@ # cut 'n paste. Sigh. import os -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler -from distutils import log +from ..core import Command +from ..errors import DistutilsSetupError +from ..sysconfig import customize_compiler +from distutils._log import log + def show_compilers(): - from distutils.ccompiler import show_compilers + from ..ccompiler import show_compilers + show_compilers() class build_clib(Command): - description = "build C/C++ libraries used by Python extensions" user_options = [ - ('build-clib=', 'b', - "directory to build C/C++ libraries to"), - ('build-temp=', 't', - "directory to put temporary build by-products"), - ('debug', 'g', - "compile with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ] + ('build-clib=', 'b', "directory to build C/C++ libraries to"), + ('build-temp=', 't', "directory to put temporary build by-products"), + ('debug', 'g', "compile with debugging information"), + ('force', 'f', "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', "specify the compiler type"), + ] boolean_options = ['debug', 'force'] help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] + ('help-compiler', None, "list available compilers", show_compilers), + ] def initialize_options(self): self.build_clib = None @@ -64,19 +59,20 @@ def initialize_options(self): self.force = 0 self.compiler = None - def finalize_options(self): # This might be confusing: both build-clib and build-temp default # to build-temp as defined by the "build" command. This is because # I think that C libraries are really just temporary build # by-products, at least from the point of view of building Python # extensions -- but I want to keep my options open. 
- self.set_undefined_options('build', - ('build_temp', 'build_clib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force')) + self.set_undefined_options( + 'build', + ('build_temp', 'build_clib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force'), + ) self.libraries = self.distribution.libraries if self.libraries: @@ -90,23 +86,23 @@ def finalize_options(self): # XXX same as for build_ext -- what about 'self.define' and # 'self.undef' ? - def run(self): if not self.libraries: return # Yech -- this is cut 'n pasted from build_ext.py! - from distutils.ccompiler import new_compiler - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) + from ..ccompiler import new_compiler + + self.compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: + for name, value in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: @@ -114,7 +110,6 @@ def run(self): self.build_libraries(self.libraries) - def check_library_list(self, libraries): """Ensure that the list of libraries is valid. @@ -126,30 +121,31 @@ def check_library_list(self, libraries): just returns otherwise. """ if not isinstance(libraries, list): - raise DistutilsSetupError( - "'libraries' option must be a list of tuples") + raise DistutilsSetupError("'libraries' option must be a list of tuples") for lib in libraries: if not isinstance(lib, tuple) and len(lib) != 2: - raise DistutilsSetupError( - "each element of 'libraries' must a 2-tuple") + raise DistutilsSetupError("each element of 'libraries' must a 2-tuple") name, build_info = lib if not isinstance(name, str): raise DistutilsSetupError( - "first element of each tuple in 'libraries' " - "must be a string (the library name)") + "first element of each tuple in 'libraries' " + "must be a string (the library name)" + ) if '/' in name or (os.sep != '/' and os.sep in name): - raise DistutilsSetupError("bad library name '%s': " - "may not contain directory separators" % lib[0]) + raise DistutilsSetupError( + "bad library name '%s': " + "may not contain directory separators" % lib[0] + ) if not isinstance(build_info, dict): raise DistutilsSetupError( - "second element of each tuple in 'libraries' " - "must be a dictionary (build info)") - + "second element of each tuple in 'libraries' " + "must be a dictionary (build info)" + ) def get_library_names(self): # Assume the library list is valid -- 'check_library_list()' is @@ -158,34 +154,34 @@ def get_library_names(self): return None lib_names = [] - for (lib_name, build_info) in self.libraries: + for lib_name, build_info in self.libraries: lib_names.append(lib_name) return lib_names - def get_source_files(self): self.check_library_list(self.libraries) filenames = [] - for (lib_name, build_info) in self.libraries: + for lib_name, build_info in self.libraries: sources = build_info.get('sources') if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be 
" + "a list of source filenames" % lib_name + ) filenames.extend(sources) return filenames - def build_libraries(self, libraries): - for (lib_name, build_info) in libraries: + for lib_name, build_info in libraries: sources = build_info.get('sources') if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name + ) sources = list(sources) log.info("building '%s' library", lib_name) @@ -195,15 +191,17 @@ def build_libraries(self, libraries): # files in a temporary build directory.) macros = build_info.get('macros') include_dirs = build_info.get('include_dirs') - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - debug=self.debug) + objects = self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + debug=self.debug, + ) # Now "link" the object files together into a static library. # (On Unix at least, this isn't really linking -- it just # builds an archive. Whatever.) - self.compiler.create_static_lib(objects, lib_name, - output_dir=self.build_clib, - debug=self.debug) + self.compiler.create_static_lib( + objects, lib_name, output_dir=self.build_clib, debug=self.debug + ) diff --git a/setuptools/_distutils/command/build_ext.py b/setuptools/_distutils/command/build_ext.py index 181671bf19..fbeec342c0 100644 --- a/setuptools/_distutils/command/build_ext.py +++ b/setuptools/_distutils/command/build_ext.py @@ -8,31 +8,37 @@ import os import re import sys -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler, get_python_version -from distutils.sysconfig import get_config_h_filename -from distutils.dep_util import newer_group -from distutils.extension import Extension -from distutils.util import get_platform -from distutils import log +from ..core import Command +from ..errors import ( + DistutilsOptionError, + DistutilsSetupError, + CCompilerError, + DistutilsError, + CompileError, + DistutilsPlatformError, +) +from ..sysconfig import customize_compiler, get_python_version +from ..sysconfig import get_config_h_filename +from ..dep_util import newer_group +from ..extension import Extension +from ..util import get_platform +from distutils._log import log from . import py37compat from site import USER_BASE # An extension name is just a dot-separated list of Python NAMEs (ie. # the same as a fully-qualified module name). 
-extension_name_re = re.compile \ - (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') +extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') -def show_compilers (): - from distutils.ccompiler import show_compilers +def show_compilers(): + from ..ccompiler import show_compilers + show_compilers() class build_ext(Command): - description = "build C/C++ extensions (compile/link to build directory)" # XXX thoughts on how to deal with complex command-line options like @@ -55,54 +61,50 @@ class build_ext(Command): sep_by = " (separated by '%s')" % os.pathsep user_options = [ - ('build-lib=', 'b', - "directory for compiled extension modules"), - ('build-temp=', 't', - "directory for temporary files (build by-products)"), - ('plat-name=', 'p', - "platform name to cross-compile for, if supported " - "(default: %s)" % get_platform()), - ('inplace', 'i', - "ignore build-lib and put compiled extensions into the source " + - "directory alongside your pure Python modules"), - ('include-dirs=', 'I', - "list of directories to search for header files" + sep_by), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries" + sep_by), - ('rpath=', 'R', - "directories to search for shared C libraries at runtime"), - ('link-objects=', 'O', - "extra explicit link objects to include in the link"), - ('debug', 'g', - "compile/link with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('swig-cpp', None, - "make SWIG create C++ files (default is C)"), - ('swig-opts=', None, - "list of SWIG command line options"), - ('swig=', None, - "path to the SWIG executable"), - ('user', None, - "add user include, library and rpath") - ] + ('build-lib=', 'b', "directory for compiled extension modules"), + ('build-temp=', 't', "directory for temporary files (build by-products)"), + ( + 'plat-name=', + 'p', + "platform name to cross-compile for, if supported " + "(default: %s)" % get_platform(), + ), + ( + 'inplace', + 'i', + "ignore build-lib and put compiled extensions into the source " + + "directory alongside your pure Python modules", + ), + ( + 'include-dirs=', + 'I', + "list of directories to search for header files" + sep_by, + ), + ('define=', 'D', "C preprocessor macros to define"), + ('undef=', 'U', "C preprocessor macros to undefine"), + ('libraries=', 'l', "external C libraries to link with"), + ( + 'library-dirs=', + 'L', + "directories to search for external C libraries" + sep_by, + ), + ('rpath=', 'R', "directories to search for shared C libraries at runtime"), + ('link-objects=', 'O', "extra explicit link objects to include in the link"), + ('debug', 'g', "compile/link with debugging information"), + ('force', 'f', "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', "specify the compiler type"), + ('parallel=', 'j', "number of parallel build jobs"), + ('swig-cpp', None, "make SWIG create C++ files (default is C)"), + ('swig-opts=', None, "list of SWIG command line options"), + ('swig=', None, "path to the SWIG executable"), + ('user', None, "add user include, library and rpath"), + ] boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] help_options = [ - ('help-compiler', None, - "list 
available compilers", show_compilers), - ] + ('help-compiler', None, "list available compilers", show_compilers), + ] def initialize_options(self): self.extensions = None @@ -128,18 +130,19 @@ def initialize_options(self): self.user = None self.parallel = None - def finalize_options(self): + def finalize_options(self): # noqa: C901 from distutils import sysconfig - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force'), - ('parallel', 'parallel'), - ('plat_name', 'plat_name'), - ) + self.set_undefined_options( + 'build', + ('build_lib', 'build_lib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force'), + ('parallel', 'parallel'), + ('plat_name', 'plat_name'), + ) if self.package is None: self.package = self.distribution.ext_package @@ -164,8 +167,7 @@ def finalize_options(self): # any local include dirs take precedence. self.include_dirs.extend(py_include.split(os.path.pathsep)) if plat_py_include != py_include: - self.include_dirs.extend( - plat_py_include.split(os.path.pathsep)) + self.include_dirs.extend(plat_py_include.split(os.path.pathsep)) self.ensure_string_list('libraries') self.ensure_string_list('link_objects') @@ -220,9 +222,11 @@ def finalize_options(self): if sys.platform[:6] == 'cygwin': if not sysconfig.python_build: # building third party extensions - self.library_dirs.append(os.path.join(sys.prefix, "lib", - "python" + get_python_version(), - "config")) + self.library_dirs.append( + os.path.join( + sys.prefix, "lib", "python" + get_python_version(), "config" + ) + ) else: # building python standard extensions self.library_dirs.append('.') @@ -230,7 +234,7 @@ def finalize_options(self): # For building extensions with a shared Python library, # Python's library directory must be appended to library_dirs # See Issues: #1600860, #4366 - if (sysconfig.get_config_var('Py_ENABLE_SHARED')): + if sysconfig.get_config_var('Py_ENABLE_SHARED'): if not sysconfig.python_build: # building third party extensions self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) @@ -274,8 +278,8 @@ def finalize_options(self): except ValueError: raise DistutilsOptionError("parallel should be an integer") - def run(self): - from distutils.ccompiler import new_compiler + def run(self): # noqa: C901 + from ..ccompiler import new_compiler # 'self.extensions', as supplied by setup.py, is a list of # Extension instances. See the documentation for Extension (in @@ -302,10 +306,12 @@ def run(self): # Setup the CCompiler object that we'll use to do all the # compiling and linking - self.compiler = new_compiler(compiler=self.compiler, - verbose=self.verbose, - dry_run=self.dry_run, - force=self.force) + self.compiler = new_compiler( + compiler=self.compiler, + verbose=self.verbose, + dry_run=self.dry_run, + force=self.force, + ) customize_compiler(self.compiler) # If we are cross-compiling, init the compiler now (if we are not # cross-compiling, init would not hurt, but people may rely on @@ -321,7 +327,7 @@ def run(self): self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: + for name, value in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: @@ -338,7 +344,7 @@ def run(self): # Now actually compile and link everything. 
self.build_extensions() - def check_extensions_list(self, extensions): + def check_extensions_list(self, extensions): # noqa: C901 """Ensure that the list of extensions (presumably provided as a command option 'extensions') is valid, i.e. it is a list of Extension objects. We also support the old-style list of 2-tuples, @@ -350,34 +356,40 @@ def check_extensions_list(self, extensions): """ if not isinstance(extensions, list): raise DistutilsSetupError( - "'ext_modules' option must be a list of Extension instances") + "'ext_modules' option must be a list of Extension instances" + ) for i, ext in enumerate(extensions): if isinstance(ext, Extension): - continue # OK! (assume type-checking done - # by Extension constructor) + continue # OK! (assume type-checking done + # by Extension constructor) if not isinstance(ext, tuple) or len(ext) != 2: raise DistutilsSetupError( - "each element of 'ext_modules' option must be an " - "Extension instance or 2-tuple") + "each element of 'ext_modules' option must be an " + "Extension instance or 2-tuple" + ) ext_name, build_info = ext - log.warn("old-style (ext_name, build_info) tuple found in " - "ext_modules for extension '%s' " - "-- please convert to Extension instance", ext_name) + log.warning( + "old-style (ext_name, build_info) tuple found in " + "ext_modules for extension '%s' " + "-- please convert to Extension instance", + ext_name, + ) - if not (isinstance(ext_name, str) and - extension_name_re.match(ext_name)): + if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)): raise DistutilsSetupError( - "first element of each tuple in 'ext_modules' " - "must be the extension name (a string)") + "first element of each tuple in 'ext_modules' " + "must be the extension name (a string)" + ) if not isinstance(build_info, dict): raise DistutilsSetupError( - "second element of each tuple in 'ext_modules' " - "must be a dictionary (build info)") + "second element of each tuple in 'ext_modules' " + "must be a dictionary (build info)" + ) # OK, the (ext_name, build_info) dict is type-safe: convert it # to an Extension instance. @@ -385,9 +397,14 @@ def check_extensions_list(self, extensions): # Easy stuff: one-to-one mapping from dict elements to # instance attributes. - for key in ('include_dirs', 'library_dirs', 'libraries', - 'extra_objects', 'extra_compile_args', - 'extra_link_args'): + for key in ( + 'include_dirs', + 'library_dirs', + 'libraries', + 'extra_objects', + 'extra_compile_args', + 'extra_link_args', + ): val = build_info.get(key) if val is not None: setattr(ext, key, val) @@ -395,8 +412,9 @@ def check_extensions_list(self, extensions): # Medium-easy stuff: same syntax/semantics, different names. ext.runtime_library_dirs = build_info.get('rpath') if 'def_file' in build_info: - log.warn("'def_file' element of build info dict " - "no longer supported") + log.warning( + "'def_file' element of build info dict " "no longer supported" + ) # Non-trivial stuff: 'macros' split into 'define_macros' # and 'undef_macros'. 
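Aside, for readers skimming this hunk and the next: check_extensions_list() converts old-style (ext_name, build_info) tuples into Extension instances, and the non-trivial part is splitting the 'macros' build-info entry into define_macros and undef_macros. A minimal, runnable sketch of that behavior (illustrative only, not part of the patch; the 'demo' extension name and its sources are made up):

from distutils.extension import Extension

# Hypothetical old-style build info, as accepted by check_extensions_list():
# a 2-tuple defines a macro with a value, a 1-tuple undefines one.
build_info = {'macros': [('NDEBUG', '1'), ('DEBUG',)]}
ext = Extension('demo', sources=['demo.c'])

for macro in build_info['macros']:
    if len(macro) == 1:
        ext.undef_macros.append(macro[0])  # ('DEBUG',) -> undefine DEBUG
    elif len(macro) == 2:
        ext.define_macros.append(macro)    # ('NDEBUG', '1') -> define NDEBUG=1

print(ext.define_macros)  # [('NDEBUG', '1')]
print(ext.undef_macros)   # ['DEBUG']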
@@ -407,8 +425,9 @@ def check_extensions_list(self, extensions): for macro in macros: if not (isinstance(macro, tuple) and len(macro) in (1, 2)): raise DistutilsSetupError( - "'macros' element of build info dict " - "must be 1- or 2-tuple") + "'macros' element of build info dict " + "must be 1- or 2-tuple" + ) if len(macro) == 1: ext.undef_macros.append(macro[0]) elif len(macro) == 2: @@ -461,8 +480,9 @@ def _build_extensions_parallel(self): return with ThreadPoolExecutor(max_workers=workers) as executor: - futures = [executor.submit(self.build_extension, ext) - for ext in self.extensions] + futures = [ + executor.submit(self.build_extension, ext) for ext in self.extensions + ] for ext, fut in zip(self.extensions, futures): with self._filter_build_errors(ext): fut.result() @@ -479,16 +499,16 @@ def _filter_build_errors(self, ext): except (CCompilerError, DistutilsError, CompileError) as e: if not ext.optional: raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) + self.warn('building extension "{}" failed: {}'.format(ext.name, e)) def build_extension(self, ext): sources = ext.sources if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( - "in 'ext_modules' option (extension '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % ext.name) + "in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % ext.name + ) # sort to make the resulting .so file build reproducible sources = sorted(sources) @@ -525,13 +545,15 @@ def build_extension(self, ext): for undef in ext.undef_macros: macros.append((undef,)) - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=ext.include_dirs, - debug=self.debug, - extra_postargs=extra_args, - depends=ext.depends) + objects = self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=ext.include_dirs, + debug=self.debug, + extra_postargs=extra_args, + depends=ext.depends, + ) # XXX outdated variable, kept here in case third-part code # needs it. @@ -548,7 +570,8 @@ def build_extension(self, ext): language = ext.language or self.compiler.detect_language(sources) self.compiler.link_shared_object( - objects, ext_path, + objects, + ext_path, libraries=self.get_libraries(ext), library_dirs=ext.library_dirs, runtime_library_dirs=ext.runtime_library_dirs, @@ -556,7 +579,8 @@ def build_extension(self, ext): export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, - target_lang=language) + target_lang=language, + ) def swig_sources(self, sources, extension): """Walk the list of source files in 'sources', looking for SWIG @@ -574,17 +598,20 @@ def swig_sources(self, sources, extension): # the temp dir. 
if self.swig_cpp: - log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") + log.warning("--swig-cpp is deprecated - use --swig-opts=-c++") - if self.swig_cpp or ('-c++' in self.swig_opts) or \ - ('-c++' in extension.swig_opts): + if ( + self.swig_cpp + or ('-c++' in self.swig_opts) + or ('-c++' in extension.swig_opts) + ): target_ext = '.cpp' else: target_ext = '.c' for source in sources: (base, ext) = os.path.splitext(source) - if ext == ".i": # SWIG interface file + if ext == ".i": # SWIG interface file new_sources.append(base + '_wrap' + target_ext) swig_sources.append(source) swig_targets[source] = new_sources[-1] @@ -631,8 +658,9 @@ def find_swig(self): return "swig.exe" else: raise DistutilsPlatformError( - "I don't know how to find (much less run) SWIG " - "on platform '%s'" % os.name) + "I don't know how to find (much less run) SWIG " + "on platform '%s'" % os.name + ) # -- Name generators ----------------------------------------------- # (extension names, filenames, whatever) @@ -650,7 +678,7 @@ def get_ext_fullpath(self, ext_name): # no further work needed # returning : # build_dir/package/path/filename - filename = os.path.join(*modpath[:-1]+[filename]) + filename = os.path.join(*modpath[:-1] + [filename]) return os.path.join(self.build_lib, filename) # the inplace option requires to find the package directory @@ -677,7 +705,8 @@ def get_ext_filename(self, ext_name): of the file from which it will be loaded (eg. "foo/bar.so", or "foo\bar.pyd"). """ - from distutils.sysconfig import get_config_var + from ..sysconfig import get_config_var + ext_path = ext_name.split('.') ext_suffix = get_config_var('EXT_SUFFIX') return os.path.join(*ext_path) + ext_suffix @@ -691,7 +720,7 @@ def get_export_symbols(self, ext): name = ext.name.split('.')[-1] try: # Unicode module name support as defined in PEP-489 - # https://www.python.org/dev/peps/pep-0489/#export-hook-name + # https://peps.python.org/pep-0489/#export-hook-name name.encode('ascii') except UnicodeEncodeError: suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii') @@ -703,7 +732,7 @@ def get_export_symbols(self, ext): ext.export_symbols.append(initfunc_name) return ext.export_symbols - def get_libraries(self, ext): + def get_libraries(self, ext): # noqa: C901 """Return the list of libraries to link against when building a shared extension. On most platforms, this is just 'ext.libraries'; on Windows, we add the Python library (eg. python20.dll). @@ -714,13 +743,16 @@ def get_libraries(self, ext): # to need it mentioned explicitly, though, so that's what we do. # Append '_d' to the python import library on debug builds. if sys.platform == "win32": - from distutils._msvccompiler import MSVCCompiler + from .._msvccompiler import MSVCCompiler + if not isinstance(self.compiler, MSVCCompiler): template = "python%d%d" if self.debug: template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + pythonlib = template % ( + sys.hexversion >> 24, + (sys.hexversion >> 16) & 0xFF, + ) # don't extend ext.libraries, it may be shared with other # extensions, it is a reference to the original list return ext.libraries + [pythonlib] @@ -733,7 +765,8 @@ def get_libraries(self, ext): # On Cygwin (and if required, other POSIX-like platforms based on # Windows like MinGW) it is simply necessary that all symbols in # shared libraries are resolved at link time. 
- from distutils.sysconfig import get_config_var + from ..sysconfig import get_config_var + link_libpython = False if get_config_var('Py_ENABLE_SHARED'): # A native build on an Android device or on Cygwin diff --git a/setuptools/_distutils/command/build_py.py b/setuptools/_distutils/command/build_py.py index 7ef9bcefde..d9df95922f 100644 --- a/setuptools/_distutils/command/build_py.py +++ b/setuptools/_distutils/command/build_py.py @@ -7,27 +7,30 @@ import sys import glob -from distutils.core import Command -from distutils.errors import * -from distutils.util import convert_path -from distutils import log +from ..core import Command +from ..errors import DistutilsOptionError, DistutilsFileError +from ..util import convert_path +from distutils._log import log -class build_py (Command): +class build_py(Command): description = "\"build\" pure Python modules (copy to build directory)" user_options = [ ('build-lib=', 'd', "directory to \"build\" (copy) to"), ('compile', 'c', "compile .py to .pyc"), ('no-compile', None, "don't compile .py files [default]"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ( + 'optimize=', + 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", + ), ('force', 'f', "forcibly build everything (ignore file timestamps)"), - ] + ] boolean_options = ['compile', 'force'] - negative_opt = {'no-compile' : 'compile'} + negative_opt = {'no-compile': 'compile'} def initialize_options(self): self.build_lib = None @@ -40,9 +43,9 @@ def initialize_options(self): self.force = None def finalize_options(self): - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('force', 'force')) + self.set_undefined_options( + 'build', ('build_lib', 'build_lib'), ('force', 'force') + ) # Get the distribution options that are aliases for build_py # options -- list of packages and list of modules. 
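Aside: the build_py finalize_options() hunk above relies on set_undefined_options(), which copies any still-unset options from another command object. A small sketch of how build_py inherits build_lib from the top-level 'build' command (illustrative only, not part of the patch; the 'demo' project name and the 'custom/lib' path are made up):

from distutils.dist import Distribution

dist = Distribution({'name': 'demo'})
build = dist.get_command_obj('build')
build.build_lib = 'custom/lib'  # as if --build-lib had been passed to 'build'

build_py = dist.get_command_obj('build_py')
build_py.ensure_finalized()     # runs set_undefined_options('build', ...)
print(build_py.build_lib)       # 'custom/lib', inherited from 'build'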
@@ -109,42 +112,42 @@ def get_data_files(self): # Length of path to strip from found files plen = 0 if src_dir: - plen = len(src_dir)+1 + plen = len(src_dir) + 1 # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] + filenames = [file[plen:] for file in self.find_data_files(package, src_dir)] data.append((package, src_dir, build_dir, filenames)) return data def find_data_files(self, package, src_dir): """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) + globs = self.package_data.get('', []) + self.package_data.get(package, []) files = [] for pattern in globs: # Each pattern has to be converted to a platform-specific path - filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) + filelist = glob.glob( + os.path.join(glob.escape(src_dir), convert_path(pattern)) + ) # Files that match more than one pattern are only added once - files.extend([fn for fn in filelist if fn not in files - and os.path.isfile(fn)]) + files.extend( + [fn for fn in filelist if fn not in files and os.path.isfile(fn)] + ) return files def build_package_data(self): """Copy data files into build directory""" - lastdir = None for package, src_dir, build_dir, filenames in self.data_files: for filename in filenames: target = os.path.join(build_dir, filename) self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target, - preserve_mode=False) + self.copy_file( + os.path.join(src_dir, filename), target, preserve_mode=False + ) def get_package_dir(self, package): """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any).""" + distribution, where package 'package' should be found + (at least according to the 'package_dir' option, if any).""" path = package.split('.') if not self.package_dir: @@ -188,20 +191,19 @@ def check_package(self, package, package_dir): if package_dir != "": if not os.path.exists(package_dir): raise DistutilsFileError( - "package directory '%s' does not exist" % package_dir) + "package directory '%s' does not exist" % package_dir + ) if not os.path.isdir(package_dir): raise DistutilsFileError( - "supposed package directory '%s' exists, " - "but is not a directory" % package_dir) + "supposed package directory '%s' exists, " + "but is not a directory" % package_dir + ) - # Require __init__.py for all but the "root package" + # Directories without __init__.py are namespace packages (PEP 420). if package: init_py = os.path.join(package_dir, "__init__.py") if os.path.isfile(init_py): return init_py - else: - log.warn(("package init file '%s' not found " + - "(or not a regular file)"), init_py) # Either not in a package at all (__init__.py not expected), or # __init__.py doesn't exist -- so don't return the filename. 
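Aside: the find_data_files() change above reflows the code but keeps the matching logic. A self-contained sketch of that logic (illustrative only, not part of the patch; the 'demo' package, the 'src/demo' directory, and the patterns are made up; convert_path() is omitted for brevity):

import glob
import os

package_data = {'': ['*.txt'], 'demo': ['data/*.json']}
package = 'demo'
src_dir = os.path.join('src', 'demo')  # hypothetical package directory

# Patterns under the '' key apply to every package; package-specific
# patterns are appended after them.
globs = package_data.get('', []) + package_data.get(package, [])
files = []
for pattern in globs:
    # glob.escape() stops characters in src_dir itself acting as wildcards.
    filelist = glob.glob(os.path.join(glob.escape(src_dir), pattern))
    # Files that match more than one pattern are only added once.
    files.extend([fn for fn in filelist if fn not in files and os.path.isfile(fn)])

print(files)  # e.g. ['src/demo/notes.txt', 'src/demo/data/config.json']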
@@ -209,7 +211,7 @@ def check_package(self, package, package_dir): def check_module(self, module, module_file): if not os.path.isfile(module_file): - log.warn("file %s (for module %s) not found", module_file, module) + log.warning("file %s (for module %s) not found", module_file, module) return False else: return True @@ -307,23 +309,27 @@ def get_module_outfile(self, build_dir, package, module): def get_outputs(self, include_bytecode=1): modules = self.find_all_modules() outputs = [] - for (package, module, module_file) in modules: + for package, module, module_file in modules: package = package.split('.') filename = self.get_module_outfile(self.build_lib, package, module) outputs.append(filename) if include_bytecode: if self.compile: - outputs.append(importlib.util.cache_from_source( - filename, optimization='')) + outputs.append( + importlib.util.cache_from_source(filename, optimization='') + ) if self.optimize > 0: - outputs.append(importlib.util.cache_from_source( - filename, optimization=self.optimize)) + outputs.append( + importlib.util.cache_from_source( + filename, optimization=self.optimize + ) + ) outputs += [ os.path.join(build_dir, filename) for package, src_dir, build_dir, filenames in self.data_files for filename in filenames - ] + ] return outputs @@ -332,7 +338,8 @@ def build_module(self, module, module_file, package): package = package.split('.') elif not isinstance(package, (list, tuple)): raise TypeError( - "'package' must be a string (dot-separated), list, or tuple") + "'package' must be a string (dot-separated), list, or tuple" + ) # Now put the module source file into the "build" area -- this is # easy, we just copy it somewhere under self.build_lib (the build @@ -344,7 +351,7 @@ def build_module(self, module, module_file, package): def build_modules(self): modules = self.find_modules() - for (package, module, module_file) in modules: + for package, module, module_file in modules: # Now "build" the module -- ie. copy the source file to # self.build_lib (the build directory for Python source). # (Actually, it gets copied to the directory for this package @@ -367,7 +374,7 @@ def build_packages(self): # Now loop over the modules we found, "building" each one (just # copy it to self.build_lib). - for (package_, module, module_file) in modules: + for package_, module, module_file in modules: assert package == package_ self.build_module(module, module_file, package) @@ -376,7 +383,8 @@ def byte_compile(self, files): self.warn('byte-compiling is disabled, skipping.') return - from distutils.util import byte_compile + from ..util import byte_compile + prefix = self.build_lib if prefix[-1] != os.sep: prefix = prefix + os.sep @@ -385,8 +393,14 @@ def byte_compile(self, files): # method of the "install_lib" command, except for the determination # of the 'prefix' string. Hmmm. 
if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=prefix, dry_run=self.dry_run) + byte_compile( + files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run + ) if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=prefix, dry_run=self.dry_run) + byte_compile( + files, + optimize=self.optimize, + force=self.force, + prefix=prefix, + dry_run=self.dry_run, + ) diff --git a/setuptools/_distutils/command/build_scripts.py b/setuptools/_distutils/command/build_scripts.py index e3312cf0ca..ce222f1e52 100644 --- a/setuptools/_distutils/command/build_scripts.py +++ b/setuptools/_distutils/command/build_scripts.py @@ -2,43 +2,49 @@ Implements the Distutils 'build_scripts' command.""" -import os, re +import os +import re from stat import ST_MODE from distutils import sysconfig -from distutils.core import Command -from distutils.dep_util import newer -from distutils.util import convert_path -from distutils import log +from ..core import Command +from ..dep_util import newer +from ..util import convert_path +from distutils._log import log import tokenize -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') +shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$') +""" +Pattern matching a Python interpreter indicated in first line of a script. +""" + +# for Setuptools compatibility +first_line_re = shebang_pattern -class build_scripts(Command): +class build_scripts(Command): description = "\"build\" scripts (copy and fixup #! line)" user_options = [ ('build-dir=', 'd', "directory to \"build\" (copy) to"), ('force', 'f', "forcibly build everything (ignore file timestamps"), ('executable=', 'e', "specify final destination interpreter path"), - ] + ] boolean_options = ['force'] - def initialize_options(self): self.build_dir = None self.scripts = None self.force = None self.executable = None - self.outfiles = None def finalize_options(self): - self.set_undefined_options('build', - ('build_scripts', 'build_dir'), - ('force', 'force'), - ('executable', 'executable')) + self.set_undefined_options( + 'build', + ('build_scripts', 'build_dir'), + ('force', 'force'), + ('executable', 'executable'), + ) self.scripts = self.distribution.scripts def get_source_files(self): @@ -49,104 +55,118 @@ def run(self): return self.copy_scripts() - def copy_scripts(self): - r"""Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. + """ + Copy each script listed in ``self.scripts``. + + If a script is marked as a Python script (first line matches + 'shebang_pattern', i.e. starts with ``#!`` and contains + "python"), then adjust in the copy the first line to refer to + the current Python interpreter. """ self.mkpath(self.build_dir) outfiles = [] updated_files = [] for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - log.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. 
- try: - f = open(script, "rb") - except OSError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = tokenize.detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - self.warn("%s is an empty file (skipping)" % script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - log.info("copying and adjusting %s -> %s", script, - self.build_dir) - updated_files.append(outfile) - if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" + executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - updated_files.append(outfile) - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - log.info("changing mode of %s", file) - else: - oldmode = os.stat(file)[ST_MODE] & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - log.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - # XXX should we modify self.outfiles? + self._copy_script(script, outfiles, updated_files) + + self._change_modes(outfiles) + return outfiles, updated_files + + def _copy_script(self, script, outfiles, updated_files): # noqa: C901 + shebang_match = None + script = convert_path(script) + outfile = os.path.join(self.build_dir, os.path.basename(script)) + outfiles.append(outfile) + + if not self.force and not newer(script, outfile): + log.debug("not copying %s (up-to-date)", script) + return + + # Always open the file, but ignore failures in dry-run mode + # in order to attempt to copy directly. + try: + f = tokenize.open(script) + except OSError: + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: + self.warn("%s is an empty file (skipping)" % script) + return + + shebang_match = shebang_pattern.match(first_line) + + updated_files.append(outfile) + if shebang_match: + log.info("copying and adjusting %s -> %s", script, self.build_dir) + if not self.dry_run: + if not sysconfig.python_build: + executable = self.executable + else: + executable = os.path.join( + sysconfig.get_config_var("BINDIR"), + "python%s%s" + % ( + sysconfig.get_config_var("VERSION"), + sysconfig.get_config_var("EXE"), + ), + ) + post_interp = shebang_match.group(1) or '' + shebang = "#!" 
+ executable + post_interp + "\n" + self._validate_shebang(shebang, f.encoding) + with open(outfile, "w", encoding=f.encoding) as outf: + outf.write(shebang) + outf.writelines(f.readlines()) + if f: + f.close() + else: + if f: + f.close() + self.copy_file(script, outfile) + + def _change_modes(self, outfiles): + if os.name != 'posix': + return + + for file in outfiles: + self._change_mode(file) + + def _change_mode(self, file): + if self.dry_run: + log.info("changing mode of %s", file) + return + + oldmode = os.stat(file)[ST_MODE] & 0o7777 + newmode = (oldmode | 0o555) & 0o7777 + if newmode != oldmode: + log.info("changing mode of %s from %o to %o", file, oldmode, newmode) + os.chmod(file, newmode) + + @staticmethod + def _validate_shebang(shebang, encoding): + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be encodable to + # UTF-8. + try: + shebang.encode('utf-8') + except UnicodeEncodeError: + raise ValueError( + "The shebang ({!r}) is not encodable " "to utf-8".format(shebang) + ) + + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be encodable to + # the script encoding too. + try: + shebang.encode(encoding) + except UnicodeEncodeError: + raise ValueError( + "The shebang ({!r}) is not encodable " + "to the script encoding ({})".format(shebang, encoding) + ) diff --git a/setuptools/_distutils/command/check.py b/setuptools/_distutils/command/check.py index 525540b6cc..575e49fb4b 100644 --- a/setuptools/_distutils/command/check.py +++ b/setuptools/_distutils/command/check.py @@ -2,46 +2,56 @@ Implements the Distutils 'check' command. """ -from distutils.core import Command -from distutils.errors import DistutilsSetupError - -try: - # docutils is installed - from docutils.utils import Reporter - from docutils.parsers.rst import Parser - from docutils import frontend - from docutils import nodes - - class SilentReporter(Reporter): - - def __init__(self, source, report_level, halt_level, stream=None, - debug=0, encoding='ascii', error_handler='replace'): +import contextlib + +from ..core import Command +from ..errors import DistutilsSetupError + +with contextlib.suppress(ImportError): + import docutils.utils + import docutils.parsers.rst + import docutils.frontend + import docutils.nodes + + class SilentReporter(docutils.utils.Reporter): + def __init__( + self, + source, + report_level, + halt_level, + stream=None, + debug=0, + encoding='ascii', + error_handler='replace', + ): self.messages = [] - super().__init__(source, report_level, halt_level, stream, - debug, encoding, error_handler) + super().__init__( + source, report_level, halt_level, stream, debug, encoding, error_handler + ) def system_message(self, level, message, *children, **kwargs): self.messages.append((level, message, children, kwargs)) - return nodes.system_message(message, level=level, - type=self.levels[level], - *children, **kwargs) + return docutils.nodes.system_message( + message, level=level, type=self.levels[level], *children, **kwargs + ) - HAS_DOCUTILS = True -except Exception: - # Catch all exceptions because exceptions besides ImportError probably - # indicate that docutils is not ported to Py3k. - HAS_DOCUTILS = False class check(Command): - """This command checks the meta-data of the package. 
- """ - description = ("perform some checks on the package") - user_options = [('metadata', 'm', 'Verify meta-data'), - ('restructuredtext', 'r', - ('Checks if long string meta-data syntax ' - 'are reStructuredText-compliant')), - ('strict', 's', - 'Will exit with an error if a check fails')] + """This command checks the meta-data of the package.""" + + description = "perform some checks on the package" + user_options = [ + ('metadata', 'm', 'Verify meta-data'), + ( + 'restructuredtext', + 'r', + ( + 'Checks if long string meta-data syntax ' + 'are reStructuredText-compliant' + ), + ), + ('strict', 's', 'Will exit with an error if a check fails'), + ] boolean_options = ['metadata', 'restructuredtext', 'strict'] @@ -66,8 +76,11 @@ def run(self): if self.metadata: self.check_metadata() if self.restructuredtext: - if HAS_DOCUTILS: - self.check_restructuredtext() + if 'docutils' in globals(): + try: + self.check_restructuredtext() + except TypeError as exc: + raise DistutilsSetupError(str(exc)) elif self.strict: raise DistutilsSetupError('The docutils package is needed.') @@ -80,34 +93,19 @@ def check_metadata(self): """Ensures that all required elements of meta-data are supplied. Required fields: - name, version, URL - - Recommended fields: - (author and author_email) or (maintainer and maintainer_email)) + name, version Warns if any are missing. """ metadata = self.distribution.metadata missing = [] - for attr in ('name', 'version', 'url'): - if not (hasattr(metadata, attr) and getattr(metadata, attr)): + for attr in 'name', 'version': + if not getattr(metadata, attr, None): missing.append(attr) if missing: - self.warn("missing required meta-data: %s" % ', '.join(missing)) - if metadata.author: - if not metadata.author_email: - self.warn("missing meta-data: if 'author' supplied, " + - "'author_email' should be supplied too") - elif metadata.maintainer: - if not metadata.maintainer_email: - self.warn("missing meta-data: if 'maintainer' supplied, " + - "'maintainer_email' should be supplied too") - else: - self.warn("missing meta-data: either (author and author_email) " + - "or (maintainer and maintainer_email) " + - "should be supplied") + self.warn("missing required meta-data: %s" % ', '.join(missing)) def check_restructuredtext(self): """Checks if the long string fields are reST-compliant.""" @@ -117,32 +115,37 @@ def check_restructuredtext(self): if line is None: warning = warning[1] else: - warning = '%s (line %s)' % (warning[1], line) + warning = '{} (line {})'.format(warning[1], line) self.warn(warning) def _check_rst_data(self, data): """Returns warnings when the provided data doesn't compile.""" # the include and csv_table directives need this to be a path source_path = self.distribution.script_name or 'setup.py' - parser = Parser() - settings = frontend.OptionParser(components=(Parser,)).get_default_values() + parser = docutils.parsers.rst.Parser() + settings = docutils.frontend.OptionParser( + components=(docutils.parsers.rst.Parser,) + ).get_default_values() settings.tab_width = 4 settings.pep_references = None settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - error_handler=settings.error_encoding_error_handler) - - document = nodes.document(settings, reporter, source=source_path) + reporter = SilentReporter( + source_path, + settings.report_level, + settings.halt_level, + stream=settings.warning_stream, + 
debug=settings.debug, + encoding=settings.error_encoding, + error_handler=settings.error_encoding_error_handler, + ) + + document = docutils.nodes.document(settings, reporter, source=source_path) document.note_source(source_path, -1) try: parser.parse(data, document) except AttributeError as e: reporter.messages.append( - (-1, 'Could not finish the parsing: %s.' % e, '', {})) + (-1, 'Could not finish the parsing: %s.' % e, '', {}) + ) return reporter.messages diff --git a/setuptools/_distutils/command/clean.py b/setuptools/_distutils/command/clean.py index 0cb2701662..9413f7cfcb 100644 --- a/setuptools/_distutils/command/clean.py +++ b/setuptools/_distutils/command/clean.py @@ -5,26 +5,28 @@ # contributed by Bastian Kleineidam , added 2000-03-18 import os -from distutils.core import Command -from distutils.dir_util import remove_tree -from distutils import log +from ..core import Command +from ..dir_util import remove_tree +from distutils._log import log -class clean(Command): +class clean(Command): description = "clean up temporary files from 'build' command" user_options = [ - ('build-base=', 'b', - "base build directory (default: 'build.build-base')"), - ('build-lib=', None, - "build directory for all modules (default: 'build.build-lib')"), - ('build-temp=', 't', - "temporary build directory (default: 'build.build-temp')"), - ('build-scripts=', None, - "build directory for scripts (default: 'build.build-scripts')"), - ('bdist-base=', None, - "temporary directory for built distributions"), - ('all', 'a', - "remove all build output, not just temporary by-products") + ('build-base=', 'b', "base build directory (default: 'build.build-base')"), + ( + 'build-lib=', + None, + "build directory for all modules (default: 'build.build-lib')", + ), + ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"), + ( + 'build-scripts=', + None, + "build directory for scripts (default: 'build.build-scripts')", + ), + ('bdist-base=', None, "temporary directory for built distributions"), + ('all', 'a', "remove all build output, not just temporary by-products"), ] boolean_options = ['all'] @@ -38,13 +40,14 @@ def initialize_options(self): self.all = None def finalize_options(self): - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib'), - ('build_scripts', 'build_scripts'), - ('build_temp', 'build_temp')) - self.set_undefined_options('bdist', - ('bdist_base', 'bdist_base')) + self.set_undefined_options( + 'build', + ('build_base', 'build_base'), + ('build_lib', 'build_lib'), + ('build_scripts', 'build_scripts'), + ('build_temp', 'build_temp'), + ) + self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) def run(self): # remove the build/temp. 
directory (unless it's already @@ -52,19 +55,15 @@ def run(self): if os.path.exists(self.build_temp): remove_tree(self.build_temp, dry_run=self.dry_run) else: - log.debug("'%s' does not exist -- can't clean it", - self.build_temp) + log.debug("'%s' does not exist -- can't clean it", self.build_temp) if self.all: # remove build directories - for directory in (self.build_lib, - self.bdist_base, - self.build_scripts): + for directory in (self.build_lib, self.bdist_base, self.build_scripts): if os.path.exists(directory): remove_tree(directory, dry_run=self.dry_run) else: - log.warn("'%s' does not exist -- can't clean it", - directory) + log.warning("'%s' does not exist -- can't clean it", directory) # just for the heck of it, try to remove the base build directory: # we might have emptied it right now, but if not we don't care diff --git a/setuptools/_distutils/command/config.py b/setuptools/_distutils/command/config.py index aeda408e73..494d97d16f 100644 --- a/setuptools/_distutils/command/config.py +++ b/setuptools/_distutils/command/config.py @@ -9,41 +9,35 @@ this header file lives". """ -import os, re +import os +import re -from distutils.core import Command -from distutils.errors import DistutilsExecError -from distutils.sysconfig import customize_compiler -from distutils import log +from ..core import Command +from ..errors import DistutilsExecError +from ..sysconfig import customize_compiler +from distutils._log import log LANG_EXT = {"c": ".c", "c++": ".cxx"} -class config(Command): +class config(Command): description = "prepare to build" user_options = [ - ('compiler=', None, - "specify the compiler type"), - ('cc=', None, - "specify the compiler executable"), - ('include-dirs=', 'I', - "list of directories to search for header files"), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries"), - - ('noisy', None, - "show every action (compile, link, run, ...) taken"), - ('dump-source', None, - "dump generated source files before attempting to compile them"), - ] - + ('compiler=', None, "specify the compiler type"), + ('cc=', None, "specify the compiler executable"), + ('include-dirs=', 'I', "list of directories to search for header files"), + ('define=', 'D', "C preprocessor macros to define"), + ('undef=', 'U', "C preprocessor macros to undefine"), + ('libraries=', 'l', "external C libraries to link with"), + ('library-dirs=', 'L', "directories to search for external C libraries"), + ('noisy', None, "show every action (compile, link, run, ...) taken"), + ( + 'dump-source', + None, + "dump generated source files before attempting to compile them", + ), + ] # The three standard command methods: since the "config" command # does nothing by default, these are empty. @@ -92,10 +86,12 @@ def _check_compiler(self): """ # We do this late, and only on-demand, because this is an expensive # import. 
- from distutils.ccompiler import CCompiler, new_compiler + from ..ccompiler import CCompiler, new_compiler + if not isinstance(self.compiler, CCompiler): - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=1) + self.compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=1 + ) customize_compiler(self.compiler) if self.include_dirs: self.compiler.set_include_dirs(self.include_dirs) @@ -132,14 +128,16 @@ def _compile(self, body, headers, include_dirs, lang): self.compiler.compile([src], include_dirs=include_dirs) return (src, obj) - def _link(self, body, headers, include_dirs, libraries, library_dirs, - lang): + def _link(self, body, headers, include_dirs, libraries, library_dirs, lang): (src, obj) = self._compile(body, headers, include_dirs, lang) prog = os.path.splitext(os.path.basename(src))[0] - self.compiler.link_executable([obj], prog, - libraries=libraries, - library_dirs=library_dirs, - target_lang=lang) + self.compiler.link_executable( + [obj], + prog, + libraries=libraries, + library_dirs=library_dirs, + target_lang=lang, + ) if self.compiler.exe_extension is not None: prog = prog + self.compiler.exe_extension @@ -158,7 +156,6 @@ def _clean(self, *filenames): except OSError: pass - # XXX these ignore the dry-run flag: what to do, what to do? even if # you want a dry-run build, you still need some sort of configuration # info. My inclination is to make it up to the real config command to @@ -176,7 +173,8 @@ def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): preprocessor succeeded, false if there were any errors. ('body' probably isn't of much use, but what the heck.) """ - from distutils.ccompiler import CompileError + from ..ccompiler import CompileError + self._check_compiler() ok = True try: @@ -187,8 +185,7 @@ def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): self._clean() return ok - def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, - lang="c"): + def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"): """Construct a source file (just like 'try_cpp()'), run it through the preprocessor, and return true if any line of the output matches 'pattern'. 'pattern' should either be a compiled regex object or a @@ -219,7 +216,8 @@ def try_compile(self, body, headers=None, include_dirs=None, lang="c"): """Try to compile a source file built from 'body' and 'headers'. Return true on success, false otherwise. """ - from distutils.ccompiler import CompileError + from ..ccompiler import CompileError + self._check_compiler() try: self._compile(body, headers, include_dirs, lang) @@ -231,17 +229,24 @@ def try_compile(self, body, headers=None, include_dirs=None, lang="c"): self._clean() return ok - def try_link(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): + def try_link( + self, + body, + headers=None, + include_dirs=None, + libraries=None, + library_dirs=None, + lang="c", + ): """Try to compile and link a source file, built from 'body' and 'headers', to executable form. Return true on success, false otherwise. 
""" - from distutils.ccompiler import CompileError, LinkError + from ..ccompiler import CompileError, LinkError + self._check_compiler() try: - self._link(body, headers, include_dirs, - libraries, library_dirs, lang) + self._link(body, headers, include_dirs, libraries, library_dirs, lang) ok = True except (CompileError, LinkError): ok = False @@ -250,17 +255,26 @@ def try_link(self, body, headers=None, include_dirs=None, libraries=None, self._clean() return ok - def try_run(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): + def try_run( + self, + body, + headers=None, + include_dirs=None, + libraries=None, + library_dirs=None, + lang="c", + ): """Try to compile, link to an executable, and run a program built from 'body' and 'headers'. Return true on success, false otherwise. """ - from distutils.ccompiler import CompileError, LinkError + from ..ccompiler import CompileError, LinkError + self._check_compiler() try: - src, obj, exe = self._link(body, headers, include_dirs, - libraries, library_dirs, lang) + src, obj, exe = self._link( + body, headers, include_dirs, libraries, library_dirs, lang + ) self.spawn([exe]) ok = True except (CompileError, LinkError, DistutilsExecError): @@ -270,13 +284,20 @@ def try_run(self, body, headers=None, include_dirs=None, libraries=None, self._clean() return ok - # -- High-level methods -------------------------------------------- # (these are the ones that are actually likely to be useful # when implementing a real-world config command!) - def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=0, call=0): + def check_func( + self, + func, + headers=None, + include_dirs=None, + libraries=None, + library_dirs=None, + decl=0, + call=0, + ): """Determine if function 'func' is available by constructing a source file that refers to 'func', and compiles and links it. If everything succeeds, returns true; otherwise returns false. @@ -302,11 +323,16 @@ def check_func(self, func, headers=None, include_dirs=None, body.append("}") body = "\n".join(body) + "\n" - return self.try_link(body, headers, include_dirs, - libraries, library_dirs) + return self.try_link(body, headers, include_dirs, libraries, library_dirs) - def check_lib(self, library, library_dirs=None, headers=None, - include_dirs=None, other_libraries=[]): + def check_lib( + self, + library, + library_dirs=None, + headers=None, + include_dirs=None, + other_libraries=[], + ): """Determine if 'library' is available to be linked against, without actually checking that any particular symbols are provided by it. 'headers' will be used in constructing the source file to @@ -316,17 +342,23 @@ def check_lib(self, library, library_dirs=None, headers=None, has symbols that depend on other libraries. """ self._check_compiler() - return self.try_link("int main (void) { }", headers, include_dirs, - [library] + other_libraries, library_dirs) - - def check_header(self, header, include_dirs=None, library_dirs=None, - lang="c"): + return self.try_link( + "int main (void) { }", + headers, + include_dirs, + [library] + other_libraries, + library_dirs, + ) + + def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"): """Determine if the system header file named by 'header_file' exists and can be found by the preprocessor; return true if so, false otherwise. 
""" - return self.try_cpp(body="/* No body */", headers=[header], - include_dirs=include_dirs) + return self.try_cpp( + body="/* No body */", headers=[header], include_dirs=include_dirs + ) + def dump_file(filename, head=None): """Dumps a file content into log.info. diff --git a/setuptools/_distutils/command/install.py b/setuptools/_distutils/command/install.py index 41c17d8a7f..a7ac4e6077 100644 --- a/setuptools/_distutils/command/install.py +++ b/setuptools/_distutils/command/install.py @@ -8,19 +8,20 @@ import sysconfig import itertools -from distutils import log -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.sysconfig import get_config_vars -from distutils.errors import DistutilsPlatformError -from distutils.file_util import write_file -from distutils.util import convert_path, subst_vars, change_root -from distutils.util import get_platform -from distutils.errors import DistutilsOptionError +from distutils._log import log +from ..core import Command +from ..debug import DEBUG +from ..sysconfig import get_config_vars +from ..file_util import write_file +from ..util import convert_path, subst_vars, change_root +from ..util import get_platform +from ..errors import DistutilsOptionError, DistutilsPlatformError +from . import _framework_compat as fw from .. import _collections from site import USER_BASE from site import USER_SITE + HAS_USER_SITE = True WINDOWS_SCHEME = { @@ -28,59 +29,66 @@ 'platlib': '{base}/Lib/site-packages', 'headers': '{base}/Include/{dist_name}', 'scripts': '{base}/Scripts', - 'data' : '{base}', + 'data': '{base}', } INSTALL_SCHEMES = { 'posix_prefix': { 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', - 'headers': '{base}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', + 'platlib': '{platbase}/{platlibdir}/{implementation_lower}' + '{py_version_short}/site-packages', + 'headers': '{base}/include/{implementation_lower}' + '{py_version_short}{abiflags}/{dist_name}', 'scripts': '{base}/bin', - 'data' : '{base}', - }, + 'data': '{base}', + }, 'posix_home': { 'purelib': '{base}/lib/{implementation_lower}', 'platlib': '{base}/{platlibdir}/{implementation_lower}', 'headers': '{base}/include/{implementation_lower}/{dist_name}', 'scripts': '{base}/bin', - 'data' : '{base}', - }, + 'data': '{base}', + }, 'nt': WINDOWS_SCHEME, 'pypy': { 'purelib': '{base}/site-packages', 'platlib': '{base}/site-packages', 'headers': '{base}/include/{dist_name}', 'scripts': '{base}/bin', - 'data' : '{base}', - }, + 'data': '{base}', + }, 'pypy_nt': { 'purelib': '{base}/site-packages', 'platlib': '{base}/site-packages', 'headers': '{base}/include/{dist_name}', 'scripts': '{base}/Scripts', - 'data' : '{base}', - }, - } + 'data': '{base}', + }, +} # user site schemes if HAS_USER_SITE: INSTALL_SCHEMES['nt_user'] = { 'purelib': '{usersite}', 'platlib': '{usersite}', - 'headers': '{userbase}/{implementation}{py_version_nodot_plat}/Include/{dist_name}', + 'headers': '{userbase}/{implementation}{py_version_nodot_plat}' + '/Include/{dist_name}', 'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts', - 'data' : '{userbase}', - } + 'data': '{userbase}', + } INSTALL_SCHEMES['posix_user'] = { 'purelib': '{usersite}', 'platlib': '{usersite}', - 'headers': - '{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', + 'headers': '{userbase}/include/{implementation_lower}' + 
'{py_version_short}{abiflags}/{dist_name}', 'scripts': '{userbase}/bin', - 'data' : '{userbase}', - } + 'data': '{userbase}', + } + + +INSTALL_SCHEMES.update(fw.schemes) + # The keys to an installation scheme; if any new types of files are to be # installed, be sure to add an entry to every installation scheme above, @@ -128,11 +136,7 @@ def _remove_set(ob, attrs): """ Include only attrs that are None in ob. """ - return { - key: value - for key, value in attrs.items() - if getattr(ob, key) is None - } + return {key: value for key, value in attrs.items() if getattr(ob, key) is None} def _resolve_scheme(name): @@ -140,7 +144,7 @@ def _resolve_scheme(name): try: resolved = sysconfig.get_preferred_scheme(key) except Exception: - resolved = _pypy_hack(name) + resolved = fw.scheme(_pypy_hack(name)) return resolved @@ -164,10 +168,7 @@ def _inject_headers(name, scheme): def _scheme_attrs(scheme): """Resolve install directories by applying the install schemes.""" - return { - f'install_{key}': scheme[key] - for key in SCHEME_KEYS - } + return {f'install_{key}': scheme[key] for key in SCHEME_KEYS} def _pypy_hack(name): @@ -179,77 +180,77 @@ def _pypy_hack(name): class install(Command): - description = "install everything from build directory" user_options = [ # Select installation scheme and set base director(y|ies) - ('prefix=', None, - "installation prefix"), - ('exec-prefix=', None, - "(Unix only) prefix for platform-specific files"), - ('home=', None, - "(Unix only) home directory to install under"), - + ('prefix=', None, "installation prefix"), + ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"), + ('home=', None, "(Unix only) home directory to install under"), # Or, just set the base director(y|ies) - ('install-base=', None, - "base installation directory (instead of --prefix or --home)"), - ('install-platbase=', None, - "base installation directory for platform-specific files " + - "(instead of --exec-prefix or --home)"), - ('root=', None, - "install everything relative to this alternate root directory"), - + ( + 'install-base=', + None, + "base installation directory (instead of --prefix or --home)", + ), + ( + 'install-platbase=', + None, + "base installation directory for platform-specific files " + + "(instead of --exec-prefix or --home)", + ), + ('root=', None, "install everything relative to this alternate root directory"), # Or, explicitly set the installation scheme - ('install-purelib=', None, - "installation directory for pure Python module distributions"), - ('install-platlib=', None, - "installation directory for non-pure module distributions"), - ('install-lib=', None, - "installation directory for all module distributions " + - "(overrides --install-purelib and --install-platlib)"), - - ('install-headers=', None, - "installation directory for C/C++ headers"), - ('install-scripts=', None, - "installation directory for Python scripts"), - ('install-data=', None, - "installation directory for data files"), - + ( + 'install-purelib=', + None, + "installation directory for pure Python module distributions", + ), + ( + 'install-platlib=', + None, + "installation directory for non-pure module distributions", + ), + ( + 'install-lib=', + None, + "installation directory for all module distributions " + + "(overrides --install-purelib and --install-platlib)", + ), + ('install-headers=', None, "installation directory for C/C++ headers"), + ('install-scripts=', None, "installation directory for Python scripts"), + ('install-data=', None, "installation directory for 
data files"), # Byte-compilation options -- see install_lib.py for details, as # these are duplicated from there (but only install_lib does # anything with them). ('compile', 'c', "compile .py to .pyc [default]"), ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - + ( + 'optimize=', + 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", + ), # Miscellaneous control options - ('force', 'f', - "force installation (overwrite any existing files)"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - + ('force', 'f', "force installation (overwrite any existing files)"), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), # Where to install documentation (eventually!) - #('doc-format=', None, "format of documentation to generate"), - #('install-man=', None, "directory for Unix man pages"), - #('install-html=', None, "directory for HTML documentation"), - #('install-info=', None, "directory for GNU info files"), - - ('record=', None, - "filename in which to record list of installed files"), - ] + # ('doc-format=', None, "format of documentation to generate"), + # ('install-man=', None, "directory for Unix man pages"), + # ('install-html=', None, "directory for HTML documentation"), + # ('install-info=', None, "directory for GNU info files"), + ('record=', None, "filename in which to record list of installed files"), + ] boolean_options = ['compile', 'force', 'skip-build'] if HAS_USER_SITE: - user_options.append(('user', None, - "install in user site-package '%s'" % USER_SITE)) + user_options.append( + ('user', None, "install in user site-package '%s'" % USER_SITE) + ) boolean_options.append('user') - negative_opt = {'no-compile' : 'compile'} - + negative_opt = {'no-compile': 'compile'} def initialize_options(self): """Initializes options.""" @@ -271,10 +272,10 @@ def initialize_options(self): # supplied by the user, they are filled in using the installation # scheme implied by prefix/exec-prefix/home and the contents of # that installation scheme. - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib self.install_scripts = None self.install_data = None self.install_userbase = USER_BASE @@ -316,20 +317,19 @@ def initialize_options(self): # Not defined yet because we don't know anything about # documentation yet. - #self.install_man = None - #self.install_html = None - #self.install_info = None + # self.install_man = None + # self.install_html = None + # self.install_info = None self.record = None - # -- Option finalizing methods ------------------------------------- # (This is rather more involved than for most commands, # because this is where the policy for installing third- # party Python modules on various platforms given a wide # array of user input is decided. Yes, it's quite complex!) 
- def finalize_options(self): + def finalize_options(self): # noqa: C901 """Finalizes options.""" # This method (and its helpers, like 'finalize_unix()', # 'finalize_other()', and 'select_scheme()') is where the default @@ -345,20 +345,30 @@ def finalize_options(self): # Check for errors/inconsistencies in the options; first, stuff # that's wrong on any platform. - if ((self.prefix or self.exec_prefix or self.home) and - (self.install_base or self.install_platbase)): + if (self.prefix or self.exec_prefix or self.home) and ( + self.install_base or self.install_platbase + ): raise DistutilsOptionError( - "must supply either prefix/exec-prefix/home or " + - "install-base/install-platbase -- not both") + "must supply either prefix/exec-prefix/home or " + + "install-base/install-platbase -- not both" + ) if self.home and (self.prefix or self.exec_prefix): raise DistutilsOptionError( - "must supply either home or prefix/exec-prefix -- not both") + "must supply either home or prefix/exec-prefix -- not both" + ) - if self.user and (self.prefix or self.exec_prefix or self.home or - self.install_base or self.install_platbase): - raise DistutilsOptionError("can't combine user with prefix, " - "exec_prefix/home, or install_(plat)base") + if self.user and ( + self.prefix + or self.exec_prefix + or self.home + or self.install_base + or self.install_platbase + ): + raise DistutilsOptionError( + "can't combine user with prefix, " + "exec_prefix/home, or install_(plat)base" + ) # Next, stuff that's wrong (or dubious) only on certain platforms. if os.name != "posix": @@ -423,7 +433,8 @@ def finalize_options(self): local_vars['usersite'] = self.install_usersite self.config_vars = _collections.DictStack( - [compat_vars, sysconfig.get_config_vars(), local_vars]) + [fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars] + ) self.expand_basedirs() @@ -436,6 +447,7 @@ def finalize_options(self): if DEBUG: from pprint import pprint + print("config vars:") pprint(dict(self.config_vars)) @@ -454,17 +466,23 @@ def finalize_options(self): # module distribution is pure or not. Of course, if the user # already specified install_lib, use their selection. if self.install_lib is None: - if self.distribution.has_ext_modules(): # has extensions: non-pure + if self.distribution.has_ext_modules(): # has extensions: non-pure self.install_lib = self.install_platlib else: self.install_lib = self.install_purelib - # Convert directories from Unix /-separated syntax to the local # convention. - self.convert_paths('lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers', - 'userbase', 'usersite') + self.convert_paths( + 'lib', + 'purelib', + 'platlib', + 'scripts', + 'data', + 'headers', + 'userbase', + 'usersite', + ) # Deprecated # Well, we're not actually fully completely finalized yet: we still @@ -472,21 +490,22 @@ def finalize_options(self): # non-packagized module distributions (hello, Numerical Python!) to # get their own directories. self.handle_extra_path() - self.install_libbase = self.install_lib # needed for .pth file + self.install_libbase = self.install_lib # needed for .pth file self.install_lib = os.path.join(self.install_lib, self.extra_dirs) # If a new root directory was supplied, make all the installation # dirs relative to it. 
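When --root is supplied, the change_roots call below re-bases each computed directory under the alternate root via distutils.util.change_root; on POSIX this simply grafts the absolute path onto the new root. A small sketch:

from distutils.util import change_root

change_root('/tmp/stage', '/usr/local/lib/python3.11/site-packages')
# POSIX result: '/tmp/stage/usr/local/lib/python3.11/site-packages'
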
if self.root is not None: - self.change_roots('libbase', 'lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') + self.change_roots( + 'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers' + ) self.dump_dirs("after prepending root") # Find out the build directories, ie. where to install from. - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib')) + self.set_undefined_options( + 'build', ('build_base', 'build_base'), ('build_lib', 'build_lib') + ) # Punt on doc directories for now -- after all, we're punting on # documentation completely! @@ -495,7 +514,8 @@ def dump_dirs(self, msg): """Dumps the list of user options.""" if not DEBUG: return - from distutils.fancy_getopt import longopt_xlate + from ..fancy_getopt import longopt_xlate + log.debug(msg + ":") for opt in self.user_options: opt_name = opt[0] @@ -515,24 +535,24 @@ def finalize_unix(self): if self.install_base is not None or self.install_platbase is not None: incomplete_scheme = ( ( - self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None - ) or - self.install_headers is None or - self.install_scripts is None or - self.install_data is None + self.install_lib is None + and self.install_purelib is None + and self.install_platlib is None + ) + or self.install_headers is None + or self.install_scripts is None + or self.install_data is None ) if incomplete_scheme: raise DistutilsOptionError( - "install-base or install-platbase supplied, but " - "installation scheme is incomplete") + "install-base or install-platbase supplied, but " + "installation scheme is incomplete" + ) return if self.user: if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") + raise DistutilsPlatformError("User base directory is not specified") self.install_base = self.install_platbase = self.install_userbase self.select_scheme("posix_user") elif self.home is not None: @@ -542,15 +562,14 @@ def finalize_unix(self): if self.prefix is None: if self.exec_prefix is not None: raise DistutilsOptionError( - "must not supply exec-prefix without prefix") + "must not supply exec-prefix without prefix" + ) # Allow Fedora to add components to the prefix _prefix_addition = getattr(sysconfig, '_prefix_addition', "") - self.prefix = ( - os.path.normpath(sys.prefix) + _prefix_addition) - self.exec_prefix = ( - os.path.normpath(sys.exec_prefix) + _prefix_addition) + self.prefix = os.path.normpath(sys.prefix) + _prefix_addition + self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition else: if self.exec_prefix is None: @@ -564,8 +583,7 @@ def finalize_other(self): """Finalizes options for non-posix platforms""" if self.user: if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") + raise DistutilsPlatformError("User base directory is not specified") self.install_base = self.install_platbase = self.install_userbase self.select_scheme(os.name + "_user") elif self.home is not None: @@ -580,7 +598,8 @@ def finalize_other(self): self.select_scheme(os.name) except KeyError: raise DistutilsPlatformError( - "I don't know how to install stuff on '%s'" % os.name) + "I don't know how to install stuff on '%s'" % os.name + ) def select_scheme(self, name): _select_scheme(self, name) @@ -589,7 +608,7 @@ def _expand_attrs(self, attrs): for attr in attrs: val = getattr(self, attr) if val is not None: - if os.name == 'posix' or os.name == 'nt': + if os.name in ('posix', 
'nt'): val = os.path.expanduser(val) val = subst_vars(val, self.config_vars) setattr(self, attr, val) @@ -601,9 +620,16 @@ def expand_basedirs(self): def expand_dirs(self): """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) + self._expand_attrs( + [ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ] + ) def convert_paths(self, *names): """Call `convert_path` over `names`.""" @@ -617,7 +643,7 @@ def handle_extra_path(self): self.extra_path = self.distribution.extra_path if self.extra_path is not None: - log.warn( + log.warning( "Distribution option extra_path is deprecated. " "See issue27919 for details." ) @@ -630,8 +656,9 @@ def handle_extra_path(self): path_file, extra_dirs = self.extra_path else: raise DistutilsOptionError( - "'extra_path' option must be a list, tuple, or " - "comma-separated string with 1 or 2 elements") + "'extra_path' option must be a list, tuple, or " + "comma-separated string with 1 or 2 elements" + ) # convert to local form in case Unix notation used (as it # should be in setup scripts) @@ -674,8 +701,7 @@ def run(self): # internally, and not to sys.path, so we don't check the platform # matches what we are running. if self.warn_dir and build_plat != get_platform(): - raise DistutilsPlatformError("Can't install when " - "cross-compiling") + raise DistutilsPlatformError("Can't install when " "cross-compiling") # Run all sub-commands (at least those that need to be run) for cmd_name in self.get_sub_commands(): @@ -687,38 +713,43 @@ def run(self): # write list of installed files, if requested. if self.record: outputs = self.get_outputs() - if self.root: # strip any package prefix + if self.root: # strip any package prefix root_len = len(self.root) for counter in range(len(outputs)): outputs[counter] = outputs[counter][root_len:] - self.execute(write_file, - (self.record, outputs), - "writing list of installed files to '%s'" % - self.record) + self.execute( + write_file, + (self.record, outputs), + "writing list of installed files to '%s'" % self.record, + ) sys_path = map(os.path.normpath, sys.path) sys_path = map(os.path.normcase, sys_path) install_lib = os.path.normcase(os.path.normpath(self.install_lib)) - if (self.warn_dir and - not (self.path_file and self.install_path_file) and - install_lib not in sys_path): - log.debug(("modules installed to '%s', which is not in " - "Python's module search path (sys.path) -- " - "you'll have to change the search path yourself"), - self.install_lib) + if ( + self.warn_dir + and not (self.path_file and self.install_path_file) + and install_lib not in sys_path + ): + log.debug( + ( + "modules installed to '%s', which is not in " + "Python's module search path (sys.path) -- " + "you'll have to change the search path yourself" + ), + self.install_lib, + ) def create_path_file(self): """Creates the .pth file""" - filename = os.path.join(self.install_libbase, - self.path_file + ".pth") + filename = os.path.join(self.install_libbase, self.path_file + ".pth") if self.install_path_file: - self.execute(write_file, - (filename, [self.extra_dirs]), - "creating %s" % filename) + self.execute( + write_file, (filename, [self.extra_dirs]), "creating %s" % filename + ) else: self.warn("path file '%s' not created" % filename) - # -- Reporting methods --------------------------------------------- def get_outputs(self): @@ -733,8 +764,7 @@ def 
get_outputs(self): outputs.append(filename) if self.path_file and self.install_path_file: - outputs.append(os.path.join(self.install_libbase, - self.path_file + ".pth")) + outputs.append(os.path.join(self.install_libbase, self.path_file + ".pth")) return outputs @@ -753,8 +783,9 @@ def get_inputs(self): def has_lib(self): """Returns true if the current distribution has any Python modules to install.""" - return (self.distribution.has_pure_modules() or - self.distribution.has_ext_modules()) + return ( + self.distribution.has_pure_modules() or self.distribution.has_ext_modules() + ) def has_headers(self): """Returns true if the current distribution has any headers to @@ -773,9 +804,10 @@ def has_data(self): # 'sub_commands': a list of commands this command might have to run to # get its work done. See cmd.py for more info. - sub_commands = [('install_lib', has_lib), - ('install_headers', has_headers), - ('install_scripts', has_scripts), - ('install_data', has_data), - ('install_egg_info', lambda self:True), - ] + sub_commands = [ + ('install_lib', has_lib), + ('install_headers', has_headers), + ('install_scripts', has_scripts), + ('install_data', has_data), + ('install_egg_info', lambda self: True), + ] diff --git a/setuptools/_distutils/command/install_data.py b/setuptools/_distutils/command/install_data.py index 947cd76a99..7ba35eef82 100644 --- a/setuptools/_distutils/command/install_data.py +++ b/setuptools/_distutils/command/install_data.py @@ -6,21 +6,23 @@ # contributed by Bastian Kleineidam import os -from distutils.core import Command -from distutils.util import change_root, convert_path +from ..core import Command +from ..util import change_root, convert_path -class install_data(Command): +class install_data(Command): description = "install data files" user_options = [ - ('install-dir=', 'd', - "base directory for installing data files " - "(default: installation base dir)"), - ('root=', None, - "install everything relative to this alternate root directory"), + ( + 'install-dir=', + 'd', + "base directory for installing data files " + "(default: installation base dir)", + ), + ('root=', None, "install everything relative to this alternate root directory"), ('force', 'f', "force installation (overwrite existing files)"), - ] + ] boolean_options = ['force'] @@ -33,11 +35,12 @@ def initialize_options(self): self.warn_dir = 1 def finalize_options(self): - self.set_undefined_options('install', - ('install_data', 'install_dir'), - ('root', 'root'), - ('force', 'force'), - ) + self.set_undefined_options( + 'install', + ('install_data', 'install_dir'), + ('root', 'root'), + ('force', 'force'), + ) def run(self): self.mkpath(self.install_dir) @@ -46,9 +49,10 @@ def run(self): # it's a simple file, so copy it f = convert_path(f) if self.warn_dir: - self.warn("setup script did not provide a directory for " - "'%s' -- installing right in '%s'" % - (f, self.install_dir)) + self.warn( + "setup script did not provide a directory for " + "'%s' -- installing right in '%s'" % (f, self.install_dir) + ) (out, _) = self.copy_file(f, self.install_dir) self.outfiles.append(out) else: diff --git a/setuptools/_distutils/command/install_egg_info.py b/setuptools/_distutils/command/install_egg_info.py index adc0323f98..f3e8f3447d 100644 --- a/setuptools/_distutils/command/install_egg_info.py +++ b/setuptools/_distutils/command/install_egg_info.py @@ -1,12 +1,18 @@ -"""distutils.command.install_egg_info +""" +distutils.command.install_egg_info Implements the Distutils 'install_egg_info' command, for installing 
-a package's PKG-INFO metadata.""" +a package's PKG-INFO metadata. +""" +import os +import sys +import re + +from ..cmd import Command +from .. import dir_util +from .._log import log -from distutils.cmd import Command -from distutils import log, dir_util -import os, sys, re class install_egg_info(Command): """Install an .egg-info file for the package""" @@ -28,11 +34,11 @@ def basename(self): return "%s-%s-py%d.%d.egg-info" % ( to_filename(safe_name(self.distribution.get_name())), to_filename(safe_version(self.distribution.get_version())), - *sys.version_info[:2] + *sys.version_info[:2], ) def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) + self.set_undefined_options('install_lib', ('install_dir', 'install_dir')) self.target = os.path.join(self.install_dir, self.basename) self.outputs = [self.target] @@ -41,10 +47,11 @@ def run(self): if os.path.isdir(target) and not os.path.islink(target): dir_util.remove_tree(target, dry_run=self.dry_run) elif os.path.exists(target): - self.execute(os.unlink,(self.target,),"Removing "+target) + self.execute(os.unlink, (self.target,), "Removing " + target) elif not os.path.isdir(self.install_dir): - self.execute(os.makedirs, (self.install_dir,), - "Creating "+self.install_dir) + self.execute( + os.makedirs, (self.install_dir,), "Creating " + self.install_dir + ) log.info("Writing %s", target) if not self.dry_run: with open(target, 'w', encoding='UTF-8') as f: @@ -58,6 +65,7 @@ def get_outputs(self): # can be replaced by importing them from pkg_resources once it is included # in the stdlib. + def safe_name(name): """Convert an arbitrary string to a standard distribution name @@ -72,7 +80,7 @@ def safe_version(version): Spaces become dots, and all other non-alphanumeric characters become dashes, with runs of multiple dashes condensed to a single dash. """ - version = version.replace(' ','.') + version = version.replace(' ', '.') return re.sub('[^A-Za-z0-9.]+', '-', version) @@ -81,4 +89,4 @@ def to_filename(name): Any '-' characters are currently replaced with '_'. 
""" - return name.replace('-','_') + return name.replace('-', '_') diff --git a/setuptools/_distutils/command/install_headers.py b/setuptools/_distutils/command/install_headers.py index 9bb0b18dc0..085272c1a2 100644 --- a/setuptools/_distutils/command/install_headers.py +++ b/setuptools/_distutils/command/install_headers.py @@ -3,19 +3,17 @@ Implements the Distutils 'install_headers' command, to install C/C++ header files to the Python include directory.""" -from distutils.core import Command +from ..core import Command # XXX force is never used class install_headers(Command): - description = "install C/C++ header files" - user_options = [('install-dir=', 'd', - "directory to install header files to"), - ('force', 'f', - "force installation (overwrite existing files)"), - ] + user_options = [ + ('install-dir=', 'd', "directory to install header files to"), + ('force', 'f', "force installation (overwrite existing files)"), + ] boolean_options = ['force'] @@ -25,10 +23,9 @@ def initialize_options(self): self.outfiles = [] def finalize_options(self): - self.set_undefined_options('install', - ('install_headers', 'install_dir'), - ('force', 'force')) - + self.set_undefined_options( + 'install', ('install_headers', 'install_dir'), ('force', 'force') + ) def run(self): headers = self.distribution.headers diff --git a/setuptools/_distutils/command/install_lib.py b/setuptools/_distutils/command/install_lib.py index 6154cf0943..be4c243321 100644 --- a/setuptools/_distutils/command/install_lib.py +++ b/setuptools/_distutils/command/install_lib.py @@ -7,15 +7,15 @@ import importlib.util import sys -from distutils.core import Command -from distutils.errors import DistutilsOptionError +from ..core import Command +from ..errors import DistutilsOptionError # Extension for Python source files. PYTHON_SOURCE_EXTENSION = ".py" -class install_lib(Command): +class install_lib(Command): description = "install all Python modules (extensions and pure Python)" # The byte-compilation options are a tad confusing. Here are the @@ -35,18 +35,21 @@ class install_lib(Command): user_options = [ ('install-dir=', 'd', "directory to install to"), - ('build-dir=','b', "build directory (where to install from)"), + ('build-dir=', 'b', "build directory (where to install from)"), ('force', 'f', "force installation (overwrite existing files)"), ('compile', 'c', "compile .py to .pyc [default]"), ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ( + 'optimize=', + 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", + ), ('skip-build', None, "skip the build steps"), - ] + ] boolean_options = ['force', 'compile', 'skip-build'] - negative_opt = {'no-compile' : 'compile'} + negative_opt = {'no-compile': 'compile'} def initialize_options(self): # let the 'install' command dictate our installation directory @@ -61,14 +64,15 @@ def finalize_options(self): # Get all the information we need to install pure Python modules # from the umbrella 'install' command -- build (source) directory, # install (target) directory, and whether to compile .py files. 
- self.set_undefined_options('install', - ('build_lib', 'build_dir'), - ('install_lib', 'install_dir'), - ('force', 'force'), - ('compile', 'compile'), - ('optimize', 'optimize'), - ('skip_build', 'skip_build'), - ) + self.set_undefined_options( + 'install', + ('build_lib', 'build_dir'), + ('install_lib', 'install_dir'), + ('force', 'force'), + ('compile', 'compile'), + ('optimize', 'optimize'), + ('skip_build', 'skip_build'), + ) if self.compile is None: self.compile = True @@ -110,8 +114,9 @@ def install(self): if os.path.isdir(self.build_dir): outfiles = self.copy_tree(self.build_dir, self.install_dir) else: - self.warn("'%s' does not exist -- no Python modules to install" % - self.build_dir) + self.warn( + "'%s' does not exist -- no Python modules to install" % self.build_dir + ) return return outfiles @@ -120,7 +125,7 @@ def byte_compile(self, files): self.warn('byte-compiling is disabled, skipping.') return - from distutils.util import byte_compile + from ..util import byte_compile # Get the "--root" directory supplied to the "install" command, # and use it as a prefix to strip off the purported filename @@ -129,14 +134,22 @@ def byte_compile(self, files): install_root = self.get_finalized_command('install').root if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=install_root, - dry_run=self.dry_run) + byte_compile( + files, + optimize=0, + force=self.force, + prefix=install_root, + dry_run=self.dry_run, + ) if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=install_root, - verbose=self.verbose, dry_run=self.dry_run) - + byte_compile( + files, + optimize=self.optimize, + force=self.force, + prefix=install_root, + verbose=self.verbose, + dry_run=self.dry_run, + ) # -- Utility methods ----------------------------------------------- @@ -165,15 +178,18 @@ def _bytecode_filenames(self, py_filenames): if ext != PYTHON_SOURCE_EXTENSION: continue if self.compile: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization='')) + bytecode_files.append( + importlib.util.cache_from_source(py_file, optimization='') + ) if self.optimize > 0: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization=self.optimize)) + bytecode_files.append( + importlib.util.cache_from_source( + py_file, optimization=self.optimize + ) + ) return bytecode_files - # -- External interface -------------------------------------------- # (called by outsiders) @@ -182,19 +198,23 @@ def get_outputs(self): were actually run. Not affected by the "dry-run" flag or whether modules have actually been built yet. 
""" - pure_outputs = \ - self._mutate_outputs(self.distribution.has_pure_modules(), - 'build_py', 'build_lib', - self.install_dir) + pure_outputs = self._mutate_outputs( + self.distribution.has_pure_modules(), + 'build_py', + 'build_lib', + self.install_dir, + ) if self.compile: bytecode_outputs = self._bytecode_filenames(pure_outputs) else: bytecode_outputs = [] - ext_outputs = \ - self._mutate_outputs(self.distribution.has_ext_modules(), - 'build_ext', 'build_lib', - self.install_dir) + ext_outputs = self._mutate_outputs( + self.distribution.has_ext_modules(), + 'build_ext', + 'build_lib', + self.install_dir, + ) return pure_outputs + bytecode_outputs + ext_outputs diff --git a/setuptools/_distutils/command/install_scripts.py b/setuptools/_distutils/command/install_scripts.py index 31a1130ee5..20f07aaa27 100644 --- a/setuptools/_distutils/command/install_scripts.py +++ b/setuptools/_distutils/command/install_scripts.py @@ -6,18 +6,17 @@ # contributed by Bastian Kleineidam import os -from distutils.core import Command -from distutils import log +from ..core import Command +from distutils._log import log from stat import ST_MODE class install_scripts(Command): - description = "install scripts (Python or otherwise)" user_options = [ ('install-dir=', 'd', "directory to install scripts to"), - ('build-dir=','b', "build directory (where to install from)"), + ('build-dir=', 'b', "build directory (where to install from)"), ('force', 'f', "force installation (overwrite existing files)"), ('skip-build', None, "skip the build steps"), ] @@ -32,11 +31,12 @@ def initialize_options(self): def finalize_options(self): self.set_undefined_options('build', ('build_scripts', 'build_dir')) - self.set_undefined_options('install', - ('install_scripts', 'install_dir'), - ('force', 'force'), - ('skip_build', 'skip_build'), - ) + self.set_undefined_options( + 'install', + ('install_scripts', 'install_dir'), + ('force', 'force'), + ('skip_build', 'skip_build'), + ) def run(self): if not self.skip_build: diff --git a/setuptools/_distutils/command/py37compat.py b/setuptools/_distutils/command/py37compat.py index 754715a508..aa0c0a7fcd 100644 --- a/setuptools/_distutils/command/py37compat.py +++ b/setuptools/_distutils/command/py37compat.py @@ -7,12 +7,13 @@ def _pythonlib_compat(): library. See pypa/distutils#9. 
""" from distutils import sysconfig + if not sysconfig.get_config_var('Py_ENABLED_SHARED'): return yield 'python{}.{}{}'.format( sys.hexversion >> 24, - (sys.hexversion >> 16) & 0xff, + (sys.hexversion >> 16) & 0xFF, sysconfig.get_config_var('ABIFLAGS'), ) diff --git a/setuptools/_distutils/command/register.py b/setuptools/_distutils/command/register.py index 0fac94e9e5..c19aabb91f 100644 --- a/setuptools/_distutils/command/register.py +++ b/setuptools/_distutils/command/register.py @@ -7,24 +7,30 @@ import getpass import io -import urllib.parse, urllib.request +import logging +import urllib.parse +import urllib.request from warnings import warn -from distutils.core import PyPIRCCommand -from distutils.errors import * -from distutils import log +from ..core import PyPIRCCommand +from distutils._log import log -class register(PyPIRCCommand): - description = ("register the distribution with the Python package index") +class register(PyPIRCCommand): + description = "register the distribution with the Python package index" user_options = PyPIRCCommand.user_options + [ - ('list-classifiers', None, - 'list the valid Trove classifiers'), - ('strict', None , - 'Will stop the registering if the meta-data are not fully compliant') - ] + ('list-classifiers', None, 'list the valid Trove classifiers'), + ( + 'strict', + None, + 'Will stop the registering if the meta-data are not fully compliant', + ), + ] boolean_options = PyPIRCCommand.boolean_options + [ - 'verify', 'list-classifiers', 'strict'] + 'verify', + 'list-classifiers', + 'strict', + ] sub_commands = [('check', lambda self: True)] @@ -36,8 +42,10 @@ def initialize_options(self): def finalize_options(self): PyPIRCCommand.finalize_options(self) # setting options for the `check` subcommand - check_options = {'strict': ('register', self.strict), - 'restructuredtext': ('register', 1)} + check_options = { + 'strict': ('register', self.strict), + 'restructuredtext': ('register', 1), + } self.distribution.command_options['check'] = check_options def run(self): @@ -57,8 +65,11 @@ def run(self): def check_metadata(self): """Deprecated API.""" - warn("distutils.command.register.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) + warn( + "distutils.command.register.check_metadata is deprecated; " + "use the check command instead", + DeprecationWarning, + ) check = self.distribution.get_command_obj('check') check.ensure_finalized() check.strict = self.strict @@ -66,8 +77,7 @@ def check_metadata(self): check.run() def _set_config(self): - ''' Reads the configuration file and set attributes. - ''' + '''Reads the configuration file and set attributes.''' config = self._read_pypirc() if config != {}: self.username = config['username'] @@ -83,45 +93,43 @@ def _set_config(self): self.has_config = False def classifiers(self): - ''' Fetch the list of classifiers from the server. - ''' - url = self.repository+'?:action=list_classifiers' + '''Fetch the list of classifiers from the server.''' + url = self.repository + '?:action=list_classifiers' response = urllib.request.urlopen(url) log.info(self._read_pypi_response(response)) def verify_metadata(self): - ''' Send the metadata to the package index server to be checked. 
- ''' + '''Send the metadata to the package index server to be checked.''' # send the info to the server and report the result (code, result) = self.post_to_server(self.build_post_data('verify')) log.info('Server response (%s): %s', code, result) - def send_metadata(self): - ''' Send the metadata to the package index server. + def send_metadata(self): # noqa: C901 + '''Send the metadata to the package index server. - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. + Well, do the following: + 1. figure who the user is, and then + 2. send the data as a Basic auth'ed POST. - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: + First we try to read the username/password from $HOME/.pypirc, + which is a ConfigParser-formatted file with a section + [distutils] containing username and password entries (both + in clear text). Eg: - [distutils] - index-servers = - pypi + [distutils] + index-servers = + pypi - [pypi] - username: fred - password: sekrit + [pypi] + username: fred + password: sekrit - Otherwise, to figure who the user is, we offer the user three - choices: + Otherwise, to figure who the user is, we offer the user three + choices: - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. + 1. use existing login, + 2. register as a new user, or + 3. set the password to a random string and email the user. ''' # see if we can short-cut and get the username/password from the @@ -137,13 +145,16 @@ def send_metadata(self): # get the user's login info choices = '1 2 3 4'.split() while choice not in choices: - self.announce('''\ + self.announce( + '''\ We need to know who you are, so please choose either: 1. use your existing login, 2. register as a new user, 3. have the server generate a new password for you (and email it to you), or 4. quit -Your selection [default 1]: ''', log.INFO) +Your selection [default 1]: ''', + logging.INFO, + ) choice = input() if not choice: choice = '1' @@ -162,10 +173,8 @@ def send_metadata(self): host = urllib.parse.urlparse(self.repository)[1] auth.add_password(self.realm, host, username, password) # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('submit'), - auth) - self.announce('Server response (%s): %s' % (code, result), - log.INFO) + code, result = self.post_to_server(self.build_post_data('submit'), auth) + self.announce('Server response ({}): {}'.format(code, result), logging.INFO) # possibly save the login if code == 200: @@ -174,10 +183,17 @@ def send_metadata(self): # so the upload command can reuse it self.distribution.password = password else: - self.announce(('I can store your PyPI login so future ' - 'submissions will be faster.'), log.INFO) - self.announce('(the login will be stored in %s)' % \ - self._get_rc_file(), log.INFO) + self.announce( + ( + 'I can store your PyPI login so future ' + 'submissions will be faster.' 
+ ), + logging.INFO, + ) + self.announce( + '(the login will be stored in %s)' % self._get_rc_file(), + logging.INFO, + ) choice = 'X' while choice.lower() not in 'yn': choice = input('Save your login (y/N)?') @@ -208,8 +224,7 @@ def send_metadata(self): log.info('Server response (%s): %s', code, result) else: log.info('You will receive an email shortly.') - log.info(('Follow the instructions in it to ' - 'complete registration.')) + log.info('Follow the instructions in it to ' 'complete registration.') elif choice == '3': data = {':action': 'password_reset'} data['email'] = '' @@ -224,7 +239,7 @@ def build_post_data(self, action): meta = self.distribution.metadata data = { ':action': action, - 'metadata_version' : '1.0', + 'metadata_version': '1.0', 'name': meta.get_name(), 'version': meta.get_version(), 'summary': meta.get_description(), @@ -246,13 +261,13 @@ def build_post_data(self, action): data['metadata_version'] = '1.1' return data - def post_to_server(self, data, auth=None): - ''' Post a query to the server, and return a string response. - ''' + def post_to_server(self, data, auth=None): # noqa: C901 + '''Post a query to the server, and return a string response.''' if 'name' in data: - self.announce('Registering %s to %s' % (data['name'], - self.repository), - log.INFO) + self.announce( + 'Registering {} to {}'.format(data['name'], self.repository), + logging.INFO, + ) # Build up the MIME payload for the urllib2 POST data boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' sep_boundary = '\n--' + boundary @@ -260,12 +275,12 @@ def post_to_server(self, data, auth=None): body = io.StringIO() for key, value in data.items(): # handle multiple entries for the same name - if type(value) not in (type([]), type( () )): + if type(value) not in (type([]), type(())): value = [value] for value in value: value = str(value) body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) + body.write('\nContent-Disposition: form-data; name="%s"' % key) body.write("\n\n") body.write(value) if value and value[-1] == '\r': @@ -276,8 +291,9 @@ def post_to_server(self, data, auth=None): # build the Request headers = { - 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, - 'Content-length': str(len(body)) + 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8' + % boundary, + 'Content-length': str(len(body)), } req = urllib.request.Request(self.repository, body, headers) @@ -300,5 +316,5 @@ def post_to_server(self, data, auth=None): result = 200, 'OK' if self.show_response: msg = '\n'.join(('-' * 75, data, '-' * 75)) - self.announce(msg, log.INFO) + self.announce(msg, logging.INFO) return result diff --git a/setuptools/_distutils/command/sdist.py b/setuptools/_distutils/command/sdist.py index b4996fcb1d..ac489726ca 100644 --- a/setuptools/_distutils/command/sdist.py +++ b/setuptools/_distutils/command/sdist.py @@ -7,34 +7,32 @@ from glob import glob from warnings import warn -from distutils.core import Command +from ..core import Command from distutils import dir_util from distutils import file_util from distutils import archive_util -from distutils.text_file import TextFile -from distutils.filelist import FileList -from distutils import log -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsOptionError +from ..text_file import TextFile +from ..filelist import FileList +from distutils._log import log +from ..util import convert_path +from ..errors import DistutilsOptionError, 
DistutilsTemplateError def show_formats(): """Print all possible values for the 'formats' option (used by the "--help-formats" command-line option). """ - from distutils.fancy_getopt import FancyGetopt - from distutils.archive_util import ARCHIVE_FORMATS + from ..fancy_getopt import FancyGetopt + from ..archive_util import ARCHIVE_FORMATS + formats = [] for format in ARCHIVE_FORMATS.keys(): - formats.append(("formats=" + format, None, - ARCHIVE_FORMATS[format][2])) + formats.append(("formats=" + format, None, ARCHIVE_FORMATS[format][2])) formats.sort() - FancyGetopt(formats).print_help( - "List of available source distribution formats:") + FancyGetopt(formats).print_help("List of available source distribution formats:") class sdist(Command): - description = "create a source distribution (tarball, zip file, etc.)" def checking_metadata(self): @@ -44,55 +42,77 @@ def checking_metadata(self): return self.metadata_check user_options = [ - ('template=', 't', - "name of manifest template file [default: MANIFEST.in]"), - ('manifest=', 'm', - "name of manifest file [default: MANIFEST]"), - ('use-defaults', None, - "include the default file set in the manifest " - "[default; disable with --no-defaults]"), - ('no-defaults', None, - "don't include the default file set"), - ('prune', None, - "specifically exclude files/directories that should not be " - "distributed (build tree, RCS/CVS dirs, etc.) " - "[default; disable with --no-prune]"), - ('no-prune', None, - "don't automatically exclude anything"), - ('manifest-only', 'o', - "just regenerate the manifest and then stop " - "(implies --force-manifest)"), - ('force-manifest', 'f', - "forcibly regenerate the manifest and carry on as usual. " - "Deprecated: now the manifest is always regenerated."), - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('metadata-check', None, - "Ensure that all required elements of meta-data " - "are supplied. Warn if any missing. [default]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ] - - boolean_options = ['use-defaults', 'prune', - 'manifest-only', 'force-manifest', - 'keep-temp', 'metadata-check'] + ('template=', 't', "name of manifest template file [default: MANIFEST.in]"), + ('manifest=', 'm', "name of manifest file [default: MANIFEST]"), + ( + 'use-defaults', + None, + "include the default file set in the manifest " + "[default; disable with --no-defaults]", + ), + ('no-defaults', None, "don't include the default file set"), + ( + 'prune', + None, + "specifically exclude files/directories that should not be " + "distributed (build tree, RCS/CVS dirs, etc.) " + "[default; disable with --no-prune]", + ), + ('no-prune', None, "don't automatically exclude anything"), + ( + 'manifest-only', + 'o', + "just regenerate the manifest and then stop " "(implies --force-manifest)", + ), + ( + 'force-manifest', + 'f', + "forcibly regenerate the manifest and carry on as usual. 
" + "Deprecated: now the manifest is always regenerated.", + ), + ('formats=', None, "formats for source distribution (comma-separated list)"), + ( + 'keep-temp', + 'k', + "keep the distribution tree around after creating " + "archive file(s)", + ), + ( + 'dist-dir=', + 'd', + "directory to put the source distribution archive(s) in " "[default: dist]", + ), + ( + 'metadata-check', + None, + "Ensure that all required elements of meta-data " + "are supplied. Warn if any missing. [default]", + ), + ( + 'owner=', + 'u', + "Owner name used when creating a tar file [default: current user]", + ), + ( + 'group=', + 'g', + "Group name used when creating a tar file [default: current group]", + ), + ] + + boolean_options = [ + 'use-defaults', + 'prune', + 'manifest-only', + 'force-manifest', + 'keep-temp', + 'metadata-check', + ] help_options = [ - ('help-formats', None, - "list available distribution formats", show_formats), - ] + ('help-formats', None, "list available distribution formats", show_formats), + ] - negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune' } + negative_opt = {'no-defaults': 'use-defaults', 'no-prune': 'prune'} sub_commands = [('check', checking_metadata)] @@ -131,8 +151,7 @@ def finalize_options(self): bad_format = archive_util.check_archive_formats(self.formats) if bad_format: - raise DistutilsOptionError( - "unknown archive format '%s'" % bad_format) + raise DistutilsOptionError("unknown archive format '%s'" % bad_format) if self.dist_dir is None: self.dist_dir = "dist" @@ -161,8 +180,11 @@ def run(self): def check_metadata(self): """Deprecated API.""" - warn("distutils.command.sdist.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) + warn( + "distutils.command.sdist.check_metadata is deprecated, \ + use the check command instead", + PendingDeprecationWarning, + ) check = self.distribution.get_command_obj('check') check.ensure_finalized() check.run() @@ -189,9 +211,10 @@ def get_file_list(self): return if not template_exists: - self.warn(("manifest template '%s' does not exist " + - "(using default file list)") % - self.template) + self.warn( + ("manifest template '%s' does not exist " + "(using default file list)") + % self.template + ) self.filelist.findall() if self.use_defaults: @@ -211,7 +234,7 @@ def add_defaults(self): """Add all the default files to self.filelist: - README or README.txt - setup.py - - test/test*.py + - tests/test*.py and test/test*.py - all pure Python modules mentioned in setup script - all files pointed by package_data (build_py) - all files defined in data_files. @@ -259,8 +282,9 @@ def _add_defaults_standards(self): break if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) + self.warn( + "standard file not found: should have one of " + ', '.join(alts) + ) else: if self._cs_path_exists(fn): self.filelist.append(fn) @@ -268,7 +292,7 @@ def _add_defaults_standards(self): self.warn("standard file '%s' not found" % fn) def _add_defaults_optional(self): - optional = ['test/test*.py', 'setup.cfg'] + optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg'] for pattern in optional: files = filter(os.path.isfile, glob(pattern)) self.filelist.extend(files) @@ -328,14 +352,20 @@ def read_template(self): 'self.filelist', which updates itself accordingly. 
""" log.info("reading manifest template '%s'", self.template) - template = TextFile(self.template, strip_comments=1, skip_blanks=1, - join_lines=1, lstrip_ws=1, rstrip_ws=1, - collapse_join=1) + template = TextFile( + self.template, + strip_comments=1, + skip_blanks=1, + join_lines=1, + lstrip_ws=1, + rstrip_ws=1, + collapse_join=1, + ) try: while True: line = template.readline() - if line is None: # end of file + if line is None: # end of file break try: @@ -344,9 +374,10 @@ def read_template(self): # malformed lines, or a ValueError from the lower-level # convert_path function except (DistutilsTemplateError, ValueError) as msg: - self.warn("%s, line %d: %s" % (template.filename, - template.current_line, - msg)) + self.warn( + "%s, line %d: %s" + % (template.filename, template.current_line, msg) + ) finally: template.close() @@ -369,9 +400,8 @@ def prune_file_list(self): else: seps = '/' - vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', - '_darcs'] - vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) + vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs'] + vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps) self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) def write_manifest(self): @@ -380,14 +410,19 @@ def write_manifest(self): named by 'self.manifest'. """ if self._manifest_is_not_generated(): - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) + log.info( + "not writing to manually maintained " + "manifest file '%s'" % self.manifest + ) return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') - self.execute(file_util.write_file, (self.manifest, content), - "writing manifest file '%s'" % self.manifest) + self.execute( + file_util.write_file, + (self.manifest, content), + "writing manifest file '%s'" % self.manifest, + ) def _manifest_is_not_generated(self): # check for special comment used in 3.1.3 and higher @@ -437,20 +472,20 @@ def make_release_tree(self, base_dir, files): # out-of-date, because by default we blow away 'base_dir' when # we're done making the distribution archives.) - if hasattr(os, 'link'): # can make hard links on this system + if hasattr(os, 'link'): # can make hard links on this system link = 'hard' msg = "making hard links in %s..." % base_dir - else: # nope, have to copy + else: # nope, have to copy link = None msg = "copying files to %s..." 
% base_dir if not files: - log.warn("no files to distribute -- empty manifest?") + log.warning("no files to distribute -- empty manifest?") else: log.info(msg) for file in files: if not os.path.isfile(file): - log.warn("'%s' not a regular file -- skipping", file) + log.warning("'%s' not a regular file -- skipping", file) else: dest = os.path.join(base_dir, file) self.copy_file(file, dest, link=link) @@ -471,14 +506,15 @@ def make_distribution(self): base_name = os.path.join(self.dist_dir, base_dir) self.make_release_tree(base_dir, self.filelist.files) - archive_files = [] # remember names of files we create + archive_files = [] # remember names of files we create # tar archive must be created last to avoid overwrite and remove if 'tar' in self.formats: self.formats.append(self.formats.pop(self.formats.index('tar'))) for fmt in self.formats: - file = self.make_archive(base_name, fmt, base_dir=base_dir, - owner=self.owner, group=self.group) + file = self.make_archive( + base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group + ) archive_files.append(file) self.distribution.dist_files.append(('sdist', '', file)) diff --git a/setuptools/_distutils/command/upload.py b/setuptools/_distutils/command/upload.py index 95e9fda186..caf15f04a6 100644 --- a/setuptools/_distutils/command/upload.py +++ b/setuptools/_distutils/command/upload.py @@ -8,13 +8,13 @@ import os import io import hashlib +import logging from base64 import standard_b64encode from urllib.request import urlopen, Request, HTTPError from urllib.parse import urlparse -from distutils.errors import DistutilsError, DistutilsOptionError -from distutils.core import PyPIRCCommand -from distutils.spawn import spawn -from distutils import log +from ..errors import DistutilsError, DistutilsOptionError +from ..core import PyPIRCCommand +from ..spawn import spawn # PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) @@ -27,14 +27,12 @@ class upload(PyPIRCCommand): - description = "upload binary package to PyPI" user_options = PyPIRCCommand.user_options + [ - ('sign', 's', - 'sign files to upload using gpg'), + ('sign', 's', 'sign files to upload using gpg'), ('identity=', 'i', 'GPG identity used to sign files'), - ] + ] boolean_options = PyPIRCCommand.boolean_options + ['sign'] @@ -49,9 +47,7 @@ def initialize_options(self): def finalize_options(self): PyPIRCCommand.finalize_options(self) if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) + raise DistutilsOptionError("Must use --sign for --identity to have meaning") config = self._read_pypirc() if config != {}: self.username = config['username'] @@ -66,16 +62,17 @@ def finalize_options(self): def run(self): if not self.distribution.dist_files: - msg = ("Must create and upload files in one command " - "(e.g. setup.py sdist upload)") + msg = ( + "Must create and upload files in one command " + "(e.g. 
setup.py sdist upload)" + ) raise DistutilsOptionError(msg) for command, pyversion, filename in self.distribution.dist_files: self.upload_file(command, pyversion, filename) - def upload_file(self, command, pyversion, filename): + def upload_file(self, command, pyversion, filename): # noqa: C901 # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) + schema, netloc, url, params, query, fragments = urlparse(self.repository) if params or query or fragments: raise AssertionError("Incompatible url %s" % self.repository) @@ -87,12 +84,11 @@ def upload_file(self, command, pyversion, filename): gpg_args = ["gpg", "--detach-sign", "-a", filename] if self.identity: gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) + spawn(gpg_args, dry_run=self.dry_run) # Fill in the data - send all the meta-data in case we need to # register a new release - f = open(filename,'rb') + f = open(filename, 'rb') try: content = f.read() finally: @@ -103,16 +99,13 @@ def upload_file(self, command, pyversion, filename): # action ':action': 'file_upload', 'protocol_version': '1', - # identify release 'name': meta.get_name(), 'version': meta.get_version(), - # file content - 'content': (os.path.basename(filename),content), + 'content': (os.path.basename(filename), content), 'filetype': command, 'pyversion': pyversion, - # additional meta-data 'metadata_version': '1.0', 'summary': meta.get_description(), @@ -129,7 +122,7 @@ def upload_file(self, command, pyversion, filename): 'provides': meta.get_provides(), 'requires': meta.get_requires(), 'obsoletes': meta.get_obsoletes(), - } + } data['comment'] = '' @@ -145,8 +138,7 @@ def upload_file(self, command, pyversion, filename): if self.sign: with open(filename + ".asc", "rb") as f: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - f.read()) + data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read()) # set up the authentication user_pass = (self.username + ":" + self.password).encode('ascii') @@ -177,8 +169,8 @@ def upload_file(self, command, pyversion, filename): body.write(end_boundary) body = body.getvalue() - msg = "Submitting %s to %s" % (filename, self.repository) - self.announce(msg, log.INFO) + msg = "Submitting {} to {}".format(filename, self.repository) + self.announce(msg, logging.INFO) # build the Request headers = { @@ -187,8 +179,7 @@ def upload_file(self, command, pyversion, filename): 'Authorization': auth, } - request = Request(self.repository, data=body, - headers=headers) + request = Request(self.repository, data=body, headers=headers) # send the data try: result = urlopen(request) @@ -198,17 +189,18 @@ def upload_file(self, command, pyversion, filename): status = e.code reason = e.msg except OSError as e: - self.announce(str(e), log.ERROR) + self.announce(str(e), logging.ERROR) raise if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - log.INFO) + self.announce( + 'Server response ({}): {}'.format(status, reason), logging.INFO + ) if self.show_response: text = self._read_pypi_response(result) msg = '\n'.join(('-' * 75, text, '-' * 75)) - self.announce(msg, log.INFO) + self.announce(msg, logging.INFO) else: - msg = 'Upload failed (%s): %s' % (status, reason) - self.announce(msg, log.ERROR) + msg = 'Upload failed ({}): {}'.format(status, reason) + self.announce(msg, logging.ERROR) raise DistutilsError(msg) diff --git a/setuptools/_distutils/config.py b/setuptools/_distutils/config.py index 
2171abd696..9a4044adaf 100644 --- a/setuptools/_distutils/config.py +++ b/setuptools/_distutils/config.py @@ -6,7 +6,7 @@ import os from configparser import RawConfigParser -from distutils.cmd import Command +from .cmd import Command DEFAULT_PYPIRC = """\ [distutils] @@ -18,20 +18,19 @@ password:%s """ + class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file - """ + """Base command that knows how to handle the .pypirc file""" + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' DEFAULT_REALM = 'pypi' repository = None realm = None user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % \ - DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server')] + ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY), + ('show-response', None, 'display full response text from server'), + ] boolean_options = ['show-response'] @@ -45,7 +44,7 @@ def _store_pypirc(self, username, password): with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: f.write(DEFAULT_PYPIRC % (username, password)) - def _read_pypirc(self): + def _read_pypirc(self): # noqa: C901 """Reads the .pypirc file.""" rc = self._get_rc_file() if os.path.exists(rc): @@ -58,9 +57,11 @@ def _read_pypirc(self): if 'distutils' in sections: # let's get the list of servers index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] + _servers = [ + server.strip() + for server in index_servers.split('\n') + if server.strip() != '' + ] if _servers == []: # nothing set, let's try to get the default pypi if 'pypi' in sections: @@ -74,10 +75,11 @@ def _read_pypirc(self): current['username'] = config.get(server, 'username') # optional params - for key, default in (('repository', - self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): + for key, default in ( + ('repository', self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None), + ): if config.has_option(server, key): current[key] = config.get(server, key) else: @@ -86,13 +88,17 @@ def _read_pypirc(self): # work around people having "repository" for the "pypi" # section of their config set to the HTTP (rather than # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): + if server == 'pypi' and repository in ( + self.DEFAULT_REPOSITORY, + 'pypi', + ): current['repository'] = self.DEFAULT_REPOSITORY return current - if (current['server'] == repository or - current['repository'] == repository): + if ( + current['server'] == repository + or current['repository'] == repository + ): return current elif 'server-login' in sections: # old format @@ -101,17 +107,20 @@ def _read_pypirc(self): repository = config.get(server, 'repository') else: repository = self.DEFAULT_REPOSITORY - return {'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM} + return { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM, + } return {} def _read_pypi_response(self, response): """Read and decode a PyPI HTTP response.""" import cgi + content_type = response.getheader('content-type', 'text/plain') encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') return 
response.read().decode(encoding) diff --git a/setuptools/_distutils/core.py b/setuptools/_distutils/core.py index f43888ea60..05d2971994 100644 --- a/setuptools/_distutils/core.py +++ b/setuptools/_distutils/core.py @@ -10,14 +10,22 @@ import sys import tokenize -from distutils.debug import DEBUG -from distutils.errors import * +from .debug import DEBUG +from .errors import ( + DistutilsSetupError, + DistutilsError, + CCompilerError, + DistutilsArgError, +) # Mainly import these so setup scripts can "from distutils.core import" them. -from distutils.dist import Distribution -from distutils.cmd import Command -from distutils.config import PyPIRCCommand -from distutils.extension import Extension +from .dist import Distribution +from .cmd import Command +from .config import PyPIRCCommand +from .extension import Extension + + +__all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup'] # This is a barebones help message generated displayed when the user # runs the setup script with no arguments at all. More useful help @@ -30,9 +38,10 @@ or: %(script)s cmd --help """ -def gen_usage (script_name): + +def gen_usage(script_name): script = os.path.basename(script_name) - return USAGE % vars() + return USAGE % locals() # Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. @@ -40,22 +49,51 @@ def gen_usage (script_name): _setup_distribution = None # Legal keyword arguments for the setup() function -setup_keywords = ('distclass', 'script_name', 'script_args', 'options', - 'name', 'version', 'author', 'author_email', - 'maintainer', 'maintainer_email', 'url', 'license', - 'description', 'long_description', 'keywords', - 'platforms', 'classifiers', 'download_url', - 'requires', 'provides', 'obsoletes', - ) +setup_keywords = ( + 'distclass', + 'script_name', + 'script_args', + 'options', + 'name', + 'version', + 'author', + 'author_email', + 'maintainer', + 'maintainer_email', + 'url', + 'license', + 'description', + 'long_description', + 'keywords', + 'platforms', + 'classifiers', + 'download_url', + 'requires', + 'provides', + 'obsoletes', +) # Legal keyword arguments for the Extension constructor -extension_keywords = ('name', 'sources', 'include_dirs', - 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'swig_opts', 'export_symbols', 'depends', 'language') - -def setup (**attrs): +extension_keywords = ( + 'name', + 'sources', + 'include_dirs', + 'define_macros', + 'undef_macros', + 'library_dirs', + 'libraries', + 'runtime_library_dirs', + 'extra_objects', + 'extra_compile_args', + 'extra_link_args', + 'swig_opts', + 'export_symbols', + 'depends', + 'language', +) + + +def setup(**attrs): # noqa: C901 """The gateway to the Distutils: do everything your setup script needs to do, in a highly flexible and user-driven way. Briefly: create a Distribution instance; find and parse config files; parse the command @@ -94,13 +132,13 @@ class found in 'cmdclass' is used in place of the default, which is # our Distribution (see below). 
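The `_read_pypi_response` helper in the config.py hunk above extracts the response charset with `cgi.parse_header`. For illustration (worth noting that the `cgi` module was deprecated in Python 3.11 and removed in 3.13, so current code needs a replacement such as `email.message.Message`):

    import cgi

    value, params = cgi.parse_header('text/html; charset=utf-8')
    print(value)                            # 'text/html'
    print(params.get('charset', 'ascii'))   # 'utf-8'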
klass = attrs.get('distclass') if klass: - del attrs['distclass'] + attrs.pop('distclass') else: klass = Distribution if 'script_name' not in attrs: attrs['script_name'] = os.path.basename(sys.argv[0]) - if 'script_args' not in attrs: + if 'script_args' not in attrs: attrs['script_args'] = sys.argv[1:] # Create the Distribution instance, using the remaining arguments @@ -111,8 +149,7 @@ class found in 'cmdclass' is used in place of the default, which is if 'name' not in attrs: raise SystemExit("error in setup command: %s" % msg) else: - raise SystemExit("error in %s setup command: %s" % \ - (attrs['name'], msg)) + raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg)) if _setup_stop_after == "init": return dist @@ -149,10 +186,11 @@ class found in 'cmdclass' is used in place of the default, which is return dist + # setup () -def run_commands (dist): +def run_commands(dist): """Given a Distribution object run all the commands, raising ``SystemExit`` errors in the case of failure. @@ -165,13 +203,12 @@ def run_commands (dist): raise SystemExit("interrupted") except OSError as exc: if DEBUG: - sys.stderr.write("error: %s\n" % (exc,)) + sys.stderr.write("error: {}\n".format(exc)) raise else: - raise SystemExit("error: %s" % (exc,)) + raise SystemExit("error: {}".format(exc)) - except (DistutilsError, - CCompilerError) as msg: + except (DistutilsError, CCompilerError) as msg: if DEBUG: raise else: @@ -180,7 +217,7 @@ def run_commands (dist): return dist -def run_setup (script_name, script_args=None, stop_after="run"): +def run_setup(script_name, script_args=None, stop_after="run"): """Run a setup script in a somewhat controlled environment, and return the Distribution instance that drives things. This is useful if you need to find out the distribution meta-data (passed as @@ -212,7 +249,7 @@ def run_setup (script_name, script_args=None, stop_after="run"): used to drive the Distutils. """ if stop_after not in ('init', 'config', 'commandline', 'run'): - raise ValueError("invalid value for 'stop_after': %r" % (stop_after,)) + raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after)) global _setup_stop_after, _setup_distribution _setup_stop_after = stop_after @@ -237,13 +274,18 @@ def run_setup (script_name, script_args=None, stop_after="run"): pass if _setup_distribution is None: - raise RuntimeError(("'distutils.core.setup()' was never called -- " - "perhaps '%s' is not a Distutils setup script?") % \ - script_name) + raise RuntimeError( + ( + "'distutils.core.setup()' was never called -- " + "perhaps '%s' is not a Distutils setup script?" + ) + % script_name + ) # I wonder if the setup script's namespace -- g and l -- would be of # any interest to callers? - #print "_setup_distribution:", _setup_distribution + # print "_setup_distribution:", _setup_distribution return _setup_distribution + # run_setup () diff --git a/setuptools/_distutils/cygwinccompiler.py b/setuptools/_distutils/cygwinccompiler.py index c5c86d8f07..47efa377c5 100644 --- a/setuptools/_distutils/cygwinccompiler.py +++ b/setuptools/_distutils/cygwinccompiler.py @@ -6,119 +6,96 @@ cygwin in no-cygwin mode). """ -# problems: -# -# * if you use a msvc compiled python version (1.5.2) -# 1. you have to insert a __GNUC__ section in its config.h -# 2. 
you have to generate an import library for its dll -# - create a def-file for python??.dll -# - create an import library using -# dlltool --dllname python15.dll --def python15.def \ -# --output-lib libpython15.a -# -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# -# * We put export_symbols in a def-file, and don't use -# --export-all-symbols because it doesn't worked reliable in some -# tested configurations. And because other windows compilers also -# need their symbols specified this no serious problem. -# -# tested configurations: -# -# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works -# (after patching python's config.h and for C++ some other include files) -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works -# (ld doesn't support -shared, so we use dllwrap) -# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now -# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 -# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html -# - using gcc -mdll instead dllwrap doesn't work without -static because -# it tries to link against dlls instead their import libraries. (If -# it finds the dll first.) -# By specifying -static we force ld to link against the import libraries, -# this is windows standard and there are normally not the necessary symbols -# in the dlls. -# *** only the version of June 2000 shows these problems -# * cygwin gcc 3.2/ld 2.13.90 works -# (ld supports -shared) -# * mingw gcc 3.2/ld 2.13 works -# (ld supports -shared) -# * llvm-mingw with Clang 11 works -# (lld supports -shared) - import os +import re import sys import copy import shlex import warnings from subprocess import check_output -from distutils.unixccompiler import UnixCCompiler -from distutils.file_util import write_file -from distutils.errors import (DistutilsExecError, CCompilerError, - CompileError, UnknownFileError) -from distutils.version import LooseVersion, suppress_known_deprecation +from .unixccompiler import UnixCCompiler +from .file_util import write_file +from .errors import ( + DistutilsExecError, + DistutilsPlatformError, + CCompilerError, + CompileError, +) +from .version import LooseVersion, suppress_known_deprecation +from ._collections import RangeMap + + +_msvcr_lookup = RangeMap.left( + { + # MSVC 7.0 + 1300: ['msvcr70'], + # MSVC 7.1 + 1310: ['msvcr71'], + # VS2005 / MSVC 8.0 + 1400: ['msvcr80'], + # VS2008 / MSVC 9.0 + 1500: ['msvcr90'], + # VS2010 / MSVC 10.0 + 1600: ['msvcr100'], + # VS2012 / MSVC 11.0 + 1700: ['msvcr110'], + # VS2013 / MSVC 12.0 + 1800: ['msvcr120'], + # VS2015 / MSVC 14.0 + 1900: ['vcruntime140'], + 2000: RangeMap.undefined_value, + }, +) + def get_msvcr(): """Include the appropriate MSVC runtime library if Python was built with MSVC 7.0 or later. 
""" - msc_pos = sys.version.find('MSC v.') - if msc_pos != -1: - msc_ver = sys.version[msc_pos+6:msc_pos+10] - if msc_ver == '1300': - # MSVC 7.0 - return ['msvcr70'] - elif msc_ver == '1310': - # MSVC 7.1 - return ['msvcr71'] - elif msc_ver == '1400': - # VS2005 / MSVC 8.0 - return ['msvcr80'] - elif msc_ver == '1500': - # VS2008 / MSVC 9.0 - return ['msvcr90'] - elif msc_ver == '1600': - # VS2010 / MSVC 10.0 - return ['msvcr100'] - elif msc_ver == '1700': - # VS2012 / MSVC 11.0 - return ['msvcr110'] - elif msc_ver == '1800': - # VS2013 / MSVC 12.0 - return ['msvcr120'] - elif 1900 <= int(msc_ver) < 2000: - # VS2015 / MSVC 14.0 - return ['ucrt', 'vcruntime140'] - else: - raise ValueError("Unknown MS Compiler version %s " % msc_ver) + match = re.search(r'MSC v\.(\d{4})', sys.version) + try: + msc_ver = int(match.group(1)) + except AttributeError: + return + try: + return _msvcr_lookup[msc_ver] + except KeyError: + raise ValueError("Unknown MS Compiler version %s " % msc_ver) + + +_runtime_library_dirs_msg = ( + "Unable to set runtime library search path on Windows, " + "usually indicated by `runtime_library_dirs` parameter to Extension" +) class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ + """Handles the Cygwin port of the GNU C compiler to Windows.""" + compiler_type = 'cygwin' obj_extension = ".o" static_lib_extension = ".a" - shared_lib_extension = ".dll" + shared_lib_extension = ".dll.a" + dylib_lib_extension = ".dll" static_lib_format = "lib%s%s" - shared_lib_format = "%s%s" + shared_lib_format = "lib%s%s" + dylib_lib_format = "cyg%s%s" exe_extension = ".exe" def __init__(self, verbose=0, dry_run=0, force=0): - super().__init__(verbose, dry_run, force) status, details = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) + self.debug_print( + "Python's GCC status: {} (details: {})".format(status, details) + ) if status is not CONFIG_H_OK: self.warn( "Python's pyconfig.h doesn't seem to support your compiler. " "Reason: %s. " - "Compiling may fail because of undefined preprocessor macros." - % details) + "Compiling may fail because of undefined preprocessor macros." % details + ) self.cc = os.environ.get('CC', 'gcc') self.cxx = os.environ.get('CXX', 'g++') @@ -126,12 +103,13 @@ def __init__(self, verbose=0, dry_run=0, force=0): self.linker_dll = self.cc shared_option = "-shared" - self.set_executables(compiler='%s -mcygwin -O -Wall' % self.cc, - compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, - compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, - linker_exe='%s -mcygwin' % self.cc, - linker_so=('%s -mcygwin %s' % - (self.linker_dll, shared_option))) + self.set_executables( + compiler='%s -mcygwin -O -Wall' % self.cc, + compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, + compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, + linker_exe='%s -mcygwin' % self.cc, + linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)), + ) # Include the appropriate MSVC runtime library if Python was built # with MSVC 7.0 or later. @@ -139,7 +117,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): @property def gcc_version(self): - # Older numpy dependend on this existing to check for ancient + # Older numpy depended on this existing to check for ancient # gcc versions. This doesn't make much sense with clang etc so # just hardcode to something recent. 
# https://github.com/numpy/numpy/pull/20333 @@ -154,36 +132,53 @@ def gcc_version(self): def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): """Compiles the source by spawning GCC and windres if needed.""" - if ext == '.rc' or ext == '.res': + if ext in ('.rc', '.res'): # gcc needs '.res' and '.rc' compiled to object files !!! try: self.spawn(["windres", "-i", src, "-o", obj]) except DistutilsExecError as msg: raise CompileError(msg) - else: # for other files use the C-compiler + else: # for other files use the C-compiler try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) + self.spawn( + self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): """Link the objects.""" # use separate copies, so we can modify the lists extra_preargs = copy.copy(extra_preargs or []) libraries = copy.copy(libraries or []) objects = copy.copy(objects or []) + if runtime_library_dirs: + self.warn(_runtime_library_dirs_msg) + # Additional libraries libraries.extend(self.dll_libraries) # handle export symbols by creating a def-file # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + if (export_symbols is not None) and ( + target_desc != self.EXECUTABLE or self.linker_dll == "gcc" + ): # (The linker doesn't do anything if output is up-to-date. 
# So it would probably better to check if we really need this, # but for this we had to insert some unchanged parts of @@ -195,29 +190,24 @@ def link(self, target_desc, objects, output_filename, output_dir=None, temp_dir = os.path.dirname(objects[0]) # name of dll to give the helper files the same base name (dll_name, dll_extension) = os.path.splitext( - os.path.basename(output_filename)) + os.path.basename(output_filename) + ) # generate the filenames for these files def_file = os.path.join(temp_dir, dll_name + ".def") - lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") # Generate .def file - contents = [ - "LIBRARY %s" % os.path.basename(output_filename), - "EXPORTS"] + contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"] for sym in export_symbols: contents.append(sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) + self.execute(write_file, (def_file, contents), "writing %s" % def_file) - # next add options for def-file and to creating import libraries + # next add options for def-file - # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) # for gcc/ld the def-file is specified as any object files objects.append(def_file) - #end: if ((export_symbols is not None) and + # end: if ((export_symbols is not None) and # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): # who wants symbols and a many times larger output file @@ -229,70 +219,73 @@ def link(self, target_desc, objects, output_filename, output_dir=None, if not debug: extra_preargs.append("-s") - UnixCCompiler.link(self, target_desc, objects, output_filename, - output_dir, libraries, library_dirs, - runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, extra_preargs, extra_postargs, build_temp, - target_lang) + UnixCCompiler.link( + self, + target_desc, + objects, + output_filename, + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + None, # export_symbols, we do this in our def-file + debug, + extra_preargs, + extra_postargs, + build_temp, + target_lang, + ) + + def runtime_library_dir_option(self, dir): + # cygwin doesn't support rpath. While in theory we could error + # out like MSVC does, code might expect it to work like on Unix, so + # just warn and hope for the best. 
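When symbols are exported, `link` above writes a `.def` file beside the object files and simply appends it to the objects list for gcc/ld to pick up. The generated file is plain text of this shape (module name and symbols invented for illustration):

    import os

    export_symbols = ['PyInit_spam', 'spam_helper']   # hypothetical
    output_filename = 'build/lib/spam.pyd'            # hypothetical

    # mirrors the contents construction in link() above
    contents = ['LIBRARY %s' % os.path.basename(output_filename), 'EXPORTS']
    contents.extend(export_symbols)
    print('\n'.join(contents))
    # LIBRARY spam.pyd
    # EXPORTS
    # PyInit_spam
    # spam_helper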
+ self.warn(_runtime_library_dirs_msg) + return [] # -- Miscellaneous methods ----------------------------------------- - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - """Adds supports for rc and res files.""" - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext in ('.res', '.rc'): - # these need to be compiled to object files - obj_names.append (os.path.join(output_dir, - base + ext + self.obj_extension)) - else: - obj_names.append (os.path.join(output_dir, - base + self.obj_extension)) - return obj_names + def _make_out_path(self, output_dir, strip_dir, src_name): + # use normcase to make sure '.rc' is really '.rc' and not '.RC' + norm_src_name = os.path.normcase(src_name) + return super()._make_out_path(output_dir, strip_dir, norm_src_name) + + @property + def out_extensions(self): + """ + Add support for rc and res files. + """ + return { + **super().out_extensions, + **{ext: ext + self.obj_extension for ext in ('.res', '.rc')}, + } + # the same as cygwin plus some additional parameters class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. - """ + """Handles the Mingw32 port of the GNU C compiler to Windows.""" + compiler_type = 'mingw32' def __init__(self, verbose=0, dry_run=0, force=0): - - super().__init__ (verbose, dry_run, force) + super().__init__(verbose, dry_run, force) shared_option = "-shared" if is_cygwincc(self.cc): - raise CCompilerError( - 'Cygwin gcc cannot be used with --compiler=mingw32') - - self.set_executables(compiler='%s -O -Wall' % self.cc, - compiler_so='%s -mdll -O -Wall' % self.cc, - compiler_cxx='%s -O -Wall' % self.cxx, - linker_exe='%s' % self.cc, - linker_so='%s %s' - % (self.linker_dll, shared_option)) + raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32') + + self.set_executables( + compiler='%s -O -Wall' % self.cc, + compiler_so='%s -mdll -O -Wall' % self.cc, + compiler_cxx='%s -O -Wall' % self.cxx, + linker_exe='%s' % self.cc, + linker_so='{} {}'.format(self.linker_dll, shared_option), + ) - # Maybe we should also append -mthreads, but then the finished - # dlls need another dll (mingwm10.dll see Mingw32 docs) - # (-mthreads: Support thread-safe exception handling on `Mingw32') + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError(_runtime_library_dirs_msg) - # no additional libraries needed - self.dll_libraries=[] - - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() # Because these compilers aren't configured in Python's pyconfig.h file by # default, we should at least warn the user if he is using an unmodified @@ -302,6 +295,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): CONFIG_H_NOTOK = "not ok" CONFIG_H_UNCERTAIN = "uncertain" + def check_config_h(): """Check if the current Python installation appears amenable to building extensions with GCC. 
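`check_config_h` above answers with one of three statuses (`CONFIG_H_OK`, `CONFIG_H_NOTOK`, `CONFIG_H_UNCERTAIN`) depending on whether the interpreter looks GCC-compatible. A simplified sketch of that kind of probe, assuming only that `pyconfig.h` is present and readable:

    import sys
    from distutils import sysconfig

    def looks_gcc_compatible():
        # a GCC-built Python advertises the compiler in sys.version
        if 'GCC' in sys.version:
            return True
        # otherwise look for a __GNUC__ section in pyconfig.h
        fn = sysconfig.get_config_h_filename()
        with open(fn) as config_h:
            return '__GNUC__' in config_h.read()

    print(looks_gcc_compatible())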
@@ -346,8 +340,8 @@ def check_config_h(): finally: config_h.close() except OSError as exc: - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) + return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror)) + def is_cygwincc(cc): '''Try to determine if the compiler that would be used is from cygwin.''' diff --git a/setuptools/_distutils/dep_util.py b/setuptools/_distutils/dep_util.py index d74f5e4e92..48da8641c6 100644 --- a/setuptools/_distutils/dep_util.py +++ b/setuptools/_distutils/dep_util.py @@ -5,31 +5,32 @@ timestamp dependency analysis.""" import os -from distutils.errors import DistutilsFileError +from .errors import DistutilsFileError -def newer (source, target): +def newer(source, target): """Return true if 'source' exists and is more recently modified than 'target', or if 'source' exists and 'target' doesn't. Return false if both exist and 'target' is the same age or younger than 'source'. Raise DistutilsFileError if 'source' does not exist. """ if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % - os.path.abspath(source)) + raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) if not os.path.exists(target): return 1 from stat import ST_MTIME + mtime1 = os.stat(source)[ST_MTIME] mtime2 = os.stat(target)[ST_MTIME] return mtime1 > mtime2 + # newer () -def newer_pairwise (sources, targets): +def newer_pairwise(sources, targets): """Walk two filename lists in parallel, testing if each source is newer than its corresponding target. Return a pair of lists (sources, targets) where source is newer than target, according to the semantics @@ -48,10 +49,11 @@ def newer_pairwise (sources, targets): return (n_sources, n_targets) + # newer_pairwise () -def newer_group (sources, target, missing='error'): +def newer_group(sources, target, missing='error'): """Return true if 'target' is out-of-date with respect to any file listed in 'sources'. In other words, if 'target' exists and is newer than every file in 'sources', return false; otherwise return true. @@ -73,15 +75,16 @@ def newer_group (sources, target, missing='error'): # we can immediately return true. If we fall through to the end # of the loop, then 'target' is up-to-date and we return false. 
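The `dep_util` helpers in this hunk are bare mtime comparisons; `newer_group` adds a `missing` policy for sources that no longer exist. A quick usage sketch:

    import pathlib
    from distutils.dep_util import newer, newer_group

    pathlib.Path('src.c').touch()
    pathlib.Path('src.o').touch()   # target touched last, so up to date

    print(newer('src.c', 'src.o'))   # falsy: target is at least as new
    # a missing source with missing='newer' forces a rebuild
    print(newer_group(['src.c', 'gone.c'], 'src.o', missing='newer'))   # 1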
from stat import ST_MTIME + target_mtime = os.stat(target)[ST_MTIME] for source in sources: if not os.path.exists(source): - if missing == 'error': # blow up when we stat() the file + if missing == 'error': # blow up when we stat() the file pass - elif missing == 'ignore': # missing source dropped from - continue # target's dependency list - elif missing == 'newer': # missing source means target is - return 1 # out-of-date + elif missing == 'ignore': # missing source dropped from + continue # target's dependency list + elif missing == 'newer': # missing source means target is + return 1 # out-of-date source_mtime = os.stat(source)[ST_MTIME] if source_mtime > target_mtime: @@ -89,4 +92,5 @@ def newer_group (sources, target, missing='error'): else: return 0 + # newer_group () diff --git a/setuptools/_distutils/dir_util.py b/setuptools/_distutils/dir_util.py index d5cd8e3e24..23dc3392a2 100644 --- a/setuptools/_distutils/dir_util.py +++ b/setuptools/_distutils/dir_util.py @@ -4,17 +4,15 @@ import os import errno -from distutils.errors import DistutilsFileError, DistutilsInternalError -from distutils import log +from .errors import DistutilsInternalError, DistutilsFileError +from ._log import log # cache for by mkpath() -- in addition to cheapening redundant calls, # eliminates redundant "creating /foo/bar/baz" messages in dry-run mode _path_created = {} -# I don't use os.makedirs because a) it's new to Python 1.5.2, and -# b) it blows up if the directory already exists (I want to silently -# succeed in that case). -def mkpath(name, mode=0o777, verbose=1, dry_run=0): + +def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901 """Create a directory and any missing ancestor directories. If the directory already exists (or if 'name' is the empty string, which @@ -23,6 +21,12 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): (eg. some sub-path exists, but is a file rather than a directory). If 'verbose' is true, print a one-line summary of each mkdir to stdout. Return the list of directories actually created. + + os.makedirs is not used because: + + a) It's new to Python 1.5.2, and + b) it blows up if the directory already exists (in which case it should + silently succeed). """ global _path_created @@ -30,7 +34,8 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # Detect a common bug -- name is None if not isinstance(name, str): raise DistutilsInternalError( - "mkpath: 'name' must be a string (got %r)" % (name,)) + "mkpath: 'name' must be a string (got {!r})".format(name) + ) # XXX what's the better way to handle verbosity? 
print as we create # each directory in the path (the current behaviour), or only announce @@ -45,17 +50,17 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): return created_dirs (head, tail) = os.path.split(name) - tails = [tail] # stack of lone dirs to create + tails = [tail] # stack of lone dirs to create while head and tail and not os.path.isdir(head): (head, tail) = os.path.split(head) - tails.insert(0, tail) # push next higher dir onto stack + tails.insert(0, tail) # push next higher dir onto stack # now 'head' contains the deepest directory that already exists # (that is, the child of 'head' in 'name' is the highest directory # that does *not* exist) for d in tails: - #print "head = %s, d = %s: " % (head, d), + # print "head = %s, d = %s: " % (head, d), head = os.path.join(head, d) abs_head = os.path.abspath(head) @@ -71,12 +76,14 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): except OSError as exc: if not (exc.errno == errno.EEXIST and os.path.isdir(head)): raise DistutilsFileError( - "could not create '%s': %s" % (head, exc.args[-1])) + "could not create '{}': {}".format(head, exc.args[-1]) + ) created_dirs.append(head) _path_created[abs_head] = 1 return created_dirs + def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): """Create all the empty directories under 'base_dir' needed to put 'files' there. @@ -96,8 +103,17 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): for dir in sorted(need_dir): mkpath(dir, mode, verbose=verbose, dry_run=dry_run) -def copy_tree(src, dst, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, update=0, verbose=1, dry_run=0): + +def copy_tree( # noqa: C901 + src, + dst, + preserve_mode=1, + preserve_times=1, + preserve_symlinks=0, + update=0, + verbose=1, + dry_run=0, +): """Copy an entire directory tree 'src' to a new location 'dst'. Both 'src' and 'dst' must be directory names. 
If 'src' is not a @@ -120,8 +136,7 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, from distutils.file_util import copy_file if not dry_run and not os.path.isdir(src): - raise DistutilsFileError( - "cannot copy tree '%s': not a directory" % src) + raise DistutilsFileError("cannot copy tree '%s': not a directory" % src) try: names = os.listdir(src) except OSError as e: @@ -129,7 +144,8 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, names = [] else: raise DistutilsFileError( - "error listing files in '%s': %s" % (src, e.strerror)) + "error listing files in '{}': {}".format(src, e.strerror) + ) if not dry_run: mkpath(dst, verbose=verbose) @@ -154,27 +170,43 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, elif os.path.isdir(src_name): outputs.extend( - copy_tree(src_name, dst_name, preserve_mode, - preserve_times, preserve_symlinks, update, - verbose=verbose, dry_run=dry_run)) + copy_tree( + src_name, + dst_name, + preserve_mode, + preserve_times, + preserve_symlinks, + update, + verbose=verbose, + dry_run=dry_run, + ) + ) else: - copy_file(src_name, dst_name, preserve_mode, - preserve_times, update, verbose=verbose, - dry_run=dry_run) + copy_file( + src_name, + dst_name, + preserve_mode, + preserve_times, + update, + verbose=verbose, + dry_run=dry_run, + ) outputs.append(dst_name) return outputs + def _build_cmdtuple(path, cmdtuples): """Helper for remove_tree().""" for f in os.listdir(path): - real_f = os.path.join(path,f) + real_f = os.path.join(path, f) if os.path.isdir(real_f) and not os.path.islink(real_f): _build_cmdtuple(real_f, cmdtuples) else: cmdtuples.append((os.remove, real_f)) cmdtuples.append((os.rmdir, path)) + def remove_tree(directory, verbose=1, dry_run=0): """Recursively remove an entire directory tree. @@ -195,9 +227,10 @@ def remove_tree(directory, verbose=1, dry_run=0): # remove dir from cache if it's already there abspath = os.path.abspath(cmd[1]) if abspath in _path_created: - del _path_created[abspath] + _path_created.pop(abspath) except OSError as exc: - log.warn("error removing %s: %s", directory, exc) + log.warning("error removing %s: %s", directory, exc) + def ensure_relative(path): """Take the full path 'path', and make it a relative path. diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py index 37db4d6cd7..7c0f0e5b78 100644 --- a/setuptools/_distutils/dist.py +++ b/setuptools/_distutils/dist.py @@ -7,6 +7,9 @@ import sys import os import re +import pathlib +import contextlib +import logging from email import message_from_file try: @@ -14,11 +17,16 @@ except ImportError: warnings = None -from distutils.errors import * -from distutils.fancy_getopt import FancyGetopt, translate_longopt -from distutils.util import check_environ, strtobool, rfc822_escape -from distutils import log -from distutils.debug import DEBUG +from .errors import ( + DistutilsOptionError, + DistutilsModuleError, + DistutilsArgError, + DistutilsClassError, +) +from .fancy_getopt import FancyGetopt, translate_longopt +from .util import check_environ, strtobool, rfc822_escape +from ._log import log +from .debug import DEBUG # Regex to define acceptable Distutils command names. This is not *quite* # the same as a Python NAME -- I don't allow leading underscores. 
The fact @@ -37,7 +45,7 @@ def _ensure_list(value, fieldname): typename = type(value).__name__ msg = "Warning: '{fieldname}' should be a list, got type '{typename}'" msg = msg.format(**locals()) - log.log(log.WARN, msg) + log.warning(msg) value = list(value) return value @@ -69,8 +77,7 @@ class Distribution: ('quiet', 'q', "run quietly (turns verbosity off)"), ('dry-run', 'n', "don't actually do anything"), ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), + ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), ] # 'common_usage' is a short (2-3 line) string describing the common @@ -84,49 +91,32 @@ class Distribution: # options that are not propagated to the commands display_options = [ - ('help-commands', None, - "list all available commands"), - ('name', None, - "print package name"), - ('version', 'V', - "print package version"), - ('fullname', None, - "print -"), - ('author', None, - "print the author's name"), - ('author-email', None, - "print the author's email address"), - ('maintainer', None, - "print the maintainer's name"), - ('maintainer-email', None, - "print the maintainer's email address"), - ('contact', None, - "print the maintainer's name if known, else the author's"), - ('contact-email', None, - "print the maintainer's email address if known, else the author's"), - ('url', None, - "print the URL for this package"), - ('license', None, - "print the license of the package"), - ('licence', None, - "alias for --license"), - ('description', None, - "print the package description"), - ('long-description', None, - "print the long package description"), - ('platforms', None, - "print the list of platforms"), - ('classifiers', None, - "print the list of classifiers"), - ('keywords', None, - "print the list of keywords"), - ('provides', None, - "print the list of packages/modules provided"), - ('requires', None, - "print the list of packages/modules required"), - ('obsoletes', None, - "print the list of packages/modules made obsolete") - ] + ('help-commands', None, "list all available commands"), + ('name', None, "print package name"), + ('version', 'V', "print package version"), + ('fullname', None, "print -"), + ('author', None, "print the author's name"), + ('author-email', None, "print the author's email address"), + ('maintainer', None, "print the maintainer's name"), + ('maintainer-email', None, "print the maintainer's email address"), + ('contact', None, "print the maintainer's name if known, else the author's"), + ( + 'contact-email', + None, + "print the maintainer's email address if known, else the author's", + ), + ('url', None, "print the URL for this package"), + ('license', None, "print the license of the package"), + ('licence', None, "alias for --license"), + ('description', None, "print the package description"), + ('long-description', None, "print the long package description"), + ('platforms', None, "print the list of platforms"), + ('classifiers', None, "print the list of classifiers"), + ('keywords', None, "print the list of keywords"), + ('provides', None, "print the list of packages/modules provided"), + ('requires', None, "print the list of packages/modules required"), + ('obsoletes', None, "print the list of packages/modules made obsolete"), + ] display_option_names = [translate_longopt(x[0]) for x in display_options] # negative options are options that exclude other options @@ -134,7 +124,7 @@ class Distribution: # -- Creation/initialization methods 
------------------------------- - def __init__(self, attrs=None): + def __init__(self, attrs=None): # noqa: C901 """Construct a new Distribution instance: initialize all the attributes of a Distribution, and then use 'attrs' (a dictionary mapping attribute names to values) to assign some of those @@ -247,9 +237,9 @@ def __init__(self, attrs=None): options = attrs.get('options') if options is not None: del attrs['options'] - for (command, cmd_options) in options.items(): + for command, cmd_options in options.items(): opt_dict = self.get_option_dict(command) - for (opt, val) in cmd_options.items(): + for opt, val in cmd_options.items(): opt_dict[opt] = ("setup script", val) if 'licence' in attrs: @@ -263,7 +253,7 @@ def __init__(self, attrs=None): # Now work on the rest of the attributes. Any attribute that's # not already defined is invalid! - for (key, val) in attrs.items(): + for key, val in attrs.items(): if hasattr(self.metadata, "set_" + key): getattr(self.metadata, "set_" + key)(val) elif hasattr(self.metadata, key): @@ -306,7 +296,7 @@ def get_option_dict(self, command): def dump_option_dicts(self, header=None, commands=None, indent=""): from pprint import pformat - if commands is None: # dump all command option dicts + if commands is None: # dump all command option dicts commands = sorted(self.command_options.keys()) if header is not None: @@ -320,11 +310,9 @@ def dump_option_dicts(self, header=None, commands=None, indent=""): for cmd_name in commands: opt_dict = self.command_options.get(cmd_name) if opt_dict is None: - self.announce(indent + - "no option dict for '%s' command" % cmd_name) + self.announce(indent + "no option dict for '%s' command" % cmd_name) else: - self.announce(indent + - "option dict for '%s' command:" % cmd_name) + self.announce(indent + "option dict for '%s' command:" % cmd_name) out = pformat(opt_dict) for line in out.split('\n'): self.announce(indent + " " + line) @@ -337,58 +325,61 @@ def find_config_files(self): should be parsed. The filenames returned are guaranteed to exist (modulo nasty race conditions). - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. where the top-level - Distutils __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. - - The file in the user's home directory can be disabled with the - --no-user-cfg option. + There are multiple possible config files: + - distutils.cfg in the Distutils installation directory (i.e. 
+ where the top-level Distutils __inst__.py file lives) + - a file in the user's home directory named .pydistutils.cfg + on Unix and pydistutils.cfg on Windows/Mac; may be disabled + with the ``--no-user-cfg`` option + - setup.cfg in the current directory + - a file named by an environment variable """ - files = [] check_environ() + files = [str(path) for path in self._gen_paths() if os.path.isfile(path)] - # Where to look for the system-wide Distutils config file - sys_dir = os.path.dirname(sys.modules['distutils'].__file__) + if DEBUG: + self.announce("using config files: %s" % ', '.join(files)) - # Look for the system config file - sys_file = os.path.join(sys_dir, "distutils.cfg") - if os.path.isfile(sys_file): - files.append(sys_file) + return files - # What to call the per-user config file - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" + def _gen_paths(self): + # The system-wide Distutils config file + sys_dir = pathlib.Path(sys.modules['distutils'].__file__).parent + yield sys_dir / "distutils.cfg" - # And look for the user config file + # The per-user config file + prefix = '.' * (os.name == 'posix') + filename = prefix + 'pydistutils.cfg' if self.want_user_cfg: - user_file = os.path.join(os.path.expanduser('~'), user_filename) - if os.path.isfile(user_file): - files.append(user_file) + yield pathlib.Path('~').expanduser() / filename # All platforms support local setup.cfg - local_file = "setup.cfg" - if os.path.isfile(local_file): - files.append(local_file) + yield pathlib.Path('setup.cfg') - if DEBUG: - self.announce("using config files: %s" % ', '.join(files)) - - return files + # Additional config indicated in the environment + with contextlib.suppress(TypeError): + yield pathlib.Path(os.getenv("DIST_EXTRA_CONFIG")) - def parse_config_files(self, filenames=None): + def parse_config_files(self, filenames=None): # noqa: C901 from configparser import ConfigParser # Ignore install directory options if we have a venv if sys.prefix != sys.base_prefix: ignore_options = [ - 'install-base', 'install-platbase', 'install-lib', - 'install-platlib', 'install-purelib', 'install-headers', - 'install-scripts', 'install-data', 'prefix', 'exec-prefix', - 'home', 'user', 'root'] + 'install-base', + 'install-platbase', + 'install-lib', + 'install-platlib', + 'install-purelib', + 'install-headers', + 'install-scripts', + 'install-data', + 'prefix', + 'exec-prefix', + 'home', + 'user', + 'root', + ] else: ignore_options = [] @@ -411,7 +402,7 @@ def parse_config_files(self, filenames=None): for opt in options: if opt != '__name__' and opt not in ignore_options: - val = parser.get(section,opt) + val = parser.get(section, opt) opt = opt.replace('-', '_') opt_dict[opt] = (filename, val) @@ -423,12 +414,12 @@ def parse_config_files(self, filenames=None): # to set Distribution options. if 'global' in self.command_options: - for (opt, (src, val)) in self.command_options['global'].items(): + for opt, (src, val) in self.command_options['global'].items(): alias = self.negative_opt.get(opt) try: if alias: setattr(self, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! + elif opt in ('verbose', 'dry_run'): # ugh! 
setattr(self, opt, strtobool(val)) else: setattr(self, opt, val) @@ -475,14 +466,14 @@ def parse_command_line(self): parser.set_aliases({'licence': 'license'}) args = parser.getopt(args=self.script_args, object=self) option_order = parser.get_option_order() - log.set_verbosity(self.verbose) + logging.getLogger().setLevel(logging.WARN - 10 * self.verbose) # for display options we return immediately if self.handle_display_options(option_order): return while args: args = self._parse_command_opts(parser, args) - if args is None: # user asked for help (and got it) + if args is None: # user asked for help (and got it) return # Handle the cases of --help as a "global" option, ie. @@ -492,9 +483,9 @@ def parse_command_line(self): # latter, we omit the display-only options and show help for # each command listed on the command line. if self.help: - self._show_help(parser, - display_options=len(self.commands) == 0, - commands=self.commands) + self._show_help( + parser, display_options=len(self.commands) == 0, commands=self.commands + ) return # Oops, no commands found -- an end-user error @@ -511,11 +502,14 @@ def _get_toplevel_options(self): level as well as options recognized for commands. """ return self.global_options + [ - ("command-packages=", None, - "list of packages that provide distutils commands"), - ] + ( + "command-packages=", + None, + "list of packages that provide distutils commands", + ), + ] - def _parse_command_opts(self, parser, args): + def _parse_command_opts(self, parser, args): # noqa: C901 """Parse the command-line options for a single command. 'parser' must be a FancyGetopt instance; 'args' must be the list of arguments, starting with the current command (whose options @@ -545,14 +539,19 @@ def _parse_command_opts(self, parser, args): # to be sure that the basic "command" interface is implemented. if not issubclass(cmd_class, Command): raise DistutilsClassError( - "command class %s must subclass Command" % cmd_class) + "command class %s must subclass Command" % cmd_class + ) # Also make sure that the command object provides a list of its # known options. - if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - msg = ("command class %s must provide " - "'user_options' attribute (a list of tuples)") + if not ( + hasattr(cmd_class, 'user_options') + and isinstance(cmd_class.user_options, list) + ): + msg = ( + "command class %s must provide " + "'user_options' attribute (a list of tuples)" + ) raise DistutilsClassError(msg % cmd_class) # If the command class has a list of negative alias options, @@ -564,36 +563,39 @@ def _parse_command_opts(self, parser, args): # Check for help_options in command class. They have a different # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): + if hasattr(cmd_class, 'help_options') and isinstance( + cmd_class.help_options, list + ): help_options = fix_help_options(cmd_class.help_options) else: help_options = [] # All commands support the global options too, just by adding # in 'global_options'. 
- parser.set_option_table(self.global_options + - cmd_class.user_options + - help_options) + parser.set_option_table( + self.global_options + cmd_class.user_options + help_options + ) parser.set_negative_aliases(negative_opt) (args, opts) = parser.getopt(args[1:]) if hasattr(opts, 'help') and opts.help: self._show_help(parser, display_options=0, commands=[cmd_class]) return - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found=0 - for (help_option, short, desc, func) in cmd_class.help_options: + if hasattr(cmd_class, 'help_options') and isinstance( + cmd_class.help_options, list + ): + help_option_found = 0 + for help_option, short, desc, func in cmd_class.help_options: if hasattr(opts, parser.get_attr_name(help_option)): - help_option_found=1 + help_option_found = 1 if callable(func): func() else: raise DistutilsClassError( "invalid help function %r for help option '%s': " "must be a callable object (function, etc.)" - % (func, help_option)) + % (func, help_option) + ) if help_option_found: return @@ -601,7 +603,7 @@ def _parse_command_opts(self, parser, args): # Put the options from the command-line into their official # holding pen, the 'command_options' dictionary. opt_dict = self.get_option_dict(command) - for (name, value) in vars(opts).items(): + for name, value in vars(opts).items(): opt_dict[name] = ("command line", value) return args @@ -619,8 +621,7 @@ def finalize_options(self): value = [elm.strip() for elm in value.split(',')] setattr(self.metadata, attr, value) - def _show_help(self, parser, global_options=1, display_options=1, - commands=[]): + def _show_help(self, parser, global_options=1, display_options=1, commands=[]): """Show help for the setup script command-line in the form of several lists of command-line options. 
'parser' should be a FancyGetopt instance; do not expect it to be returned in the @@ -649,8 +650,9 @@ def _show_help(self, parser, global_options=1, display_options=1, if display_options: parser.set_option_table(self.display_options) parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") + "Information display options (just display " + + "information, ignore any commands)" + ) print('') for command in self.commands: @@ -658,10 +660,10 @@ def _show_help(self, parser, global_options=1, display_options=1, klass = command else: klass = self.get_command_class(command) - if (hasattr(klass, 'help_options') and - isinstance(klass.help_options, list)): - parser.set_option_table(klass.user_options + - fix_help_options(klass.help_options)) + if hasattr(klass, 'help_options') and isinstance(klass.help_options, list): + parser.set_option_table( + klass.user_options + fix_help_options(klass.help_options) + ) else: parser.set_option_table(klass.user_options) parser.print_help("Options for '%s' command:" % klass.__name__) @@ -694,14 +696,13 @@ def handle_display_options(self, option_order): for option in self.display_options: is_display_option[option[0]] = 1 - for (opt, val) in option_order: + for opt, val in option_order: if val and is_display_option.get(opt): opt = translate_longopt(opt) - value = getattr(self.metadata, "get_"+opt)() - if opt in ['keywords', 'platforms']: + value = getattr(self.metadata, "get_" + opt)() + if opt in ('keywords', 'platforms'): print(','.join(value)) - elif opt in ('classifiers', 'provides', 'requires', - 'obsoletes'): + elif opt in ('classifiers', 'provides', 'requires', 'obsoletes'): print('\n'.join(value)) else: print(value) @@ -735,6 +736,7 @@ def print_commands(self): 'description'. """ import distutils.command + std_commands = distutils.command.__all__ is_std = {} for cmd in std_commands: @@ -746,18 +748,14 @@ def print_commands(self): extra_commands.append(cmd) max_length = 0 - for cmd in (std_commands + extra_commands): + for cmd in std_commands + extra_commands: if len(cmd) > max_length: max_length = len(cmd) - self.print_command_list(std_commands, - "Standard commands", - max_length) + self.print_command_list(std_commands, "Standard commands", max_length) if extra_commands: print() - self.print_command_list(extra_commands, - "Extra commands", - max_length) + self.print_command_list(extra_commands, "Extra commands", max_length) def get_command_list(self): """Get a list of (command, description) tuples. 
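
A note on the config-file refactor earlier in this dist.py patch: find_config_files() is reduced to filtering a _gen_paths() generator through os.path.isfile(), and the new DIST_EXTRA_CONFIG hook leans on the fact that pathlib.Path(None) raises TypeError, so contextlib.suppress(TypeError) turns an unset variable into "yield nothing". A minimal standalone sketch of that pattern (the file and variable names here are illustrative, not part of the patch):

    import os
    import contextlib
    import pathlib

    def _gen_candidate_paths():
        # hypothetical per-user and per-project config names
        yield pathlib.Path('~').expanduser() / '.mytool.cfg'
        yield pathlib.Path('mytool.cfg')
        # Path(None) raises TypeError when the variable is unset, so
        # suppress() makes an absent MYTOOL_EXTRA_CONFIG yield nothing.
        with contextlib.suppress(TypeError):
            yield pathlib.Path(os.getenv('MYTOOL_EXTRA_CONFIG'))

    def find_config_files():
        return [str(p) for p in _gen_candidate_paths() if os.path.isfile(p)]

    print(find_config_files())

One consequence of the generator style is that the existence check lives in exactly one place (the comprehension), where the old code re-tested os.path.isfile() separately for each candidate.
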
@@ -769,6 +767,7 @@ def get_command_list(self): # Currently this is only used on Mac OS, for the Mac-only GUI # Distutils interface (by Jack Jansen) import distutils.command + std_commands = distutils.command.__all__ is_std = {} for cmd in std_commands: @@ -780,7 +779,7 @@ def get_command_list(self): extra_commands.append(cmd) rv = [] - for cmd in (std_commands + extra_commands): + for cmd in std_commands + extra_commands: klass = self.cmdclass.get(cmd) if not klass: klass = self.get_command_class(cmd) @@ -822,7 +821,7 @@ def get_command_class(self, command): return klass for pkgname in self.get_command_packages(): - module_name = "%s.%s" % (pkgname, command) + module_name = "{}.{}".format(pkgname, command) klass_name = command try: @@ -836,7 +835,8 @@ def get_command_class(self, command): except AttributeError: raise DistutilsModuleError( "invalid command '%s' (no class '%s' in module '%s')" - % (command, klass_name, module_name)) + % (command, klass_name, module_name) + ) self.cmdclass[command] = klass return klass @@ -852,8 +852,10 @@ def get_command_obj(self, command, create=1): cmd_obj = self.command_obj.get(command) if not cmd_obj and create: if DEBUG: - self.announce("Distribution.get_command_obj(): " - "creating '%s' command object" % command) + self.announce( + "Distribution.get_command_obj(): " + "creating '%s' command object" % command + ) klass = self.get_command_class(command) cmd_obj = self.command_obj[command] = klass(self) @@ -870,7 +872,7 @@ def get_command_obj(self, command, create=1): return cmd_obj - def _set_command_options(self, command_obj, option_dict=None): + def _set_command_options(self, command_obj, option_dict=None): # noqa: C901 """Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command'). @@ -885,13 +887,11 @@ def _set_command_options(self, command_obj, option_dict=None): if DEBUG: self.announce(" setting options for '%s' command:" % command_name) - for (option, (source, value)) in option_dict.items(): + for option, (source, value) in option_dict.items(): if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, - source)) + self.announce(" {} = {} (from {})".format(option, value, source)) try: - bool_opts = [translate_longopt(o) - for o in command_obj.boolean_options] + bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] except AttributeError: bool_opts = [] try: @@ -910,7 +910,8 @@ def _set_command_options(self, command_obj, option_dict=None): else: raise DistutilsOptionError( "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) + % (source, command_name, option) + ) except ValueError as msg: raise DistutilsOptionError(msg) @@ -934,6 +935,7 @@ def reinitialize_command(self, command, reinit_subcommands=0): Returns the reinitialized command object. 
""" from distutils.cmd import Command + if not isinstance(command, Command): command_name = command command = self.get_command_obj(command_name) @@ -955,7 +957,7 @@ def reinitialize_command(self, command, reinit_subcommands=0): # -- Methods that operate on the Distribution ---------------------- - def announce(self, msg, level=log.INFO): + def announce(self, msg, level=logging.INFO): log.log(level, msg) def run_commands(self): @@ -1010,9 +1012,11 @@ def has_data_files(self): return self.data_files and len(self.data_files) > 0 def is_pure(self): - return (self.has_pure_modules() and - not self.has_ext_modules() and - not self.has_c_libraries()) + return ( + self.has_pure_modules() + and not self.has_ext_modules() + and not self.has_c_libraries() + ) # -- Metadata query methods ---------------------------------------- @@ -1021,19 +1025,35 @@ def is_pure(self): # to self.metadata.get_XXX. The actual code is in the # DistributionMetadata class, below. + class DistributionMetadata: """Dummy class to hold the distribution meta-data: name, version, author, and so forth. """ - _METHOD_BASENAMES = ("name", "version", "author", "author_email", - "maintainer", "maintainer_email", "url", - "license", "description", "long_description", - "keywords", "platforms", "fullname", "contact", - "contact_email", "classifiers", "download_url", - # PEP 314 - "provides", "requires", "obsoletes", - ) + _METHOD_BASENAMES = ( + "name", + "version", + "author", + "author_email", + "maintainer", + "maintainer_email", + "url", + "license", + "description", + "long_description", + "keywords", + "platforms", + "fullname", + "contact", + "contact_email", + "classifiers", + "download_url", + # PEP 314 + "provides", + "requires", + "obsoletes", + ) def __init__(self, path=None): if path is not None: @@ -1064,9 +1084,8 @@ def read_pkg_file(self, file): def _read_field(name): value = msg[name] - if value == 'UNKNOWN': - return None - return value + if value and value != "UNKNOWN": + return value def _read_list(name): values = msg.get_all(name, None) @@ -1111,37 +1130,42 @@ def _read_list(name): self.obsoletes = None def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: + """Write the PKG-INFO file into the release tree.""" + with open( + os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8' + ) as pkg_info: self.write_pkg_file(pkg_info) def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. 
- """ + """Write the PKG-INFO format data to a file object.""" version = '1.0' - if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): + if ( + self.provides + or self.requires + or self.obsoletes + or self.classifiers + or self.download_url + ): version = '1.1' + # required fields file.write('Metadata-Version: %s\n' % version) file.write('Name: %s\n' % self.get_name()) file.write('Version: %s\n' % self.get_version()) - file.write('Summary: %s\n' % self.get_description()) - file.write('Home-page: %s\n' % self.get_url()) - file.write('Author: %s\n' % self.get_contact()) - file.write('Author-email: %s\n' % self.get_contact_email()) - file.write('License: %s\n' % self.get_license()) - if self.download_url: - file.write('Download-URL: %s\n' % self.download_url) - long_desc = rfc822_escape(self.get_long_description()) - file.write('Description: %s\n' % long_desc) + def maybe_write(header, val): + if val: + file.write(f"{header}: {val}\n") - keywords = ','.join(self.get_keywords()) - if keywords: - file.write('Keywords: %s\n' % keywords) + # optional fields + maybe_write("Summary", self.get_description()) + maybe_write("Home-page", self.get_url()) + maybe_write("Author", self.get_contact()) + maybe_write("Author-email", self.get_contact_email()) + maybe_write("License", self.get_license()) + maybe_write("Download-URL", self.download_url) + maybe_write("Description", rfc822_escape(self.get_long_description() or "")) + maybe_write("Keywords", ",".join(self.get_keywords())) self._write_list(file, 'Platform', self.get_platforms()) self._write_list(file, 'Classifier', self.get_classifiers()) @@ -1152,8 +1176,9 @@ def write_pkg_file(self, file): self._write_list(file, 'Obsoletes', self.get_obsoletes()) def _write_list(self, file, name, values): + values = values or [] for value in values: - file.write('%s: %s\n' % (name, value)) + file.write('{}: {}\n'.format(name, value)) # -- Metadata query methods ---------------------------------------- @@ -1164,38 +1189,39 @@ def get_version(self): return self.version or "0.0.0" def get_fullname(self): - return "%s-%s" % (self.get_name(), self.get_version()) + return "{}-{}".format(self.get_name(), self.get_version()) def get_author(self): - return self.author or "UNKNOWN" + return self.author def get_author_email(self): - return self.author_email or "UNKNOWN" + return self.author_email def get_maintainer(self): - return self.maintainer or "UNKNOWN" + return self.maintainer def get_maintainer_email(self): - return self.maintainer_email or "UNKNOWN" + return self.maintainer_email def get_contact(self): - return self.maintainer or self.author or "UNKNOWN" + return self.maintainer or self.author def get_contact_email(self): - return self.maintainer_email or self.author_email or "UNKNOWN" + return self.maintainer_email or self.author_email def get_url(self): - return self.url or "UNKNOWN" + return self.url def get_license(self): - return self.license or "UNKNOWN" + return self.license + get_licence = get_license def get_description(self): - return self.description or "UNKNOWN" + return self.description def get_long_description(self): - return self.long_description or "UNKNOWN" + return self.long_description def get_keywords(self): return self.keywords or [] @@ -1204,7 +1230,7 @@ def set_keywords(self, value): self.keywords = _ensure_list(value, 'keywords') def get_platforms(self): - return self.platforms or ["UNKNOWN"] + return self.platforms def set_platforms(self, value): self.platforms = _ensure_list(value, 'platforms') 
@@ -1216,7 +1242,7 @@ def set_classifiers(self, value): self.classifiers = _ensure_list(value, 'classifiers') def get_download_url(self): - return self.download_url or "UNKNOWN" + return self.download_url # PEP 314 def get_requires(self): @@ -1224,6 +1250,7 @@ def get_requires(self): def set_requires(self, value): import distutils.versionpredicate + for v in value: distutils.versionpredicate.VersionPredicate(v) self.requires = list(value) @@ -1235,6 +1262,7 @@ def set_provides(self, value): value = [v.strip() for v in value] for v in value: import distutils.versionpredicate + distutils.versionpredicate.split_provision(v) self.provides = value @@ -1243,10 +1271,12 @@ def get_obsoletes(self): def set_obsoletes(self, value): import distutils.versionpredicate + for v in value: distutils.versionpredicate.VersionPredicate(v) self.obsoletes = list(value) + def fix_help_options(options): """Convert a 4-tuple 'help_options' list as found in various command classes to the 3-tuple form required by FancyGetopt. diff --git a/setuptools/_distutils/errors.py b/setuptools/_distutils/errors.py index 8b93059e19..626254c321 100644 --- a/setuptools/_distutils/errors.py +++ b/setuptools/_distutils/errors.py @@ -8,90 +8,120 @@ This module is safe to use in "from ... import *" mode; it only exports symbols whose names start with "Distutils" and end with "Error".""" -class DistutilsError (Exception): + +class DistutilsError(Exception): """The root of all Distutils evil.""" + pass -class DistutilsModuleError (DistutilsError): + +class DistutilsModuleError(DistutilsError): """Unable to load an expected module, or to find an expected class within some module (in particular, command modules and classes).""" + pass -class DistutilsClassError (DistutilsError): + +class DistutilsClassError(DistutilsError): """Some command class (or possibly distribution class, if anyone feels a need to subclass Distribution) is found not to be holding up its end of the bargain, ie. implementing some part of the "command "interface.""" + pass -class DistutilsGetoptError (DistutilsError): + +class DistutilsGetoptError(DistutilsError): """The option table provided to 'fancy_getopt()' is bogus.""" + pass -class DistutilsArgError (DistutilsError): + +class DistutilsArgError(DistutilsError): """Raised by fancy_getopt in response to getopt.error -- ie. an error in the command line usage.""" + pass -class DistutilsFileError (DistutilsError): + +class DistutilsFileError(DistutilsError): """Any problems in the filesystem: expected file not found, etc. Typically this is for problems that we detect before OSError could be raised.""" + pass -class DistutilsOptionError (DistutilsError): + +class DistutilsOptionError(DistutilsError): """Syntactic/semantic errors in command options, such as use of mutually conflicting options, or inconsistent options, badly-spelled values, etc. No distinction is made between option values originating in the setup script, the command line, config files, or what-have-you -- but if we *know* something originated in the setup script, we'll raise DistutilsSetupError instead.""" + pass -class DistutilsSetupError (DistutilsError): + +class DistutilsSetupError(DistutilsError): """For errors that can be definitely blamed on the setup script, such as invalid keyword arguments to 'setup()'.""" + pass -class DistutilsPlatformError (DistutilsError): + +class DistutilsPlatformError(DistutilsError): """We don't know how to do something on the current platform (but we do know how to do it on some platform) -- eg. 
trying to compile C files on a platform not supported by a CCompiler subclass.""" + pass -class DistutilsExecError (DistutilsError): + +class DistutilsExecError(DistutilsError): """Any problems executing an external program (such as the C compiler, when compiling C files).""" + pass -class DistutilsInternalError (DistutilsError): + +class DistutilsInternalError(DistutilsError): """Internal inconsistencies or impossibilities (obviously, this should never be seen if the code is working!).""" + pass -class DistutilsTemplateError (DistutilsError): + +class DistutilsTemplateError(DistutilsError): """Syntax error in a file list template.""" + class DistutilsByteCompileError(DistutilsError): """Byte compile error.""" + # Exception classes used by the CCompiler implementation classes -class CCompilerError (Exception): +class CCompilerError(Exception): """Some compile/link operation failed.""" -class PreprocessError (CCompilerError): + +class PreprocessError(CCompilerError): """Failure to preprocess one or more C/C++ files.""" -class CompileError (CCompilerError): + +class CompileError(CCompilerError): """Failure to compile one or more C/C++ source files.""" -class LibError (CCompilerError): + +class LibError(CCompilerError): """Failure to create a static library from one or more C/C++ object files.""" -class LinkError (CCompilerError): + +class LinkError(CCompilerError): """Failure to link one or more C/C++ object files into an executable or shared library file.""" -class UnknownFileError (CCompilerError): + +class UnknownFileError(CCompilerError): """Attempt to process an unknown file type.""" diff --git a/setuptools/_distutils/extension.py b/setuptools/_distutils/extension.py index c507da360a..6b8575de29 100644 --- a/setuptools/_distutils/extension.py +++ b/setuptools/_distutils/extension.py @@ -16,6 +16,7 @@ # import that large-ish module (indirectly, through distutils.core) in # order to do anything. + class Extension: """Just a collection of attributes that describes an extension module and everything needed to build it (hopefully in a portable @@ -83,27 +84,29 @@ class Extension: # When adding arguments to this constructor, be sure to update # setup_keywords in core.py. 
- def __init__(self, name, sources, - include_dirs=None, - define_macros=None, - undef_macros=None, - library_dirs=None, - libraries=None, - runtime_library_dirs=None, - extra_objects=None, - extra_compile_args=None, - extra_link_args=None, - export_symbols=None, - swig_opts = None, - depends=None, - language=None, - optional=None, - **kw # To catch unknown keywords - ): + def __init__( + self, + name, + sources, + include_dirs=None, + define_macros=None, + undef_macros=None, + library_dirs=None, + libraries=None, + runtime_library_dirs=None, + extra_objects=None, + extra_compile_args=None, + extra_link_args=None, + export_symbols=None, + swig_opts=None, + depends=None, + language=None, + optional=None, + **kw # To catch unknown keywords + ): if not isinstance(name, str): raise AssertionError("'name' must be a string") - if not (isinstance(sources, list) and - all(isinstance(v, str) for v in sources)): + if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)): raise AssertionError("'sources' must be a list of strings") self.name = name @@ -131,17 +134,17 @@ def __init__(self, name, sources, warnings.warn(msg) def __repr__(self): - return '<%s.%s(%r) at %#x>' % ( + return '<{}.{}({!r}) at {:#x}>'.format( self.__class__.__module__, self.__class__.__qualname__, self.name, - id(self)) + id(self), + ) -def read_setup_file(filename): +def read_setup_file(filename): # noqa: C901 """Reads a Setup file and returns Extension instances.""" - from distutils.sysconfig import (parse_makefile, expand_makefile_vars, - _variable_rx) + from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx from distutils.text_file import TextFile from distutils.util import split_quoted @@ -151,17 +154,22 @@ def read_setup_file(filename): # Second pass to gobble up the real content: lines of the form # ... [ ...] [ ...] [ ...] - file = TextFile(filename, - strip_comments=1, skip_blanks=1, join_lines=1, - lstrip_ws=1, rstrip_ws=1) + file = TextFile( + filename, + strip_comments=1, + skip_blanks=1, + join_lines=1, + lstrip_ws=1, + rstrip_ws=1, + ) try: extensions = [] while True: line = file.readline() - if line is None: # eof + if line is None: # eof break - if _variable_rx.match(line): # VAR=VALUE, handled in first pass + if _variable_rx.match(line): # VAR=VALUE, handled in first pass continue if line[0] == line[-1] == "*": @@ -188,7 +196,8 @@ def read_setup_file(filename): continue suffix = os.path.splitext(word)[1] - switch = word[0:2] ; value = word[2:] + switch = word[0:2] + value = word[2:] if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): # hmm, should we do something about C vs. C++ sources? @@ -199,14 +208,13 @@ def read_setup_file(filename): ext.include_dirs.append(value) elif switch == "-D": equals = value.find("=") - if equals == -1: # bare "-DFOO" -- no value + if equals == -1: # bare "-DFOO" -- no value ext.define_macros.append((value, None)) - else: # "-DFOO=blah" - ext.define_macros.append((value[0:equals], - value[equals+2:])) + else: # "-DFOO=blah" + ext.define_macros.append((value[0:equals], value[equals + 2 :])) elif switch == "-U": ext.undef_macros.append(value) - elif switch == "-C": # only here 'cause makesetup has it! + elif switch == "-C": # only here 'cause makesetup has it! 
ext.extra_compile_args.append(word) elif switch == "-l": ext.libraries.append(value) diff --git a/setuptools/_distutils/fancy_getopt.py b/setuptools/_distutils/fancy_getopt.py index 7d170dd277..3b887dc5a4 100644 --- a/setuptools/_distutils/fancy_getopt.py +++ b/setuptools/_distutils/fancy_getopt.py @@ -8,9 +8,11 @@ * options set attributes of a passed-in object """ -import sys, string, re +import sys +import string +import re import getopt -from distutils.errors import * +from .errors import DistutilsGetoptError, DistutilsArgError # Much like command_re in distutils.core, this is close to but not quite # the same as a Python NAME -- except, in the spirit of most GNU @@ -20,12 +22,13 @@ longopt_re = re.compile(r'^%s$' % longopt_pat) # For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) +neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat)) # This is used to translate long options to legitimate Python identifiers # (for use as attributes of some object). longopt_xlate = str.maketrans('-', '_') + class FancyGetopt: """Wrapper around the standard 'getopt()' module that provides some handy extra functionality: @@ -90,7 +93,8 @@ def set_option_table(self, option_table): def add_option(self, long_option, short_option=None, help_string=None): if long_option in self.option_index: raise DistutilsGetoptError( - "option conflict: already an option '%s'" % long_option) + "option conflict: already an option '%s'" % long_option + ) else: option = (long_option, short_option, help_string) self.option_table.append(option) @@ -109,13 +113,17 @@ def get_attr_name(self, long_option): def _check_alias_dict(self, aliases, what): assert isinstance(aliases, dict) - for (alias, opt) in aliases.items(): + for alias, opt in aliases.items(): if alias not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "option '%s' not defined") % (what, alias, alias)) + raise DistutilsGetoptError( + ("invalid %s '%s': " "option '%s' not defined") + % (what, alias, alias) + ) if opt not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "aliased option '%s' not defined") % (what, alias, opt)) + raise DistutilsGetoptError( + ("invalid %s '%s': " "aliased option '%s' not defined") + % (what, alias, opt) + ) def set_aliases(self, alias): """Set the aliases for this option parser.""" @@ -130,7 +138,7 @@ def set_negative_aliases(self, negative_alias): self._check_alias_dict(negative_alias, "negative alias") self.negative_alias = negative_alias - def _grok_option_table(self): + def _grok_option_table(self): # noqa: C901 """Populate the various data structures that keep tabs on the option table. Called by 'getopt()' before it can do anything worthwhile. 
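
One detail of fancy_getopt worth calling out: long option names double as attribute names on the target object via the longopt_xlate translation table defined above, with a trailing '=' serving only as a "takes an argument" marker. A tiny sketch folding both steps together (in the real code, _grok_option_table strips the '=' before get_attr_name() translates the hyphens):

    longopt_xlate = str.maketrans('-', '_')

    def attr_name(long_option):
        # drop the optional "takes an argument" marker, then hyphens -> underscores
        return long_option.rstrip('=').translate(longopt_xlate)

    assert attr_name('dry-run') == 'dry_run'
    assert attr_name('command-packages=') == 'command_packages'
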
@@ -149,23 +157,27 @@ def _grok_option_table(self): else: # the option table is part of the code, so simply # assert that it is correct - raise ValueError("invalid option tuple: %r" % (option,)) + raise ValueError("invalid option tuple: {!r}".format(option)) # Type- and value-check the option names if not isinstance(long, str) or len(long) < 2: - raise DistutilsGetoptError(("invalid long option '%s': " - "must be a string of length >= 2") % long) + raise DistutilsGetoptError( + ("invalid long option '%s': " "must be a string of length >= 2") + % long + ) - if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise DistutilsGetoptError("invalid short option '%s': " - "must a single character or None" % short) + if not ((short is None) or (isinstance(short, str) and len(short) == 1)): + raise DistutilsGetoptError( + "invalid short option '%s': " + "must a single character or None" % short + ) self.repeat[long] = repeat self.long_opts.append(long) - if long[-1] == '=': # option takes an argument? - if short: short = short + ':' + if long[-1] == '=': # option takes an argument? + if short: + short = short + ':' long = long[0:-1] self.takes_arg[long] = 1 else: @@ -175,11 +187,11 @@ def _grok_option_table(self): if alias_to is not None: if self.takes_arg[alias_to]: raise DistutilsGetoptError( - "invalid negative alias '%s': " - "aliased option '%s' takes a value" - % (long, alias_to)) + "invalid negative alias '%s': " + "aliased option '%s' takes a value" % (long, alias_to) + ) - self.long_opts[-1] = long # XXX redundant?! + self.long_opts[-1] = long # XXX redundant?! self.takes_arg[long] = 0 # If this is an alias option, make sure its "takes arg" flag is @@ -188,10 +200,10 @@ def _grok_option_table(self): if alias_to is not None: if self.takes_arg[long] != self.takes_arg[alias_to]: raise DistutilsGetoptError( - "invalid alias '%s': inconsistent with " - "aliased option '%s' (one of them takes a value, " - "the other doesn't" - % (long, alias_to)) + "invalid alias '%s': inconsistent with " + "aliased option '%s' (one of them takes a value, " + "the other doesn't" % (long, alias_to) + ) # Now enforce some bondage on the long option name, so we can # later translate it to an attribute name on some object. Have @@ -199,15 +211,16 @@ def _grok_option_table(self): # '='. if not longopt_re.match(long): raise DistutilsGetoptError( - "invalid long option name '%s' " - "(must be letters, numbers, hyphens only" % long) + "invalid long option name '%s' " + "(must be letters, numbers, hyphens only" % long + ) self.attr_name[long] = self.get_attr_name(long) if short: self.short_opts.append(short) self.short2long[short[0]] = long - def getopt(self, args=None, object=None): + def getopt(self, args=None, object=None): # noqa: C901 """Parse command-line options in args. Store as attributes on object. If 'args' is None or not supplied, uses 'sys.argv[1:]'. If @@ -235,7 +248,7 @@ def getopt(self, args=None, object=None): raise DistutilsArgError(msg) for opt, val in opts: - if len(opt) == 2 and opt[0] == '-': # it's a short option + if len(opt) == 2 and opt[0] == '-': # it's a short option opt = self.short2long[opt[1]] else: assert len(opt) > 2 and opt[:2] == '--' @@ -245,7 +258,7 @@ def getopt(self, args=None, object=None): if alias: opt = alias - if not self.takes_arg[opt]: # boolean option? + if not self.takes_arg[opt]: # boolean option? 
assert val == '', "boolean option can't have value" alias = self.negative_alias.get(opt) if alias: @@ -278,7 +291,7 @@ def get_option_order(self): else: return self.option_order - def generate_help(self, header=None): + def generate_help(self, header=None): # noqa: C901 """Generate help text (a list of strings, one per suggested line of output) from the option table for this FancyGetopt object. """ @@ -290,15 +303,15 @@ def generate_help(self, header=None): for option in self.option_table: long = option[0] short = option[1] - l = len(long) + ell = len(long) if long[-1] == '=': - l = l - 1 + ell = ell - 1 if short is not None: - l = l + 5 # " (-x)" where short == 'x' - if l > max_opt: - max_opt = l + ell = ell + 5 # " (-x)" where short == 'x' + if ell > max_opt: + max_opt = ell - opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter + opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter # Typical help block looks like this: # --foo controls foonabulation @@ -346,15 +359,14 @@ def generate_help(self, header=None): # Case 2: we have a short option, so we have to include it # just after the long option else: - opt_names = "%s (-%s)" % (long, short) + opt_names = "{} (-{})".format(long, short) if text: - lines.append(" --%-*s %s" % - (max_opt, opt_names, text[0])) + lines.append(" --%-*s %s" % (max_opt, opt_names, text[0])) else: lines.append(" --%-*s" % opt_names) - for l in text[1:]: - lines.append(big_indent + l) + for ell in text[1:]: + lines.append(big_indent + ell) return lines def print_help(self, header=None, file=None): @@ -370,7 +382,8 @@ def fancy_getopt(options, negative_opt, object, args): return parser.getopt(args, object) -WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} +WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace} + def wrap_text(text, width): """wrap_text(text : string, width : int) -> [string] @@ -386,26 +399,26 @@ def wrap_text(text, width): text = text.expandtabs() text = text.translate(WS_TRANS) chunks = re.split(r'( +|-+)', text) - chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings + chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings lines = [] while chunks: - cur_line = [] # list of chunks (to-be-joined) - cur_len = 0 # length of current line + cur_line = [] # list of chunks (to-be-joined) + cur_len = 0 # length of current line while chunks: - l = len(chunks[0]) - if cur_len + l <= width: # can squeeze (at least) this chunk in + ell = len(chunks[0]) + if cur_len + ell <= width: # can squeeze (at least) this chunk in cur_line.append(chunks[0]) del chunks[0] - cur_len = cur_len + l - else: # this line is full + cur_len = cur_len + ell + else: # this line is full # drop last chunk if all space if cur_line and cur_line[-1][0] == ' ': del cur_line[-1] break - if chunks: # any chunks left to process? + if chunks: # any chunks left to process? 
# if the current line is still empty, then we had a single # chunk that's too big too fit on a line -- so we break # down and break it up at the line width diff --git a/setuptools/_distutils/file_util.py b/setuptools/_distutils/file_util.py index b3fee35a6c..7c69906646 100644 --- a/setuptools/_distutils/file_util.py +++ b/setuptools/_distutils/file_util.py @@ -4,16 +4,14 @@ """ import os -from distutils.errors import DistutilsFileError -from distutils import log +from .errors import DistutilsFileError +from ._log import log # for generating verbose output in 'copy_file()' -_copy_action = { None: 'copying', - 'hard': 'hard linking', - 'sym': 'symbolically linking' } +_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'} -def _copy_file_contents(src, dst, buffer_size=16*1024): +def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901 """Copy the file 'src' to 'dst'; both must be filenames. Any error opening either file, reading from 'src', or writing to 'dst', raises DistutilsFileError. Data is read/written in chunks of 'buffer_size' @@ -28,27 +26,30 @@ def _copy_file_contents(src, dst, buffer_size=16*1024): try: fsrc = open(src, 'rb') except OSError as e: - raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) + raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror)) if os.path.exists(dst): try: os.unlink(dst) except OSError as e: raise DistutilsFileError( - "could not delete '%s': %s" % (dst, e.strerror)) + "could not delete '{}': {}".format(dst, e.strerror) + ) try: fdst = open(dst, 'wb') except OSError as e: raise DistutilsFileError( - "could not create '%s': %s" % (dst, e.strerror)) + "could not create '{}': {}".format(dst, e.strerror) + ) while True: try: buf = fsrc.read(buffer_size) except OSError as e: raise DistutilsFileError( - "could not read from '%s': %s" % (src, e.strerror)) + "could not read from '{}': {}".format(src, e.strerror) + ) if not buf: break @@ -57,15 +58,25 @@ def _copy_file_contents(src, dst, buffer_size=16*1024): fdst.write(buf) except OSError as e: raise DistutilsFileError( - "could not write to '%s': %s" % (dst, e.strerror)) + "could not write to '{}': {}".format(dst, e.strerror) + ) finally: if fdst: fdst.close() if fsrc: fsrc.close() -def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, - link=None, verbose=1, dry_run=0): + +def copy_file( # noqa: C901 + src, + dst, + preserve_mode=1, + preserve_times=1, + update=0, + link=None, + verbose=1, + dry_run=0, +): """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is copied there with the same name; otherwise, it must be a filename. (If the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' @@ -102,7 +113,8 @@ def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, if not os.path.isfile(src): raise DistutilsFileError( - "can't copy '%s': doesn't exist or not a regular file" % src) + "can't copy '%s': doesn't exist or not a regular file" % src + ) if os.path.isdir(dst): dir = dst @@ -163,10 +175,7 @@ def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, # XXX I suspect this is Unix-specific -- need porting help! -def move_file (src, dst, - verbose=1, - dry_run=0): - +def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901 """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will be moved into it with the same name; otherwise, 'src' is just renamed to 'dst'. Return the new full name of the file. 
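
The file_util.py hunks above reformat _copy_file_contents() without changing its shape: a fixed-size read/write loop with a DistutilsFileError raised at each failing step. A compressed sketch of the same loop, assuming the per-step error messages are not needed (context managers replace the explicit close() bookkeeping the vendored version keeps):

    def copy_contents(src, dst, buffer_size=16 * 1024):
        # chunked binary copy: read at most buffer_size bytes per iteration
        with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
            while True:
                buf = fsrc.read(buffer_size)
                if not buf:  # EOF
                    break
                fdst.write(buf)
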
@@ -190,13 +199,13 @@ def move_file (src, dst, dst = os.path.join(dst, basename(src)) elif exists(dst): raise DistutilsFileError( - "can't move '%s': destination '%s' already exists" % - (src, dst)) + "can't move '{}': destination '{}' already exists".format(src, dst) + ) if not isdir(dirname(dst)): raise DistutilsFileError( - "can't move '%s': destination '%s' not a valid path" % - (src, dst)) + "can't move '{}': destination '{}' not a valid path".format(src, dst) + ) copy_it = False try: @@ -207,7 +216,8 @@ def move_file (src, dst, copy_it = True else: raise DistutilsFileError( - "couldn't move '%s' to '%s': %s" % (src, dst, msg)) + "couldn't move '{}' to '{}': {}".format(src, dst, msg) + ) if copy_it: copy_file(src, dst, verbose=verbose) @@ -220,13 +230,13 @@ def move_file (src, dst, except OSError: pass raise DistutilsFileError( - "couldn't move '%s' to '%s' by copy/delete: " - "delete '%s' failed: %s" - % (src, dst, src, msg)) + "couldn't move '%s' to '%s' by copy/delete: " + "delete '%s' failed: %s" % (src, dst, src, msg) + ) return dst -def write_file (filename, contents): +def write_file(filename, contents): """Create a file with the specified name and write 'contents' (a sequence of strings without line terminators) to it. """ diff --git a/setuptools/_distutils/filelist.py b/setuptools/_distutils/filelist.py index 82a77384dc..6dadf923d7 100644 --- a/setuptools/_distutils/filelist.py +++ b/setuptools/_distutils/filelist.py @@ -9,9 +9,9 @@ import fnmatch import functools -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsInternalError -from distutils import log +from .util import convert_path +from .errors import DistutilsTemplateError, DistutilsInternalError +from ._log import log class FileList: @@ -46,6 +46,7 @@ def debug_print(self, msg): DISTUTILS_DEBUG environment variable) flag is true. """ from distutils.debug import DEBUG + if DEBUG: print(msg) @@ -80,29 +81,31 @@ def _parse_template_line(self, line): patterns = dir = dir_pattern = None - if action in ('include', 'exclude', - 'global-include', 'global-exclude'): + if action in ('include', 'exclude', 'global-include', 'global-exclude'): if len(words) < 2: raise DistutilsTemplateError( - "'%s' expects ..." % action) + "'%s' expects ..." % action + ) patterns = [convert_path(w) for w in words[1:]] elif action in ('recursive-include', 'recursive-exclude'): if len(words) < 3: raise DistutilsTemplateError( - "'%s' expects ..." % action) + "'%s' expects ..." % action + ) dir = convert_path(words[1]) patterns = [convert_path(w) for w in words[2:]] elif action in ('graft', 'prune'): if len(words) != 2: raise DistutilsTemplateError( - "'%s' expects a single " % action) + "'%s' expects a single " % action + ) dir_pattern = convert_path(words[1]) else: raise DistutilsTemplateError("unknown action '%s'" % action) return (action, patterns, dir, dir_pattern) - def process_template_line(self, line): + def process_template_line(self, line): # noqa: C901 # Parse the line: split it up, make sure the right number of words # is there, and return the relevant words. 'action' is always # defined: it's the first word of the line. 
Which of the other @@ -117,65 +120,82 @@ def process_template_line(self, line): self.debug_print("include " + ' '.join(patterns)) for pattern in patterns: if not self.include_pattern(pattern, anchor=1): - log.warn("warning: no files found matching '%s'", - pattern) + log.warning("warning: no files found matching '%s'", pattern) elif action == 'exclude': self.debug_print("exclude " + ' '.join(patterns)) for pattern in patterns: if not self.exclude_pattern(pattern, anchor=1): - log.warn(("warning: no previously-included files " - "found matching '%s'"), pattern) + log.warning( + ( + "warning: no previously-included files " + "found matching '%s'" + ), + pattern, + ) elif action == 'global-include': self.debug_print("global-include " + ' '.join(patterns)) for pattern in patterns: if not self.include_pattern(pattern, anchor=0): - log.warn(("warning: no files found matching '%s' " - "anywhere in distribution"), pattern) + log.warning( + ( + "warning: no files found matching '%s' " + "anywhere in distribution" + ), + pattern, + ) elif action == 'global-exclude': self.debug_print("global-exclude " + ' '.join(patterns)) for pattern in patterns: if not self.exclude_pattern(pattern, anchor=0): - log.warn(("warning: no previously-included files matching " - "'%s' found anywhere in distribution"), - pattern) + log.warning( + ( + "warning: no previously-included files matching " + "'%s' found anywhere in distribution" + ), + pattern, + ) elif action == 'recursive-include': - self.debug_print("recursive-include %s %s" % - (dir, ' '.join(patterns))) + self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns))) for pattern in patterns: if not self.include_pattern(pattern, prefix=dir): msg = ( - "warning: no files found matching '%s' " - "under directory '%s'" + "warning: no files found matching '%s' " "under directory '%s'" ) - log.warn(msg, pattern, dir) + log.warning(msg, pattern, dir) elif action == 'recursive-exclude': - self.debug_print("recursive-exclude %s %s" % - (dir, ' '.join(patterns))) + self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns))) for pattern in patterns: if not self.exclude_pattern(pattern, prefix=dir): - log.warn(("warning: no previously-included files matching " - "'%s' found under directory '%s'"), - pattern, dir) + log.warning( + ( + "warning: no previously-included files matching " + "'%s' found under directory '%s'" + ), + pattern, + dir, + ) elif action == 'graft': self.debug_print("graft " + dir_pattern) if not self.include_pattern(None, prefix=dir_pattern): - log.warn("warning: no directories found matching '%s'", - dir_pattern) + log.warning("warning: no directories found matching '%s'", dir_pattern) elif action == 'prune': self.debug_print("prune " + dir_pattern) if not self.exclude_pattern(None, prefix=dir_pattern): - log.warn(("no previously-included directories found " - "matching '%s'"), dir_pattern) + log.warning( + ("no previously-included directories found " "matching '%s'"), + dir_pattern, + ) else: raise DistutilsInternalError( - "this cannot happen: invalid action '%s'" % action) + "this cannot happen: invalid action '%s'" % action + ) # Filtering/selection methods @@ -207,8 +227,7 @@ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): # XXX docstring lying about what the special chars are? 
files_found = False pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("include_pattern: applying regex r'%s'" % - pattern_re.pattern) + self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern) # delayed loading of allfiles list if self.allfiles is None: @@ -221,8 +240,7 @@ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): files_found = True return files_found - def exclude_pattern( - self, pattern, anchor=1, prefix=None, is_regex=0): + def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): """Remove strings (presumably filenames) from 'files' that match 'pattern'. Other parameters are the same as for 'include_pattern()', above. @@ -231,9 +249,8 @@ def exclude_pattern( """ files_found = False pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("exclude_pattern: applying regex r'%s'" % - pattern_re.pattern) - for i in range(len(self.files)-1, -1, -1): + self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern) + for i in range(len(self.files) - 1, -1, -1): if pattern_re.search(self.files[i]): self.debug_print(" removing " + self.files[i]) del self.files[i] @@ -243,15 +260,14 @@ def exclude_pattern( # Utility functions + def _find_all_simple(path): """ Find all files under 'path' """ all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True)) results = ( - os.path.join(base, file) - for base, dirs, files in all_unique - for file in files + os.path.join(base, file) for base, dirs, files in all_unique for file in files ) return filter(os.path.isfile, results) @@ -262,6 +278,7 @@ class _UniqueDirs(set): avoiding infinite recursion. Ref https://bugs.python.org/issue44497. """ + def __call__(self, walk_item): """ Given an item from an os.walk result, determine @@ -341,15 +358,14 @@ def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): if prefix is not None: prefix_re = glob_to_re(prefix) assert prefix_re.startswith(start) and prefix_re.endswith(end) - prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)] sep = os.sep if os.sep == '\\': sep = r'\\' - pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] - pattern_re = r'%s\A%s%s.*%s%s' % ( - start, prefix_re, sep, pattern_re, end) - else: # no prefix -- respect anchor flag + pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)] + pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end) + else: # no prefix -- respect anchor flag if anchor: - pattern_re = r'%s\A%s' % (start, pattern_re[len(start):]) + pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :]) return re.compile(pattern_re) diff --git a/setuptools/_distutils/log.py b/setuptools/_distutils/log.py index a68b156b5b..239f315850 100644 --- a/setuptools/_distutils/log.py +++ b/setuptools/_distutils/log.py @@ -1,81 +1,57 @@ -"""A simple log mechanism styled after PEP 282.""" +""" +A simple log mechanism styled after PEP 282. -# The class here is styled after PEP 282 so that it could later be -# replaced with a standard Python logging implementation. +Retained for compatibility and should not be used. 
+""" -import sys +import logging +import warnings -DEBUG = 1 -INFO = 2 -WARN = 3 -ERROR = 4 -FATAL = 5 +from ._log import log as _global_log -class Log: +DEBUG = logging.DEBUG +INFO = logging.INFO +WARN = logging.WARN +ERROR = logging.ERROR +FATAL = logging.FATAL - def __init__(self, threshold=WARN): - self.threshold = threshold - - def _log(self, level, msg, args): - if level not in (DEBUG, INFO, WARN, ERROR, FATAL): - raise ValueError('%s wrong log level' % str(level)) - - if level >= self.threshold: - if args: - msg = msg % args - if level in (WARN, ERROR, FATAL): - stream = sys.stderr - else: - stream = sys.stdout - try: - stream.write('%s\n' % msg) - except UnicodeEncodeError: - # emulate backslashreplace error handler - encoding = stream.encoding - msg = msg.encode(encoding, "backslashreplace").decode(encoding) - stream.write('%s\n' % msg) - stream.flush() - - def log(self, level, msg, *args): - self._log(level, msg, args) - - def debug(self, msg, *args): - self._log(DEBUG, msg, args) - - def info(self, msg, *args): - self._log(INFO, msg, args) - - def warn(self, msg, *args): - self._log(WARN, msg, args) - - def error(self, msg, *args): - self._log(ERROR, msg, args) - - def fatal(self, msg, *args): - self._log(FATAL, msg, args) - - -_global_log = Log() log = _global_log.log debug = _global_log.debug info = _global_log.info -warn = _global_log.warn +warn = _global_log.warning error = _global_log.error fatal = _global_log.fatal def set_threshold(level): - # return the old threshold for use from tests - old = _global_log.threshold - _global_log.threshold = level - return old + orig = _global_log.level + _global_log.setLevel(level) + return orig def set_verbosity(v): if v <= 0: - set_threshold(WARN) + set_threshold(logging.WARN) elif v == 1: - set_threshold(INFO) + set_threshold(logging.INFO) elif v >= 2: - set_threshold(DEBUG) + set_threshold(logging.DEBUG) + + +class Log(logging.Logger): + """distutils.log.Log is deprecated, please use an alternative from `logging`.""" + + def __init__(self, threshold=WARN): + warnings.warn(Log.__doc__) # avoid DeprecationWarning to ensure warn is shown + super().__init__(__name__, level=threshold) + + @property + def threshold(self): + return self.level + + @threshold.setter + def threshold(self, level): + self.setLevel(level) + + warn = logging.Logger.warning diff --git a/setuptools/_distutils/msvc9compiler.py b/setuptools/_distutils/msvc9compiler.py index 6b6273836e..f9f9f2d844 100644 --- a/setuptools/_distutils/msvc9compiler.py +++ b/setuptools/_distutils/msvc9compiler.py @@ -16,26 +16,41 @@ import subprocess import sys import re - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform +import warnings + +from .errors import ( + DistutilsExecError, + DistutilsPlatformError, + CompileError, + LibError, + LinkError, +) +from .ccompiler import CCompiler, gen_lib_options +from ._log import log +from .util import get_platform import winreg +warnings.warn( + "msvc9compiler is deprecated and slated to be removed " + "in the future. 
Please discontinue use or file an issue " + "with pypa/distutils describing your use case.", + DeprecationWarning, +) + RegOpenKeyEx = winreg.OpenKeyEx RegEnumKey = winreg.EnumKey RegEnumValue = winreg.EnumValue RegError = winreg.error -HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) +HKEYS = ( + winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT, +) -NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) +NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32 if NATIVE_WIN64: # Visual C++ is a 32-bit application, so we need to look in # the corresponding registry branch, if we're running a @@ -52,13 +67,13 @@ # 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is # the param to cross-compile on x86 targeting amd64.) PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'amd64', + 'win32': 'x86', + 'win-amd64': 'amd64', } + class Reg: - """Helper class to read values from the registry - """ + """Helper class to read values from the registry""" def get_value(cls, path, key): for base in HKEYS: @@ -66,6 +81,7 @@ def get_value(cls, path, key): if d and key in d: return d[key] raise KeyError(key) + get_value = classmethod(get_value) def read_keys(cls, base, key): @@ -84,6 +100,7 @@ def read_keys(cls, base, key): L.append(k) i += 1 return L + read_keys = classmethod(read_keys) def read_values(cls, base, key): @@ -106,6 +123,7 @@ def read_values(cls, base, key): d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) i += 1 return d + read_values = classmethod(read_values) def convert_mbcs(s): @@ -116,10 +134,11 @@ def convert_mbcs(s): except UnicodeError: pass return s + convert_mbcs = staticmethod(convert_mbcs) -class MacroExpander: +class MacroExpander: def __init__(self, version): self.macros = {} self.vsbase = VS_BASE % version @@ -134,16 +153,16 @@ def load_macros(self, version): self.set_macro("FrameworkDir", NET_BASE, "installroot") try: if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, - "sdkinstallrootv2.0") + self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0") else: raise KeyError("sdkinstallrootv2.0") except KeyError: raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; + """Python was built with Visual Studio 2008; extensions must be built with a compiler than can generate compatible binaries. Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" + ) if version >= 9.0: self.set_macro("FrameworkVersion", self.vsbase, "clr version") @@ -156,7 +175,7 @@ def load_macros(self, version): except RegError: continue key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"%s\%s" % (p, key)) + d = Reg.get_value(base, r"{}\{}".format(p, key)) self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): @@ -164,6 +183,7 @@ def sub(self, s): s = s.replace(k, v) return s + def get_build_version(): """Return the version of MSVC that was used to build Python. @@ -189,6 +209,7 @@ def get_build_version(): # else we don't know what version of the compiler this is return None + def normalize_and_reduce_paths(paths): """Return a list of normalized paths with duplicates removed. 
@@ -203,9 +224,9 @@ def normalize_and_reduce_paths(paths): reduced_paths.append(np) return reduced_paths + def removeDuplicates(variable): - """Remove duplicate values of an environment variable. - """ + """Remove duplicate values of an environment variable.""" oldList = variable.split(os.pathsep) newList = [] for i in oldList: @@ -214,6 +235,7 @@ def removeDuplicates(variable): newVariable = os.pathsep.join(newList) return newVariable + def find_vcvarsall(version): """Find the vcvarsall.bat file @@ -222,8 +244,7 @@ def find_vcvarsall(version): """ vsbase = VS_BASE % version try: - productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, - "productdir") + productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir") except KeyError: log.debug("Unable to find productdir in registry") productdir = None @@ -249,9 +270,9 @@ def find_vcvarsall(version): log.debug("Unable to find vcvarsall.bat") return None + def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment - """ + """Launch vcvarsall.bat and read the settings from its environment""" vcvarsall = find_vcvarsall(version) interesting = {"include", "lib", "libpath", "path"} result = {} @@ -259,9 +280,11 @@ def query_vcvarsall(version, arch="x86"): if vcvarsall is None: raise DistutilsPlatformError("Unable to find vcvarsall.bat") log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + popen = subprocess.Popen( + '"{}" {} & set'.format(vcvarsall, arch), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) try: stdout, stderr = popen.communicate() if popen.wait() != 0: @@ -289,13 +312,15 @@ def query_vcvarsall(version, arch="x86"): return result + # More globals VERSION = get_build_version() # MACROS = MacroExpander(VERSION) -class MSVCCompiler(CCompiler) : + +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" + as defined by the CCompiler abstract class.""" compiler_type = 'msvc' @@ -314,8 +339,7 @@ class MSVCCompiler(CCompiler) : # Needed for the filename generation methods provided by the # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) + src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' @@ -331,23 +355,30 @@ def __init__(self, verbose=0, dry_run=0, force=0): self.__paths = [] # target platform (.plat_name is consistent with 'bdist') self.plat_name = None - self.__arch = None # deprecated name + self.__arch = None # deprecated name self.initialized = False - def initialize(self, plat_name=None): + def initialize(self, plat_name=None): # noqa: C901 # multi-init means we would need to check platform same each time... assert not self.initialized, "don't init multiple times" if self.__version < 8.0: - raise DistutilsPlatformError("VC %0.1f is not supported by this module" % self.__version) + raise DistutilsPlatformError( + "VC %0.1f is not supported by this module" % self.__version + ) if plat_name is None: plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. 
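# removeDuplicates() above keeps only the first occurrence of each entry in
# a PATH-like environment variable while preserving order. A standalone
# sketch of the same idiom (toy input; not the distutils function itself):
import os

def dedupe_pathsep(value):
    """Drop repeated entries, keeping first-seen order."""
    seen = []
    for entry in value.split(os.pathsep):
        if entry not in seen:  # linear scan preserves original order
            seen.append(entry)
    return os.pathsep.join(seen)

sample = os.pathsep.join(['vc/bin', 'sdk/bin', 'vc/bin'])
assert dedupe_pathsep(sample) == os.pathsep.join(['vc/bin', 'sdk/bin'])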
ok_plats = 'win32', 'win-amd64' if plat_name not in ok_plats: - raise DistutilsPlatformError("--plat-name must be one of %s" % - (ok_plats,)) - - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + raise DistutilsPlatformError( + "--plat-name must be one of {}".format(ok_plats) + ) + + if ( + "DISTUTILS_USE_SDK" in os.environ + and "MSSdk" in os.environ + and self.find_exe("cl.exe") + ): # Assume that the SDK set up everything alright; don't try to be # smarter self.cc = "cl.exe" @@ -360,13 +391,14 @@ def initialize(self, plat_name=None): # to cross compile, you use 'x86_amd64'. # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross # compile use 'x86' (ie, it runs the x86 compiler directly) - if plat_name == get_platform() or plat_name == 'win32': + if plat_name in (get_platform(), 'win32'): # native build or cross-compile to win32 plat_spec = PLAT_TO_VCVARS[plat_name] else: # cross compile from win32 -> some 64bit - plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ - PLAT_TO_VCVARS[plat_name] + plat_spec = ( + PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name] + ) vc_env = query_vcvarsall(VERSION, plat_spec) @@ -375,18 +407,19 @@ def initialize(self, plat_name=None): os.environ['include'] = vc_env['include'] if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) + raise DistutilsPlatformError( + "Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." % self.__product + ) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - #self.set_path_env_var('lib') - #self.set_path_env_var('include') + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler + # self.set_path_env_var('lib') + # self.set_path_env_var('include') # extend the MSVC path with the current path try: @@ -399,71 +432,82 @@ def initialize(self, plat_name=None): self.preprocess_options = None if self.__arch == "x86": - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/Z7', + '/D_DEBUG', + ] else: # Win64 - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/GS-', + '/Z7', + '/D_DEBUG', + ] self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] + self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] + self.ldflags_static = ['/nologo'] self.initialized = True # -- Worker methods ------------------------------------------------ - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): + def 
object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file - if output_dir is None: output_dir = '' + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / + (base, ext) = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base) :] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) + raise CompileError("Don't know how to compile %s" % src_name) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - + def compile( # noqa: C901 + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): if not self.initialized: self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) + compile_info = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] - compile_opts.append ('/c') + compile_opts.append('/c') if debug: compile_opts.extend(self.compile_options_debug) else: @@ -489,8 +533,7 @@ def compile(self, sources, input_opt = src output_opt = "/fo" + obj try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) + self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) except DistutilsExecError as msg: raise CompileError(msg) continue @@ -510,50 +553,47 @@ def compile(self, sources, rc_dir = os.path.dirname(obj) try: # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') + self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext(os.path.basename(src)) + rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) + self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) except DistutilsExecError as msg: raise CompileError(msg) continue else: # how to handle this file? 
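# The .mc branch of compile() above turns a message-compiler source into a
# binary resource in two spawned steps. A sketch of the argv lists involved,
# mirroring the spawn() calls shown; the file names are hypothetical and
# nothing is executed here:
import os

src = os.path.join('src', 'app.mc')            # hypothetical input
obj = os.path.join('build', 'app.res')
h_dir = os.path.dirname(src)
rc_dir = os.path.dirname(obj)
base, _ = os.path.splitext(os.path.basename(src))
rc_file = os.path.join(rc_dir, base + '.rc')

step1 = ['mc.exe', '-h', h_dir, '-r', rc_dir, src]  # .mc -> .rc and .h
step2 = ['rc.exe', '/fo' + obj, rc_file]            # .rc -> .res
print(step1, step2, sep='\n')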
- raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) + raise CompileError( + "Don't know how to compile {} to {}".format(src, obj) + ) output_opt = "/Fo" + obj try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) + self.spawn( + [self.cc] + + compile_opts + + pp_opts + + [input_opt, output_opt] + + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) return objects - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: - pass # XXX what goes here? + pass # XXX what goes here? try: self.spawn([self.lib] + lib_args) except DistutilsExecError as msg: @@ -561,36 +601,35 @@ def create_static_lib(self, else: log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - + def link( # noqa: C901 + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) (libraries, library_dirs, runtime_library_dirs) = fixed_args if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) + self.warn( + "I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs) + ) - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -607,11 +646,12 @@ def link(self, ldflags = self.ldflags_shared export_opts = [] - for sym in (export_symbols or []): + for sym in export_symbols or []: export_opts.append("/EXPORT:" + sym) - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) + ld_args = ( + ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] + ) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. 
The .lib files may even be @@ -621,11 +661,10 @@ def link(self, build_temp = os.path.dirname(objects[0]) if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + os.path.basename(output_filename) + ) + implib_file = os.path.join(build_temp, self.library_filename(dll_name)) + ld_args.append('/IMPLIB:' + implib_file) self.manifest_setup_ldargs(output_filename, build_temp, ld_args) @@ -648,10 +687,9 @@ def link(self, mfinfo = self.manifest_get_embed_info(target_desc, ld_args) if mfinfo is not None: mffilename, mfid = mfinfo - out_arg = '-outputresource:%s;%s' % (output_filename, mfid) + out_arg = '-outputresource:{};{}'.format(output_filename, mfid) try: - self.spawn(['mt.exe', '-nologo', '-manifest', - mffilename, out_arg]) + self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) except DistutilsExecError as msg: raise LinkError(msg) else: @@ -665,8 +703,8 @@ def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): # Ask the linker to generate the manifest in the temp dir, so # we can check it, and possibly embed it, later. temp_manifest = os.path.join( - build_temp, - os.path.basename(output_filename) + ".manifest") + build_temp, os.path.basename(output_filename) + ".manifest" + ) ld_args.append('/MANIFESTFILE:' + temp_manifest) def manifest_get_embed_info(self, target_desc, ld_args): @@ -709,9 +747,10 @@ def _remove_visual_c_ref(self, manifest_file): finally: manifest_f.close() pattern = re.compile( - r"""|)""", - re.DOTALL) + re.DOTALL, + ) manifest_buf = re.sub(pattern, "", manifest_buf) pattern = r"\s*" manifest_buf = re.sub(pattern, "", manifest_buf) @@ -719,7 +758,9 @@ def _remove_visual_c_ref(self, manifest_file): # don't want a manifest embedded. pattern = re.compile( r"""|)""", re.DOTALL) + r""".*?(?:/>|)""", + re.DOTALL, + ) if re.search(pattern, manifest_buf) is None: return None @@ -741,12 +782,12 @@ def library_dir_option(self, dir): def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") + "don't know how to set runtime library search path for MSVC++" + ) def library_option(self, lib): return self.library_filename(lib) - def find_library_file(self, dirs, lib, debug=0): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. 
@@ -756,7 +797,7 @@ def find_library_file(self, dirs, lib, debug=0): try_names = [lib] for dir in dirs: for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) + libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: @@ -781,7 +822,7 @@ def find_exe(self, exe): # didn't find it; try existing path for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) + fn = os.path.join(os.path.abspath(p), exe) if os.path.isfile(fn): return fn diff --git a/setuptools/_distutils/msvccompiler.py b/setuptools/_distutils/msvccompiler.py index e1367b8918..c3823e257e 100644 --- a/setuptools/_distutils/msvccompiler.py +++ b/setuptools/_distutils/msvccompiler.py @@ -8,13 +8,18 @@ # hacked by Robin Becker and Thomas Heller to do a better job of # finding DevStudio (through the registry) -import sys, os -from distutils.errors import \ - DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import \ - CCompiler, gen_lib_options -from distutils import log +import sys +import os +import warnings +from .errors import ( + DistutilsExecError, + DistutilsPlatformError, + CompileError, + LibError, + LinkError, +) +from .ccompiler import CCompiler, gen_lib_options +from ._log import log _can_read_reg = False try: @@ -32,6 +37,7 @@ try: import win32api import win32con + _can_read_reg = True hkey_mod = win32con @@ -40,17 +46,30 @@ RegEnumValue = win32api.RegEnumValue RegError = win32api.error except ImportError: - log.info("Warning: Can't read registry to find the " - "necessary compiler setting\n" - "Make sure that Python modules winreg, " - "win32api or win32con are installed.") + log.info( + "Warning: Can't read registry to find the " + "necessary compiler setting\n" + "Make sure that Python modules winreg, " + "win32api or win32con are installed." + ) pass if _can_read_reg: - HKEYS = (hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT) + HKEYS = ( + hkey_mod.HKEY_USERS, + hkey_mod.HKEY_CURRENT_USER, + hkey_mod.HKEY_LOCAL_MACHINE, + hkey_mod.HKEY_CLASSES_ROOT, + ) + + +warnings.warn( + "msvccompiler is deprecated and slated to be removed " + "in the future. Please discontinue use or file an issue " + "with pypa/distutils describing your use case.", + DeprecationWarning, +) + def read_keys(base, key): """Return list of registry keys.""" @@ -69,6 +88,7 @@ def read_keys(base, key): i += 1 return L + def read_values(base, key): """Return dict of registry keys and values. @@ -90,6 +110,7 @@ def read_values(base, key): i += 1 return d + def convert_mbcs(s): dec = getattr(s, "decode", None) if dec is not None: @@ -99,6 +120,7 @@ def convert_mbcs(s): pass return s + class MacroExpander: def __init__(self, version): self.macros = {} @@ -122,12 +144,13 @@ def load_macros(self, version): self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") else: self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError as exc: # + except KeyError: raise DistutilsPlatformError( - """Python was built with Visual Studio 2003; + """Python was built with Visual Studio 2003; extensions must be built with a compiler than can generate compatible binaries. Visual Studio 2003 was not found on this system. 
If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" + ) p = r"Software\Microsoft\NET Framework Setup\Product" for base in HKEYS: @@ -136,7 +159,7 @@ def load_macros(self, version): except RegError: continue key = RegEnumKey(h, 0) - d = read_values(base, r"%s\%s" % (p, key)) + d = read_values(base, r"{}\{}".format(p, key)) self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): @@ -144,6 +167,7 @@ def sub(self, s): s = s.replace(k, v) return s + def get_build_version(): """Return the version of MSVC that was used to build Python. @@ -169,6 +193,7 @@ def get_build_version(): # else we don't know what version of the compiler this is return None + def get_build_architecture(): """Return the processor architecture. @@ -180,7 +205,8 @@ def get_build_architecture(): if i == -1: return "Intel" j = sys.version.find(")", i) - return sys.version[i+len(prefix):j] + return sys.version[i + len(prefix) : j] + def normalize_and_reduce_paths(paths): """Return a list of normalized paths with duplicates removed. @@ -197,9 +223,9 @@ def normalize_and_reduce_paths(paths): return reduced_paths -class MSVCCompiler(CCompiler) : +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" + as defined by the CCompiler abstract class.""" compiler_type = 'msvc' @@ -218,8 +244,7 @@ class MSVCCompiler(CCompiler) : # Needed for the filename generation methods provided by the # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) + src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' @@ -247,7 +272,11 @@ def __init__(self, verbose=0, dry_run=0, force=0): def initialize(self): self.__paths = [] - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + if ( + "DISTUTILS_USE_SDK" in os.environ + and "MSSdk" in os.environ + and self.find_exe("cl.exe") + ): # Assume that the SDK set up everything alright; don't try to be # smarter self.cc = "cl.exe" @@ -259,16 +288,17 @@ def initialize(self): self.__paths = self.get_msvc_paths("path") if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) + raise DistutilsPlatformError( + "Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." 
% self.__product + ) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler self.set_path_env_var('lib') self.set_path_env_var('include') @@ -283,75 +313,91 @@ def initialize(self): self.preprocess_options = None if self.__arch == "Intel": - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GX' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/GX', + '/Z7', + '/D_DEBUG', + ] else: # Win64 - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/GS-', + '/Z7', + '/D_DEBUG', + ] self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] + self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] else: self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] + '/DLL', + '/nologo', + '/INCREMENTAL:no', + '/pdb:None', + '/DEBUG', + ] + self.ldflags_static = ['/nologo'] self.initialized = True # -- Worker methods ------------------------------------------------ - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file - if output_dir is None: output_dir = '' + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / + (base, ext) = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base) :] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) + raise CompileError("Don't know how to compile %s" % src_name) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names - - def compile(self, sources, - output_dir=None, macros=None, 
include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - + def compile( # noqa: C901 + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): if not self.initialized: self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) + compile_info = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] - compile_opts.append ('/c') + compile_opts.append('/c') if debug: compile_opts.extend(self.compile_options_debug) else: @@ -377,8 +423,7 @@ def compile(self, sources, input_opt = src output_opt = "/fo" + obj try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) + self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) except DistutilsExecError as msg: raise CompileError(msg) continue @@ -398,50 +443,47 @@ def compile(self, sources, rc_dir = os.path.dirname(obj) try: # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') + self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext(os.path.basename(src)) + rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) + self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) except DistutilsExecError as msg: raise CompileError(msg) continue else: # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) + raise CompileError( + "Don't know how to compile {} to {}".format(src, obj) + ) output_opt = "/Fo" + obj try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) + self.spawn( + [self.cc] + + compile_opts + + pp_opts + + [input_opt, output_opt] + + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) return objects - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: - pass # XXX what goes here? + pass # XXX what goes here? 
try: self.spawn([self.lib] + lib_args) except DistutilsExecError as msg: @@ -449,36 +491,35 @@ def create_static_lib(self, else: log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - + def link( # noqa: C901 + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) (libraries, library_dirs, runtime_library_dirs) = fixed_args if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) + self.warn( + "I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs) + ) - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -495,11 +536,12 @@ def link(self, ldflags = self.ldflags_shared export_opts = [] - for sym in (export_symbols or []): + for sym in export_symbols or []: export_opts.append("/EXPORT:" + sym) - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) + ld_args = ( + ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] + ) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. The .lib files may even be @@ -508,11 +550,12 @@ def link(self, # builds, they can go into the same directory. if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) + os.path.basename(output_filename) + ) implib_file = os.path.join( - os.path.dirname(objects[0]), - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + os.path.dirname(objects[0]), self.library_filename(dll_name) + ) + ld_args.append('/IMPLIB:' + implib_file) if extra_preargs: ld_args[:0] = extra_preargs @@ -528,7 +571,6 @@ def link(self, else: log.debug("skipping %s (up-to-date)", output_filename) - # -- Miscellaneous methods ----------------------------------------- # These are all used by the 'gen_lib_options() function, in # ccompiler.py. @@ -538,12 +580,12 @@ def library_dir_option(self, dir): def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") + "don't know how to set runtime library search path for MSVC++" + ) def library_option(self, lib): return self.library_filename(lib) - def find_library_file(self, dirs, lib, debug=0): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. 
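# Earlier in link() above, each exported symbol becomes one '/EXPORT:<name>'
# linker switch. The loop is equivalent to this one-liner (symbol names are
# hypothetical):
export_symbols = ['PyInit_spam', 'spam_helper']
export_opts = ['/EXPORT:' + sym for sym in export_symbols or []]
assert export_opts == ['/EXPORT:PyInit_spam', '/EXPORT:spam_helper']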
@@ -553,7 +595,7 @@ def find_library_file(self, dirs, lib, debug=0): try_names = [lib] for dir in dirs: for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) + libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: @@ -578,7 +620,7 @@ def find_exe(self, exe): # didn't find it; try existing path for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) + fn = os.path.join(os.path.abspath(p), exe) if os.path.isfile(fn): return fn @@ -595,11 +637,15 @@ def get_msvc_paths(self, path, platform='x86'): path = path + " dirs" if self.__version >= 7: - key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - % (self.__root, self.__version)) + key = r"{}\{:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories".format( + self.__root, + self.__version, + ) else: - key = (r"%s\6.0\Build System\Components\Platforms" - r"\Win32 (%s)\Directories" % (self.__root, platform)) + key = ( + r"%s\6.0\Build System\Components\Platforms" + r"\Win32 (%s)\Directories" % (self.__root, platform) + ) for base in HKEYS: d = read_values(base, key) @@ -613,10 +659,12 @@ def get_msvc_paths(self, path, platform='x86'): if self.__version == 6: for base in HKEYS: if read_values(base, r"%s\6.0" % self.__root) is not None: - self.warn("It seems you have Visual Studio 6 installed, " + self.warn( + "It seems you have Visual Studio 6 installed, " "but the expected registry settings are not present.\n" "You must at least run the Visual Studio GUI once " - "so that these entries are created.") + "so that these entries are created." + ) break return [] @@ -639,5 +687,6 @@ def set_path_env_var(self, name): log.debug("Importing new compiler from distutils.msvc9compiler") OldMSVCCompiler = MSVCCompiler from distutils.msvc9compiler import MSVCCompiler + # get_build_architecture not really relevant now we support cross-compile - from distutils.msvc9compiler import MacroExpander + from distutils.msvc9compiler import MacroExpander # noqa: F811 diff --git a/setuptools/_distutils/py35compat.py b/setuptools/_distutils/py35compat.py deleted file mode 100644 index 79b2e7f38c..0000000000 --- a/setuptools/_distutils/py35compat.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys -import subprocess - - -def __optim_args_from_interpreter_flags(): - """Return a list of command-line arguments reproducing the current - optimization settings in sys.flags.""" - args = [] - value = sys.flags.optimize - if value > 0: - args.append("-" + "O" * value) - return args - - -_optim_args_from_interpreter_flags = getattr( - subprocess, - "_optim_args_from_interpreter_flags", - __optim_args_from_interpreter_flags, -) diff --git a/setuptools/_distutils/py38compat.py b/setuptools/_distutils/py38compat.py index 7dbe8cef54..59224e71e5 100644 --- a/setuptools/_distutils/py38compat.py +++ b/setuptools/_distutils/py38compat.py @@ -1,7 +1,8 @@ def aix_platform(osname, version, release): try: import _aix_support + return _aix_support.aix_platform() except ImportError: pass - return "%s-%s.%s" % (osname, version, release) + return "{}-{}.{}".format(osname, version, release) diff --git a/setuptools/_distutils/py39compat.py b/setuptools/_distutils/py39compat.py new file mode 100644 index 0000000000..c43e5f10fd --- /dev/null +++ b/setuptools/_distutils/py39compat.py @@ -0,0 +1,22 @@ +import sys +import platform + + +def add_ext_suffix_39(vars): + """ + Ensure vars contains 'EXT_SUFFIX'. 
pypa/distutils#130 + """ + import _imp + + ext_suffix = _imp.extension_suffixes()[0] + vars.update( + EXT_SUFFIX=ext_suffix, + # sysconfig sets SO to match EXT_SUFFIX, so maintain + # that expectation. + # https://github.com/python/cpython/blob/785cc6770588de087d09e89a69110af2542be208/Lib/sysconfig.py#L671-L673 + SO=ext_suffix, + ) + + +needs_ext_suffix = sys.version_info < (3, 10) and platform.system() == 'Windows' +add_ext_suffix = add_ext_suffix_39 if needs_ext_suffix else lambda vars: None diff --git a/setuptools/_distutils/spawn.py b/setuptools/_distutils/spawn.py index b2d10e39d3..afefe525ef 100644 --- a/setuptools/_distutils/spawn.py +++ b/setuptools/_distutils/spawn.py @@ -10,12 +10,12 @@ import os import subprocess -from distutils.errors import DistutilsExecError -from distutils.debug import DEBUG -from distutils import log +from .errors import DistutilsExecError +from .debug import DEBUG +from ._log import log -def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): +def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901 """Run another program, specified as a command list 'cmd', in a new process. 'cmd' is just the argument list for the new process, ie. @@ -48,6 +48,7 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): if sys.platform == 'darwin': from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver + macosx_target_ver = get_macosx_target_ver() if macosx_target_ver: env[MACOSX_VERSION_VAR] = macosx_target_ver @@ -60,13 +61,15 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): if not DEBUG: cmd = cmd[0] raise DistutilsExecError( - "command %r failed: %s" % (cmd, exc.args[-1])) from exc + "command {!r} failed: {}".format(cmd, exc.args[-1]) + ) from exc if exitcode: if not DEBUG: cmd = cmd[0] raise DistutilsExecError( - "command %r failed with exit code %s" % (cmd, exitcode)) + "command {!r} failed with exit code {}".format(cmd, exitcode) + ) def find_executable(executable, path=None): diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py index 4a77a431dc..a40a7231b3 100644 --- a/setuptools/_distutils/sysconfig.py +++ b/setuptools/_distutils/sysconfig.py @@ -9,13 +9,15 @@ Email: """ -import _imp import os import re import sys import sysconfig +import pathlib from .errors import DistutilsPlatformError +from . import py39compat +from ._functools import pass_none IS_PYPY = '__pypy__' in sys.builtin_module_names @@ -39,31 +41,48 @@ project_base = os.getcwd() -# python_build: (Boolean) if true, we're either building Python or -# building an extension with an un-installed Python, so we use -# different (hard-wired) directories. def _is_python_source_dir(d): - for fn in ("Setup", "Setup.local"): - if os.path.isfile(os.path.join(d, "Modules", fn)): - return True - return False + """ + Return True if the target directory appears to point to an + un-installed Python. + """ + modules = pathlib.Path(d).joinpath('Modules') + return any(modules.joinpath(fn).is_file() for fn in ('Setup', 'Setup.local')) + _sys_home = getattr(sys, '_home', None) + +def _is_parent(dir_a, dir_b): + """ + Return True if a is a parent of b. + """ + return os.path.normcase(dir_a).startswith(os.path.normcase(dir_b)) + + if os.name == 'nt': + + @pass_none def _fix_pcbuild(d): - if d and os.path.normcase(d).startswith( - os.path.normcase(os.path.join(PREFIX, "PCbuild"))): - return PREFIX - return d + # In a venv, sys._home will be inside BASE_PREFIX rather than PREFIX. 
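# _is_parent() above reduces to a prefix test on normcase()-normalized
# paths; note that despite the docstring's wording, the expression actually
# asks whether dir_a lies *under* dir_b, which is how _fix_pcbuild() calls
# it. A quick self-contained sketch of that check (toy paths):
import os

def is_parent(dir_a, dir_b):
    # True when dir_a is inside dir_b after case/sep normalization
    return os.path.normcase(dir_a).startswith(os.path.normcase(dir_b))

assert is_parent(os.path.join('base', 'PCbuild', 'amd64'),
                 os.path.join('base', 'PCbuild'))
assert not is_parent('base', os.path.join('base', 'PCbuild'))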
+ prefixes = PREFIX, BASE_PREFIX + matched = ( + prefix + for prefix in prefixes + if _is_parent(d, os.path.join(prefix, "PCbuild")) + ) + return next(matched, d) + project_base = _fix_pcbuild(project_base) _sys_home = _fix_pcbuild(_sys_home) + def _python_build(): if _sys_home: return _is_python_source_dir(_sys_home) return _is_python_source_dir(project_base) + python_build = _python_build() @@ -79,6 +98,7 @@ def _python_build(): # this attribute, which is fine. pass + def get_python_version(): """Return a string containing the major and minor Python version, leaving off the patchlevel. Sample return values could be '1.5' @@ -98,36 +118,91 @@ def get_python_inc(plat_specific=0, prefix=None): If 'prefix' is supplied, use it instead of sys.base_prefix or sys.base_exec_prefix -- i.e., ignore 'plat_specific'. """ - if prefix is None: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - if os.name == "posix": - if IS_PYPY and sys.version_info < (3, 8): - return os.path.join(prefix, 'include') - if python_build: - # Assume the executable is in the build directory. The - # pyconfig.h file should be in the same directory. Since - # the build directory may not be the source directory, we - # must use "srcdir" from the makefile to find the "Include" - # directory. - if plat_specific: - return _sys_home or project_base - else: - incdir = os.path.join(get_config_var('srcdir'), 'Include') - return os.path.normpath(incdir) - implementation = 'pypy' if IS_PYPY else 'python' - python_dir = implementation + get_python_version() + build_flags - return os.path.join(prefix, "include", python_dir) - elif os.name == "nt": - if python_build: - # Include both the include and PC dir to ensure we can find - # pyconfig.h - return (os.path.join(prefix, "include") + os.path.pathsep + - os.path.join(prefix, "PC")) - return os.path.join(prefix, "include") - else: + default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX + resolved_prefix = prefix if prefix is not None else default_prefix + try: + getter = globals()[f'_get_python_inc_{os.name}'] + except KeyError: raise DistutilsPlatformError( "I don't know where Python installs its C header files " - "on platform '%s'" % os.name) + "on platform '%s'" % os.name + ) + return getter(resolved_prefix, prefix, plat_specific) + + +@pass_none +def _extant(path): + """ + Replace path with None if it doesn't exist. + """ + return path if os.path.exists(path) else None + + +def _get_python_inc_posix(prefix, spec_prefix, plat_specific): + if IS_PYPY and sys.version_info < (3, 8): + return os.path.join(prefix, 'include') + return ( + _get_python_inc_posix_python(plat_specific) + or _extant(_get_python_inc_from_config(plat_specific, spec_prefix)) + or _get_python_inc_posix_prefix(prefix) + ) + + +def _get_python_inc_posix_python(plat_specific): + """ + Assume the executable is in the build directory. The + pyconfig.h file should be in the same directory. Since + the build directory may not be the source directory, + use "srcdir" from the makefile to find the "Include" + directory. + """ + if not python_build: + return + if plat_specific: + return _sys_home or project_base + incdir = os.path.join(get_config_var('srcdir'), 'Include') + return os.path.normpath(incdir) + + +def _get_python_inc_from_config(plat_specific, spec_prefix): + """ + If no prefix was explicitly specified, provide the include + directory from the config vars. 
Useful when + cross-compiling, since the config vars may come from + the host + platform Python installation, while the current Python + executable is from the build platform installation. + + >>> monkeypatch = getfixture('monkeypatch') + >>> gpifc = _get_python_inc_from_config + >>> monkeypatch.setitem(gpifc.__globals__, 'get_config_var', str.lower) + >>> gpifc(False, '/usr/bin/') + >>> gpifc(False, '') + >>> gpifc(False, None) + 'includepy' + >>> gpifc(True, None) + 'confincludepy' + """ + if spec_prefix is None: + return get_config_var('CONF' * plat_specific + 'INCLUDEPY') + + +def _get_python_inc_posix_prefix(prefix): + implementation = 'pypy' if IS_PYPY else 'python' + python_dir = implementation + get_python_version() + build_flags + return os.path.join(prefix, "include", python_dir) + + +def _get_python_inc_nt(prefix, spec_prefix, plat_specific): + if python_build: + # Include both the include and PC dir to ensure we can find + # pyconfig.h + return ( + os.path.join(prefix, "include") + + os.path.pathsep + + os.path.join(prefix, "PC") + ) + return os.path.join(prefix, "include") # allow this behavior to be monkey-patched. Ref pypa/distutils#2. @@ -178,8 +253,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): # Pure Python libdir = "lib" implementation = 'pypy' if IS_PYPY else 'python' - libpython = os.path.join(prefix, libdir, - implementation + get_python_version()) + libpython = os.path.join(prefix, libdir, implementation + get_python_version()) return _posix_lib(standard_lib, libpython, early_prefix, prefix) elif os.name == "nt": if standard_lib: @@ -189,11 +263,11 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): else: raise DistutilsPlatformError( "I don't know where Python installs its library " - "on platform '%s'" % os.name) - + "on platform '%s'" % os.name + ) -def customize_compiler(compiler): +def customize_compiler(compiler): # noqa: C901 """Do any platform-specific customization of a CCompiler instance. Mainly needed on Unix, so we can plug in the information that @@ -213,20 +287,36 @@ def customize_compiler(compiler): # Use get_config_var() to ensure _config_vars is initialized. 
if not get_config_var('CUSTOMIZED_OSX_COMPILER'): import _osx_support + _osx_support.customize_compiler(_config_vars) _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' - (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ - get_config_vars('CC', 'CXX', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') + ( + cc, + cxx, + cflags, + ccshared, + ldshared, + shlib_suffix, + ar, + ar_flags, + ) = get_config_vars( + 'CC', + 'CXX', + 'CFLAGS', + 'CCSHARED', + 'LDSHARED', + 'SHLIB_SUFFIX', + 'AR', + 'ARFLAGS', + ) if 'CC' in os.environ: newcc = os.environ['CC'] - if('LDSHARED' not in os.environ - and ldshared.startswith(cc)): + if 'LDSHARED' not in os.environ and ldshared.startswith(cc): # If CC is overridden, use that as the default # command for LDSHARED as well - ldshared = newcc + ldshared[len(cc):] + ldshared = newcc + ldshared[len(cc) :] cc = newcc if 'CXX' in os.environ: cxx = os.environ['CXX'] @@ -235,7 +325,7 @@ def customize_compiler(compiler): if 'CPP' in os.environ: cpp = os.environ['CPP'] else: - cpp = cc + " -E" # not always + cpp = cc + " -E" # not always if 'LDFLAGS' in os.environ: ldshared = ldshared + ' ' + os.environ['LDFLAGS'] if 'CFLAGS' in os.environ: @@ -260,7 +350,8 @@ def customize_compiler(compiler): compiler_cxx=cxx, linker_so=ldshared, linker_exe=cc, - archiver=archiver) + archiver=archiver, + ) if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None): compiler.set_executables(ranlib=os.environ['RANLIB']) @@ -280,7 +371,6 @@ def get_config_h_filename(): return sysconfig.get_config_h_filename() - def get_makefile_filename(): """Return full pathname of installed Makefile from the Python build.""" return sysconfig.get_makefile_filename() @@ -302,7 +392,8 @@ def parse_config_h(fp, g=None): _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") -def parse_makefile(fn, g=None): + +def parse_makefile(fn, g=None): # noqa: C901 """Parse a Makefile-style file. A dictionary containing name/value pairs is returned. If an @@ -310,7 +401,10 @@ def parse_makefile(fn, g=None): used instead of a new dictionary. 
""" from distutils.text_file import TextFile - fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") + + fp = TextFile( + fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape" + ) if g is None: g = {} @@ -319,7 +413,7 @@ def parse_makefile(fn, g=None): while True: line = fp.readline() - if line is None: # eof + if line is None: # eof break m = _variable_rx.match(line) if m: @@ -374,21 +468,20 @@ def parse_makefile(fn, g=None): else: done[n] = item = "" if found: - after = value[m.end():] - value = value[:m.start()] + item + after + after = value[m.end() :] + value = value[: m.start()] + item + after if "$" in after: notdone[name] = value else: - try: value = int(value) + try: + value = int(value) except ValueError: done[name] = value.strip() else: done[name] = value del notdone[name] - if name.startswith('PY_') \ - and name[3:] in renamed_variables: - + if name.startswith('PY_') and name[3:] in renamed_variables: name = name[3:] if name not in done: done[name] = value @@ -436,51 +529,6 @@ def expand_makefile_vars(s, vars): _config_vars = None -_sysconfig_name_tmpl = '_sysconfigdata_{abi}_{platform}_{multiarch}' - - -def _init_posix(): - """Initialize the module as appropriate for POSIX systems.""" - # _sysconfigdata is generated at build time, see the sysconfig module - name = os.environ.get( - '_PYTHON_SYSCONFIGDATA_NAME', - _sysconfig_name_tmpl.format( - abi=sys.abiflags, - platform=sys.platform, - multiarch=getattr(sys.implementation, '_multiarch', ''), - ), - ) - try: - _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) - except ImportError: - # Python 3.5 and pypy 7.3.1 - _temp = __import__( - '_sysconfigdata', globals(), locals(), ['build_time_vars'], 0) - build_time_vars = _temp.build_time_vars - global _config_vars - _config_vars = {} - _config_vars.update(build_time_vars) - - -def _init_nt(): - """Initialize the module as appropriate for NT""" - g = {} - # set basic install directories - g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1) - g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1) - - # XXX hmmm.. a normal install puts include files here - g['INCLUDEPY'] = get_python_inc(plat_specific=0) - - g['EXT_SUFFIX'] = _imp.extension_suffixes()[0] - g['EXE'] = ".exe" - g['VERSION'] = get_python_version().replace(".", "") - g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable)) - - global _config_vars - _config_vars = g - - def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. Generally this includes @@ -493,68 +541,11 @@ def get_config_vars(*args): """ global _config_vars if _config_vars is None: - func = globals().get("_init_" + os.name) - if func: - func() - else: - _config_vars = {} - - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # Distutils. - _config_vars['prefix'] = PREFIX - _config_vars['exec_prefix'] = EXEC_PREFIX - - if not IS_PYPY: - # For backward compatibility, see issue19555 - SO = _config_vars.get('EXT_SUFFIX') - if SO is not None: - _config_vars['SO'] = SO - - # Always convert srcdir to an absolute path - srcdir = _config_vars.get('srcdir', project_base) - if os.name == 'posix': - if python_build: - # If srcdir is a relative path (typically '.' or '..') - # then it should be interpreted relative to the directory - # containing Makefile. 
- base = os.path.dirname(get_makefile_filename()) - srcdir = os.path.join(base, srcdir) - else: - # srcdir is not meaningful since the installation is - # spread about the filesystem. We choose the - # directory containing the Makefile since we know it - # exists. - srcdir = os.path.dirname(get_makefile_filename()) - _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir)) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. - if python_build and os.name == "posix": - base = project_base - if (not os.path.isabs(_config_vars['srcdir']) and - base != os.getcwd()): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _config_vars['srcdir']) - _config_vars['srcdir'] = os.path.normpath(srcdir) - - # OS X platforms require special customization to handle - # multi-architecture, multi-os-version installers - if sys.platform == 'darwin': - import _osx_support - _osx_support.customize_config_vars(_config_vars) - - if args: - vals = [] - for name in args: - vals.append(_config_vars.get(name)) - return vals - else: - return _config_vars + _config_vars = sysconfig.get_config_vars().copy() + py39compat.add_ext_suffix(_config_vars) + + return [_config_vars.get(name) for name in args] if args else _config_vars + def get_config_var(name): """Return the value of a single variable using the dictionary @@ -563,5 +554,6 @@ def get_config_var(name): """ if name == 'SO': import warnings + warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) return get_config_vars().get(name) diff --git a/setuptools/_distutils/tests/__init__.py b/setuptools/_distutils/tests/__init__.py index c7dcc7ec3d..27e73393a0 100644 --- a/setuptools/_distutils/tests/__init__.py +++ b/setuptools/_distutils/tests/__init__.py @@ -1,42 +1,8 @@ -"""Test suite for distutils. - -This test suite consists of a collection of test modules in the -distutils.tests package. Each test module has a name starting with -'test' and contains a function test_suite(). The function is expected -to return an initialized unittest.TestSuite instance. +""" +Test suite for distutils. Tests for the command classes in the distutils.command package are included in distutils.tests as well, instead of using a separate distutils.command.tests package, since command identification is done by import rather than matching pre-defined names. - """ - -import os -import sys -import unittest -from test.support import run_unittest - -from .py38compat import save_restore_warnings_filters - - -here = os.path.dirname(__file__) or os.curdir - - -def test_suite(): - suite = unittest.TestSuite() - for fn in os.listdir(here): - if fn.startswith("test") and fn.endswith(".py"): - modname = "distutils.tests." + fn[:-3] - # bpo-40055: Save/restore warnings filters to leave them unchanged. - # Importing tests imports docutils which imports pkg_resources - # which adds a warnings filter. 
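# get_config_var('SO') above keeps working but emits a DeprecationWarning
# steering callers to EXT_SUFFIX. A sketch of the observable behavior,
# assuming a Python where this distutils is importable:
import warnings
from distutils import sysconfig

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    sysconfig.get_config_var('SO')  # deprecated spelling of EXT_SUFFIX

assert any(issubclass(w.category, DeprecationWarning) for w in caught)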
-            with save_restore_warnings_filters():
-                __import__(modname)
-            module = sys.modules[modname]
-            suite.addTest(module.test_suite())
-    return suite
-
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/py35compat.py b/setuptools/_distutils/tests/py35compat.py
deleted file mode 100644
index 0c755261ad..0000000000
--- a/setuptools/_distutils/tests/py35compat.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""
-Backward compatibility support for Python 3.5
-"""
-
-import sys
-import test.support
-import subprocess
-
-
-# copied from Python 3.9 test.support module
-def _missing_compiler_executable(cmd_names=[]):
-    """Check if the compiler components used to build the interpreter exist.
-
-    Check for the existence of the compiler executables whose names are listed
-    in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
-    and return the first missing executable or None when none is found
-    missing.
-
-    """
-    from distutils import ccompiler, sysconfig, spawn
-    compiler = ccompiler.new_compiler()
-    sysconfig.customize_compiler(compiler)
-    for name in compiler.executables:
-        if cmd_names and name not in cmd_names:
-            continue
-        cmd = getattr(compiler, name)
-        if cmd_names:
-            assert cmd is not None, \
-                   "the '%s' executable is not configured" % name
-        elif not cmd:
-            continue
-        if spawn.find_executable(cmd[0]) is None:
-            return cmd[0]
-
-
-missing_compiler_executable = vars(test.support).setdefault(
-    'missing_compiler_executable',
-    _missing_compiler_executable,
-)
-
-
-try:
-    from test.support import unix_shell
-except ImportError:
-    # Adapted from Python 3.9 test.support module
-    is_android = hasattr(sys, 'getandroidapilevel')
-    unix_shell = (
-        None if sys.platform == 'win32' else
-        '/system/bin/sh' if is_android else
-        '/bin/sh'
-    )
-
-
-# copied from Python 3.9 subprocess module
-def _optim_args_from_interpreter_flags():
-    """Return a list of command-line arguments reproducing the current
-    optimization settings in sys.flags."""
-    args = []
-    value = sys.flags.optimize
-    if value > 0:
-        args.append('-' + 'O' * value)
-    return args
-
-
-vars(subprocess).setdefault(
-    '_optim_args_from_interpreter_flags',
-    _optim_args_from_interpreter_flags,
-)
-
-
-def adapt_glob(regex):
-    """
-    Supply legacy expectation on Python 3.5
-    """
-    if sys.version_info > (3, 6):
-        return regex
-    return regex.replace('(?s:', '').replace(r')\Z', r'\Z(?ms)')
diff --git a/setuptools/_distutils/tests/py37compat.py b/setuptools/_distutils/tests/py37compat.py
new file mode 100644
index 0000000000..e5d406a3b6
--- /dev/null
+++ b/setuptools/_distutils/tests/py37compat.py
@@ -0,0 +1,18 @@
+import os
+import sys
+import platform
+
+
+def subprocess_args_compat(*args):
+    return list(map(os.fspath, args))
+
+
+def subprocess_args_passthrough(*args):
+    return list(args)
+
+
+subprocess_args = (
+    subprocess_args_compat
+    if platform.system() == "Windows" and sys.version_info < (3, 8)
+    else subprocess_args_passthrough
+)
diff --git a/setuptools/_distutils/tests/py38compat.py b/setuptools/_distutils/tests/py38compat.py
index c949f58ecd..211d3a6c50 100644
--- a/setuptools/_distutils/tests/py38compat.py
+++ b/setuptools/_distutils/tests/py38compat.py
@@ -18,44 +18,32 @@
 try:
     from test.support.os_helper import (
-        change_cwd,
         rmtree,
         EnvironmentVarGuard,
-        TESTFN,
         unlink,
         skip_unless_symlink,
         temp_dir,
-        create_empty_file,
-        temp_cwd,
     )
 except (ModuleNotFoundError, ImportError):
     from test.support import (
-        change_cwd,
         rmtree,
         EnvironmentVarGuard,
-        TESTFN,
         unlink,
         skip_unless_symlink,
         temp_dir,
-        create_empty_file,
-        temp_cwd,
     )

-# From Python 3.9
-@contextlib.contextmanager
-def _save_restore_warnings_filters():
-    old_filters = warnings.filters[:]
-    try:
-        yield
-    finally:
-        warnings.filters[:] = old_filters
-
-
 try:
-    from test.support.warnings_helper import save_restore_warnings_filters
+    from test.support.import_helper import (
+        DirsOnSysPath,
+        CleanImport,
+    )
 except (ModuleNotFoundError, ImportError):
-    save_restore_warnings_filters = _save_restore_warnings_filters
+    from test.support import (
+        DirsOnSysPath,
+        CleanImport,
+    )


 if sys.version_info < (3, 9):
diff --git a/setuptools/_distutils/tests/support.py b/setuptools/_distutils/tests/support.py
index b4410fc9d9..fd4b11bf75 100644
--- a/setuptools/_distutils/tests/support.py
+++ b/setuptools/_distutils/tests/support.py
@@ -3,69 +3,21 @@
 import sys
 import shutil
 import tempfile
-import unittest
 import sysconfig
-from copy import deepcopy
+import itertools
+import pathlib

-from . import py38compat as os_helper
+import pytest
+from more_itertools import always_iterable

-from distutils import log
-from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL
 from distutils.core import Distribution


-class LoggingSilencer(object):
-
-    def setUp(self):
-        super().setUp()
-        self.threshold = log.set_threshold(log.FATAL)
-        # catching warnings
-        # when log will be replaced by logging
-        # we won't need such monkey-patch anymore
-        self._old_log = log.Log._log
-        log.Log._log = self._log
-        self.logs = []
-
-    def tearDown(self):
-        log.set_threshold(self.threshold)
-        log.Log._log = self._old_log
-        super().tearDown()
-
-    def _log(self, level, msg, args):
-        if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
-            raise ValueError('%s wrong log level' % str(level))
-        if not isinstance(msg, str):
-            raise TypeError("msg should be str, not '%.200s'"
-                            % (type(msg).__name__))
-        self.logs.append((level, msg, args))
-
-    def get_logs(self, *levels):
-        return [msg % args for level, msg, args
-                in self.logs if level in levels]
-
-    def clear_logs(self):
-        self.logs = []
-
-
-class TempdirManager(object):
-    """Mix-in class that handles temporary directories for test cases.
-
-    This is intended to be used with unittest.TestCase.
+@pytest.mark.usefixtures('distutils_managed_tempdir')
+class TempdirManager:
+    """
+    Mix-in class that handles temporary directories for test cases.
     """
-
-    def setUp(self):
-        super().setUp()
-        self.old_cwd = os.getcwd()
-        self.tempdirs = []
-
-    def tearDown(self):
-        # Restore working dir, for Solaris and derivatives, where rmdir()
-        # on the current directory fails.
-        os.chdir(self.old_cwd)
-        super().tearDown()
-        while self.tempdirs:
-            tmpdir = self.tempdirs.pop()
-            os_helper.rmtree(tmpdir)

     def mkdtemp(self):
         """Create a temporary directory that will be cleaned up.
@@ -79,16 +31,9 @@ def mkdtemp(self):

     def write_file(self, path, content='xxx'):
         """Writes a file in the given path.
-
         path can be a string or a sequence.
         """
-        if isinstance(path, (list, tuple)):
-            path = os.path.join(*path)
-        f = open(path, 'w')
-        try:
-            f.write(content)
-        finally:
-            f.close()
+        pathlib.Path(*always_iterable(path)).write_text(content)

     def create_dist(self, pkg_name='foo', **kw):
         """Will generate a test environment.
@@ -112,31 +57,12 @@ class DummyCommand:
     """Class to store options for retrieval via set_undefined_options()."""

     def __init__(self, **kwargs):
-        for kw, val in kwargs.items():
-            setattr(self, kw, val)
+        vars(self).update(kwargs)

     def ensure_finalized(self):
         pass


-class EnvironGuard(object):
-
-    def setUp(self):
-        super(EnvironGuard, self).setUp()
-        self.old_environ = deepcopy(os.environ)
-
-    def tearDown(self):
-        for key, value in self.old_environ.items():
-            if os.environ.get(key) != value:
-                os.environ[key] = value
-
-        for key in tuple(os.environ.keys()):
-            if key not in self.old_environ:
-                del os.environ[key]
-
-        super(EnvironGuard, self).tearDown()
-
-
 def copy_xxmodule_c(directory):
     """Helper for tests that need the xxmodule.c source file.

@@ -149,28 +75,12 @@ def test_compile(self):
     If the source file can be found, it will be copied to *directory*.
     If not, the test will be skipped.  Errors during copy are not caught.
     """
-    filename = _get_xxmodule_path()
-    if filename is None:
-        raise unittest.SkipTest('cannot find xxmodule.c (test must run in '
-                                'the python build dir)')
-    shutil.copy(filename, directory)
+    shutil.copy(_get_xxmodule_path(), os.path.join(directory, 'xxmodule.c'))


 def _get_xxmodule_path():
-    srcdir = sysconfig.get_config_var('srcdir')
-    candidates = [
-        # use installed copy if available
-        os.path.join(os.path.dirname(__file__), 'xxmodule.c'),
-        # otherwise try using copy from build directory
-        os.path.join(srcdir, 'Modules', 'xxmodule.c'),
-        # srcdir mysteriously can be $srcdir/Lib/distutils/tests when
-        # this file is run from its parent directory, so walk up the
-        # tree to find the real srcdir
-        os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'),
-    ]
-    for path in candidates:
-        if os.path.exists(path):
-            return path
+    source_name = 'xxmodule.c' if sys.version_info > (3, 9) else 'xxmodule-3.8.c'
+    return os.path.join(os.path.dirname(__file__), source_name)


 def fixup_build_ext(cmd):
@@ -208,3 +118,17 @@ def fixup_build_ext(cmd):
     else:
         name, equals, value = runshared.partition('=')
         cmd.library_dirs = [d for d in value.split(os.pathsep) if d]
+
+
+def combine_markers(cls):
+    """
+    pytest will honor markers as found on the class, but when
+    markers are on multiple subclasses, only one appears.  Use
+    this decorator to combine those markers.
+ """ + cls.pytestmark = [ + mark + for base in itertools.chain([cls], cls.__bases__) + for mark in getattr(base, 'pytestmark', []) + ] + return cls diff --git a/setuptools/_distutils/tests/test_archive_util.py b/setuptools/_distutils/tests/test_archive_util.py index 800b90180e..89c415d761 100644 --- a/setuptools/_distutils/tests/test_archive_util.py +++ b/setuptools/_distutils/tests/test_archive_util.py @@ -1,47 +1,32 @@ -# -*- coding: utf-8 -*- """Tests for distutils.archive_util.""" -import unittest import os import sys import tarfile from os.path import splitdrive import warnings +import functools +import operator +import pathlib + +import pytest +import path from distutils import archive_util -from distutils.archive_util import (check_archive_formats, make_tarball, - make_zipfile, make_archive, - ARCHIVE_FORMATS) -from distutils.spawn import find_executable, spawn +from distutils.archive_util import ( + check_archive_formats, + make_tarball, + make_zipfile, + make_archive, + ARCHIVE_FORMATS, +) +from distutils.spawn import spawn from distutils.tests import support -from test.support import run_unittest, patch +from test.support import patch from .unix_compat import require_unix_id, require_uid_0, grp, pwd, UID_0_SUPPORT -from .py38compat import change_cwd from .py38compat import check_warnings -try: - import zipfile - ZIP_SUPPORT = True -except ImportError: - ZIP_SUPPORT = find_executable('zip') - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - -try: - import bz2 -except ImportError: - bz2 = None - -try: - import lzma -except ImportError: - lzma = None - def can_fs_encode(filename): """ Return True if the filename can be saved in the file system. @@ -55,11 +40,16 @@ def can_fs_encode(filename): return True -class ArchiveUtilTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +def all_equal(values): + return functools.reduce(operator.eq, values) + - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') +def same_drive(*paths): + return all_equal(pathlib.Path(path).drive for path in paths) + + +class ArchiveUtilTestCase(support.TempdirManager): + @pytest.mark.usefixtures('needs_zlib') def test_make_tarball(self, name='archive'): # creating something to tar tmpdir = self._create_files() @@ -67,53 +57,51 @@ def test_make_tarball(self, name='archive'): # trying an uncompressed one self._make_tarball(tmpdir, name, '.tar', compress=None) - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_make_tarball_gzip(self): tmpdir = self._create_files() self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip') - @unittest.skipUnless(bz2, 'Need bz2 support to run') def test_make_tarball_bzip2(self): + pytest.importorskip('bz2') tmpdir = self._create_files() self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2') - @unittest.skipUnless(lzma, 'Need lzma support to run') def test_make_tarball_xz(self): + pytest.importorskip('lzma') tmpdir = self._create_files() self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz') - @unittest.skipUnless(can_fs_encode('årchiv'), - 'File system cannot handle this filename') + @pytest.mark.skipif("not can_fs_encode('årchiv')") def test_make_tarball_latin1(self): """ Mirror test_make_tarball, except filename contains latin characters. 
""" - self.test_make_tarball('årchiv') # note this isn't a real word + self.test_make_tarball('årchiv') # note this isn't a real word - @unittest.skipUnless(can_fs_encode('のアーカイブ'), - 'File system cannot handle this filename') + @pytest.mark.skipif("not can_fs_encode('のアーカイブ')") def test_make_tarball_extended(self): """ Mirror test_make_tarball, except filename contains extended characters outside the latin charset. """ - self.test_make_tarball('のアーカイブ') # japanese for archive + self.test_make_tarball('のアーカイブ') # japanese for archive def _make_tarball(self, tmpdir, target_name, suffix, **kwargs): tmpdir2 = self.mkdtemp() - unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], - "source and target should be on same drive") + if same_drive(tmpdir, tmpdir2): + pytest.skip("source and target should be on same drive") base_name = os.path.join(tmpdir2, target_name) # working with relative paths to avoid tar warnings - with change_cwd(tmpdir): + with path.Path(tmpdir): make_tarball(splitdrive(base_name)[1], 'dist', **kwargs) # check if the compressed tarball was created tarball = base_name + suffix - self.assertTrue(os.path.exists(tarball)) - self.assertEqual(self._tarinfo(tarball), self._created_files) + assert os.path.exists(tarball) + assert self._tarinfo(tarball) == self._created_files def _tarinfo(self, path): tar = tarfile.open(path) @@ -124,8 +112,14 @@ def _tarinfo(self, path): finally: tar.close() - _zip_created_files = ['dist/', 'dist/file1', 'dist/file2', - 'dist/sub/', 'dist/sub/file3', 'dist/sub2/'] + _zip_created_files = [ + 'dist/', + 'dist/file1', + 'dist/file2', + 'dist/sub/', + 'dist/sub/file3', + 'dist/sub2/', + ] _created_files = [p.rstrip('/') for p in _zip_created_files] def _create_files(self): @@ -140,11 +134,10 @@ def _create_files(self): os.mkdir(os.path.join(dist, 'sub2')) return tmpdir - @unittest.skipUnless(find_executable('tar') and find_executable('gzip') - and ZLIB_SUPPORT, - 'Need the tar, gzip and zlib command to run') + @pytest.mark.usefixtures('needs_zlib') + @pytest.mark.skipif("not (find_executable('tar') and find_executable('gzip'))") def test_tarfile_vs_tar(self): - tmpdir = self._create_files() + tmpdir = self._create_files() tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() @@ -156,7 +149,7 @@ def test_tarfile_vs_tar(self): # check if the compressed tarball was created tarball = base_name + '.tar.gz' - self.assertTrue(os.path.exists(tarball)) + assert os.path.exists(tarball) # now create another tarball using `tar` tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') @@ -170,10 +163,10 @@ def test_tarfile_vs_tar(self): finally: os.chdir(old_dir) - self.assertTrue(os.path.exists(tarball2)) + assert os.path.exists(tarball2) # let's compare both tarballs - self.assertEqual(self._tarinfo(tarball), self._created_files) - self.assertEqual(self._tarinfo(tarball2), self._created_files) + assert self._tarinfo(tarball) == self._created_files + assert self._tarinfo(tarball2) == self._created_files # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') @@ -184,7 +177,7 @@ def test_tarfile_vs_tar(self): finally: os.chdir(old_dir) tarball = base_name + '.tar' - self.assertTrue(os.path.exists(tarball)) + assert os.path.exists(tarball) # now for a dry_run base_name = os.path.join(tmpdir2, 'archive') @@ -195,15 +188,14 @@ def test_tarfile_vs_tar(self): finally: os.chdir(old_dir) tarball = base_name + '.tar' - self.assertTrue(os.path.exists(tarball)) + assert os.path.exists(tarball) - 
@unittest.skipUnless(find_executable('compress'), - 'The compress program is required') + @pytest.mark.skipif("not find_executable('compress')") def test_compress_deprecated(self): - tmpdir = self._create_files() + tmpdir = self._create_files() base_name = os.path.join(self.mkdtemp(), 'archive') - # using compress and testing the PendingDeprecationWarning + # using compress and testing the DeprecationWarning old_dir = os.getcwd() os.chdir(tmpdir) try: @@ -213,8 +205,8 @@ def test_compress_deprecated(self): finally: os.chdir(old_dir) tarball = base_name + '.tar.Z' - self.assertTrue(os.path.exists(tarball)) - self.assertEqual(len(w.warnings), 1) + assert os.path.exists(tarball) + assert len(w.warnings) == 1 # same test with dry_run os.remove(tarball) @@ -223,34 +215,34 @@ def test_compress_deprecated(self): try: with check_warnings() as w: warnings.simplefilter("always") - make_tarball(base_name, 'dist', compress='compress', - dry_run=True) + make_tarball(base_name, 'dist', compress='compress', dry_run=True) finally: os.chdir(old_dir) - self.assertFalse(os.path.exists(tarball)) - self.assertEqual(len(w.warnings), 1) + assert not os.path.exists(tarball) + assert len(w.warnings) == 1 - @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT, - 'Need zip and zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_make_zipfile(self): + zipfile = pytest.importorskip('zipfile') # creating something to tar tmpdir = self._create_files() base_name = os.path.join(self.mkdtemp(), 'archive') - with change_cwd(tmpdir): + with path.Path(tmpdir): make_zipfile(base_name, 'dist') # check if the compressed tarball was created tarball = base_name + '.zip' - self.assertTrue(os.path.exists(tarball)) + assert os.path.exists(tarball) with zipfile.ZipFile(tarball) as zf: - self.assertEqual(sorted(zf.namelist()), self._zip_created_files) + assert sorted(zf.namelist()) == self._zip_created_files - @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') def test_make_zipfile_no_zlib(self): + zipfile = pytest.importorskip('zipfile') patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError called = [] zipfile_class = zipfile.ZipFile + def fake_zipfile(*a, **kw): if kw.get('compression', None) == zipfile.ZIP_STORED: called.append((a, kw)) @@ -261,75 +253,78 @@ def fake_zipfile(*a, **kw): # create something to tar and compress tmpdir = self._create_files() base_name = os.path.join(self.mkdtemp(), 'archive') - with change_cwd(tmpdir): + with path.Path(tmpdir): make_zipfile(base_name, 'dist') tarball = base_name + '.zip' - self.assertEqual(called, - [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]) - self.assertTrue(os.path.exists(tarball)) + assert called == [((tarball, "w"), {'compression': zipfile.ZIP_STORED})] + assert os.path.exists(tarball) with zipfile.ZipFile(tarball) as zf: - self.assertEqual(sorted(zf.namelist()), self._zip_created_files) + assert sorted(zf.namelist()) == self._zip_created_files def test_check_archive_formats(self): - self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), - 'xxx') - self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar', - 'ztar', 'tar', 'zip'])) + assert check_archive_formats(['gztar', 'xxx', 'zip']) == 'xxx' + assert ( + check_archive_formats(['gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip']) + is None + ) def test_make_archive(self): tmpdir = self.mkdtemp() base_name = os.path.join(tmpdir, 'archive') - self.assertRaises(ValueError, make_archive, base_name, 'xxx') + with pytest.raises(ValueError): + 
make_archive(base_name, 'xxx') def test_make_archive_cwd(self): current_dir = os.getcwd() + def _breaks(*args, **kw): raise RuntimeError() + ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file') try: try: make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) - except: + except Exception: pass - self.assertEqual(os.getcwd(), current_dir) + assert os.getcwd() == current_dir finally: - del ARCHIVE_FORMATS['xxx'] + ARCHIVE_FORMATS.pop('xxx') def test_make_archive_tar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'tar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar') - self.assertEqual(self._tarinfo(res), self._created_files) + assert os.path.exists(res) + assert os.path.basename(res) == 'archive.tar' + assert self._tarinfo(res) == self._created_files - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_make_archive_gztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'gztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.gz') - self.assertEqual(self._tarinfo(res), self._created_files) + assert os.path.exists(res) + assert os.path.basename(res) == 'archive.tar.gz' + assert self._tarinfo(res) == self._created_files - @unittest.skipUnless(bz2, 'Need bz2 support to run') def test_make_archive_bztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + pytest.importorskip('bz2') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'bztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.bz2') - self.assertEqual(self._tarinfo(res), self._created_files) + assert os.path.exists(res) + assert os.path.basename(res) == 'archive.tar.bz2' + assert self._tarinfo(res) == self._created_files - @unittest.skipUnless(lzma, 'Need xz support to run') def test_make_archive_xztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + pytest.importorskip('lzma') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'xztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.xz') - self.assertEqual(self._tarinfo(res), self._created_files) + assert os.path.exists(res) + assert os.path.basename(res) == 'archive.tar.xz' + assert self._tarinfo(res) == self._created_files def test_make_archive_owner_group(self): # testing make_archive with owner and group, with various combinations @@ -340,54 +335,52 @@ def test_make_archive_owner_group(self): else: group = owner = 'root' - base_dir = self._create_files() + base_dir = self._create_files() root_dir = self.mkdtemp() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, - group=group) - self.assertTrue(os.path.exists(res)) + base_name = os.path.join(self.mkdtemp(), 'archive') + res = make_archive( + base_name, 'zip', root_dir, base_dir, owner=owner, 
+            base_name, 'zip', root_dir, base_dir, owner=owner, group=group
+        )
+        assert os.path.exists(res)

         res = make_archive(base_name, 'zip', root_dir, base_dir)
-        self.assertTrue(os.path.exists(res))
+        assert os.path.exists(res)

-        res = make_archive(base_name, 'tar', root_dir, base_dir,
-                           owner=owner, group=group)
-        self.assertTrue(os.path.exists(res))
+        res = make_archive(
+            base_name, 'tar', root_dir, base_dir, owner=owner, group=group
+        )
+        assert os.path.exists(res)

-        res = make_archive(base_name, 'tar', root_dir, base_dir,
-                           owner='kjhkjhkjg', group='oihohoh')
-        self.assertTrue(os.path.exists(res))
+        res = make_archive(
+            base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh'
+        )
+        assert os.path.exists(res)

-    @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib")
+    @pytest.mark.usefixtures('needs_zlib')
     @require_unix_id
     @require_uid_0
     def test_tarfile_root_owner(self):
-        tmpdir =  self._create_files()
+        tmpdir = self._create_files()
         base_name = os.path.join(self.mkdtemp(), 'archive')
         old_dir = os.getcwd()
         os.chdir(tmpdir)
         group = grp.getgrgid(0)[0]
         owner = pwd.getpwuid(0)[0]
         try:
-            archive_name = make_tarball(base_name, 'dist', compress=None,
-                                        owner=owner, group=group)
+            archive_name = make_tarball(
+                base_name, 'dist', compress=None, owner=owner, group=group
+            )
         finally:
             os.chdir(old_dir)

         # check if the compressed tarball was created
-        self.assertTrue(os.path.exists(archive_name))
+        assert os.path.exists(archive_name)

         # now checks the rights
         archive = tarfile.open(archive_name)
         try:
             for member in archive.getmembers():
-                self.assertEqual(member.uid, 0)
-                self.assertEqual(member.gid, 0)
+                assert member.uid == 0
+                assert member.gid == 0
         finally:
             archive.close()
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase)
-
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist.py b/setuptools/_distutils/tests/test_bdist.py
index 8b7498e3eb..af330a06e7 100644
--- a/setuptools/_distutils/tests/test_bdist.py
+++ b/setuptools/_distutils/tests/test_bdist.py
@@ -1,30 +1,30 @@
 """Tests for distutils.command.bdist."""
-import os
-import unittest
-from test.support import run_unittest
-import warnings
-
 from distutils.command.bdist import bdist
 from distutils.tests import support


-class BuildTestCase(support.TempdirManager,
-                    unittest.TestCase):
-
+class TestBuild(support.TempdirManager):
     def test_formats(self):
         # let's create a command and make sure
         # we can set the format
         dist = self.create_dist()[1]
         cmd = bdist(dist)
-        cmd.formats = ['msi']
+        cmd.formats = ['gztar']
         cmd.ensure_finalized()
-        self.assertEqual(cmd.formats, ['msi'])
+        assert cmd.formats == ['gztar']

         # what formats does bdist offer?
-        formats = ['bztar', 'gztar', 'msi', 'rpm', 'tar',
-                   'wininst', 'xztar', 'zip', 'ztar']
-        found = sorted(cmd.format_command)
-        self.assertEqual(found, formats)
+        formats = [
+            'bztar',
+            'gztar',
+            'rpm',
+            'tar',
+            'xztar',
+            'zip',
+            'ztar',
+        ]
+        found = sorted(cmd.format_commands)
+        assert found == formats

     def test_skip_build(self):
         # bug #10946: bdist --skip-build should trickle down to subcommands
@@ -34,24 +34,13 @@ def test_skip_build(self):
         cmd.ensure_finalized()
         dist.command_obj['bdist'] = cmd

-        names = ['bdist_dumb', 'bdist_wininst']  # bdist_rpm does not support --skip-build
-        if os.name == 'nt':
-            names.append('bdist_msi')
+        names = [
+            'bdist_dumb',
+        ]  # bdist_rpm does not support --skip-build

         for name in names:
-            with warnings.catch_warnings():
-                warnings.filterwarnings('ignore', 'bdist_wininst command is deprecated',
-                                        DeprecationWarning)
-                subcmd = cmd.get_finalized_command(name)
+            subcmd = cmd.get_finalized_command(name)
             if getattr(subcmd, '_unsupported', False):
                 # command is not supported on this build
                 continue
-            self.assertTrue(subcmd.skip_build,
-                            '%s should take --skip-build from bdist' % name)
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase)
-
-
-if __name__ == '__main__':
-    run_unittest(test_suite())
+            assert subcmd.skip_build, '%s should take --skip-build from bdist' % name
diff --git a/setuptools/_distutils/tests/test_bdist_dumb.py b/setuptools/_distutils/tests/test_bdist_dumb.py
index bb860c8ac7..6fb50c4b8e 100644
--- a/setuptools/_distutils/tests/test_bdist_dumb.py
+++ b/setuptools/_distutils/tests/test_bdist_dumb.py
@@ -3,8 +3,8 @@
 import os
 import sys
 import zipfile
-import unittest
-from test.support import run_unittest
+
+import pytest

 from distutils.core import Distribution
 from distutils.command.bdist_dumb import bdist_dumb
@@ -19,32 +19,16 @@
 """

-try:
-    import zlib
-    ZLIB_SUPPORT = True
-except ImportError:
-    ZLIB_SUPPORT = False
-
-
-class BuildDumbTestCase(support.TempdirManager,
-                        support.LoggingSilencer,
-                        support.EnvironGuard,
-                        unittest.TestCase):
-
-    def setUp(self):
-        super(BuildDumbTestCase, self).setUp()
-        self.old_location = os.getcwd()
-        self.old_sys_argv = sys.argv, sys.argv[:]
-
-    def tearDown(self):
-        os.chdir(self.old_location)
-        sys.argv = self.old_sys_argv[0]
-        sys.argv[:] = self.old_sys_argv[1]
-        super(BuildDumbTestCase, self).tearDown()
-
-    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+@support.combine_markers
+@pytest.mark.usefixtures('save_env')
+@pytest.mark.usefixtures('save_argv')
+@pytest.mark.usefixtures('save_cwd')
+class TestBuildDumb(
+    support.TempdirManager,
+):
+    @pytest.mark.usefixtures('needs_zlib')
     def test_simple_built(self):
-
         # let's create a simple package
         tmp_dir = self.mkdtemp()
         pkg_dir = os.path.join(tmp_dir, 'foo')
@@ -54,10 +38,16 @@ def test_simple_built(self):
         self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
         self.write_file((pkg_dir, 'README'), '')

-        dist = Distribution({'name': 'foo', 'version': '0.1',
-                             'py_modules': ['foo'],
-                             'url': 'xxx', 'author': 'xxx',
-                             'author_email': 'xxx'})
+        dist = Distribution(
+            {
+                'name': 'foo',
+                'version': '0.1',
+                'py_modules': ['foo'],
+                'url': 'xxx',
+                'author': 'xxx',
+                'author_email': 'xxx',
+            }
+        )
         dist.script_name = 'setup.py'
         os.chdir(pkg_dir)

@@ -73,9 +63,9 @@ def test_simple_built(self):

         # see what we have
         dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
-        base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
+        base = "{}.{}.zip".format(dist.get_fullname(), cmd.plat_name)

-        self.assertEqual(dist_created, [base])
+        assert dist_created == [base]

         # now let's check what we have in the zip file
         fp = zipfile.ZipFile(os.path.join('dist', base))
@@ -88,10 +78,4 @@ def test_simple_built(self):
         wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py']
         if not sys.dont_write_bytecode:
             wanted.append('foo.%s.pyc' % sys.implementation.cache_tag)
-        self.assertEqual(contents, sorted(wanted))
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase)
-
-
-if __name__ == '__main__':
-    run_unittest(test_suite())
+        assert contents == sorted(wanted)
diff --git a/setuptools/_distutils/tests/test_bdist_msi.py b/setuptools/_distutils/tests/test_bdist_msi.py
deleted file mode 100644
index b1831ef20c..0000000000
--- a/setuptools/_distutils/tests/test_bdist_msi.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""Tests for distutils.command.bdist_msi."""
-import sys
-import unittest
-from test.support import run_unittest
-from distutils.tests import support
-
-from .py38compat import check_warnings
-
-
-@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')
-class BDistMSITestCase(support.TempdirManager,
-                       support.LoggingSilencer,
-                       unittest.TestCase):
-
-    def test_minimal(self):
-        # minimal test XXX need more tests
-        from distutils.command.bdist_msi import bdist_msi
-        project_dir, dist = self.create_dist()
-        with check_warnings(("", DeprecationWarning)):
-            cmd = bdist_msi(dist)
-        cmd.ensure_finalized()
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BDistMSITestCase)
-
-
-if __name__ == '__main__':
-    run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist_rpm.py b/setuptools/_distutils/tests/test_bdist_rpm.py
index 08a7cb46d8..4a702fb913 100644
--- a/setuptools/_distutils/tests/test_bdist_rpm.py
+++ b/setuptools/_distutils/tests/test_bdist_rpm.py
@@ -1,14 +1,14 @@
 """Tests for distutils.command.bdist_rpm."""

-import unittest
 import sys
 import os
-from test.support import run_unittest
+
+import pytest

 from distutils.core import Distribution
 from distutils.command.bdist_rpm import bdist_rpm
 from distutils.tests import support
-from distutils.spawn import find_executable
+from distutils.spawn import find_executable  # noqa: F401

 from .py38compat import requires_zlib

@@ -22,40 +22,35 @@
 """

-class BuildRpmTestCase(support.TempdirManager,
-                       support.EnvironGuard,
-                       support.LoggingSilencer,
-                       unittest.TestCase):
-
-    def setUp(self):
-        try:
-            sys.executable.encode("UTF-8")
-        except UnicodeEncodeError:
-            raise unittest.SkipTest("sys.executable is not encodable to UTF-8")
-
-        super(BuildRpmTestCase, self).setUp()
-        self.old_location = os.getcwd()
-        self.old_sys_argv = sys.argv, sys.argv[:]
-
-    def tearDown(self):
-        os.chdir(self.old_location)
-        sys.argv = self.old_sys_argv[0]
-        sys.argv[:] = self.old_sys_argv[1]
-        super(BuildRpmTestCase, self).tearDown()
-
-    # XXX I am unable yet to make this test work without
-    # spurious sdtout/stderr output under Mac OS X
-    @unittest.skipUnless(sys.platform.startswith('linux'),
-                         'spurious sdtout/stderr output under Mac OS X')
+
+@pytest.fixture(autouse=True)
+def sys_executable_encodable():
+    try:
+        sys.executable.encode('UTF-8')
+    except UnicodeEncodeError:
+        pytest.skip("sys.executable is not encodable to UTF-8")
+
+
+mac_woes = pytest.mark.skipif(
+    "not sys.platform.startswith('linux')",
+    reason='spurious stdout/stderr output under macOS',
+)
+
+
+@pytest.mark.usefixtures('save_env')
+@pytest.mark.usefixtures('save_argv')
+@pytest.mark.usefixtures('save_cwd')
+class TestBuildRpm(
+    support.TempdirManager,
+):
+    @mac_woes
     @requires_zlib()
-    @unittest.skipIf(find_executable('rpm') is None,
-                     'the rpm command is not found')
-    @unittest.skipIf(find_executable('rpmbuild') is None,
-                     'the rpmbuild command is not found')
+    @pytest.mark.skipif("not find_executable('rpm')")
+    @pytest.mark.skipif("not find_executable('rpmbuild')")
     def test_quiet(self):
         # let's create a package
         tmp_dir = self.mkdtemp()
-        os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation
+        os.environ['HOME'] = tmp_dir  # to confine dir '.rpmdb' creation
         pkg_dir = os.path.join(tmp_dir, 'foo')
         os.mkdir(pkg_dir)
         self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
@@ -63,10 +58,16 @@ def test_quiet(self):
         self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
         self.write_file((pkg_dir, 'README'), '')

-        dist = Distribution({'name': 'foo', 'version': '0.1',
-                             'py_modules': ['foo'],
-                             'url': 'xxx', 'author': 'xxx',
-                             'author_email': 'xxx'})
+        dist = Distribution(
+            {
+                'name': 'foo',
+                'version': '0.1',
+                'py_modules': ['foo'],
+                'url': 'xxx',
+                'author': 'xxx',
+                'author_email': 'xxx',
+            }
+        )
         dist.script_name = 'setup.py'
         os.chdir(pkg_dir)

@@ -80,26 +81,21 @@ def test_quiet(self):
         cmd.run()

         dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
-        self.assertIn('foo-0.1-1.noarch.rpm', dist_created)
+        assert 'foo-0.1-1.noarch.rpm' in dist_created

         # bug #2945: upload ignores bdist_rpm files
-        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
-        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
+        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm') in dist.dist_files
+        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm') in dist.dist_files

-    # XXX I am unable yet to make this test work without
-    # spurious sdtout/stderr output under Mac OS X
-    @unittest.skipUnless(sys.platform.startswith('linux'),
-                         'spurious sdtout/stderr output under Mac OS X')
+    @mac_woes
     @requires_zlib()

     # http://bugs.python.org/issue1533164
-    @unittest.skipIf(find_executable('rpm') is None,
-                     'the rpm command is not found')
-    @unittest.skipIf(find_executable('rpmbuild') is None,
-                     'the rpmbuild command is not found')
+    @pytest.mark.skipif("not find_executable('rpm')")
+    @pytest.mark.skipif("not find_executable('rpmbuild')")
     def test_no_optimize_flag(self):
         # let's create a package that breaks bdist_rpm
         tmp_dir = self.mkdtemp()
-        os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation
+        os.environ['HOME'] = tmp_dir  # to confine dir '.rpmdb' creation
         pkg_dir = os.path.join(tmp_dir, 'foo')
         os.mkdir(pkg_dir)
         self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
@@ -107,10 +103,16 @@ def test_no_optimize_flag(self):
         self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
         self.write_file((pkg_dir, 'README'), '')

-        dist = Distribution({'name': 'foo', 'version': '0.1',
-                             'py_modules': ['foo'],
-                             'url': 'xxx', 'author': 'xxx',
-                             'author_email': 'xxx'})
+        dist = Distribution(
+            {
+                'name': 'foo',
+                'version': '0.1',
+                'py_modules': ['foo'],
+                'url': 'xxx',
+                'author': 'xxx',
+                'author_email': 'xxx',
+            }
+        )
         dist.script_name = 'setup.py'
         os.chdir(pkg_dir)

@@ -123,16 +125,10 @@ def test_no_optimize_flag(self):
         cmd.run()

         dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
-        self.assertIn('foo-0.1-1.noarch.rpm', dist_created)
+        assert 'foo-0.1-1.noarch.rpm' in dist_created

         # bug #2945: upload ignores bdist_rpm files
-        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
-        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
+        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm') in dist.dist_files
+        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm') in dist.dist_files

         os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase)
-
-
-if __name__ == '__main__':
-    run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist_wininst.py b/setuptools/_distutils/tests/test_bdist_wininst.py
deleted file mode 100644
index 59f25167e6..0000000000
--- a/setuptools/_distutils/tests/test_bdist_wininst.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Tests for distutils.command.bdist_wininst."""
-import sys
-import platform
-import unittest
-from test.support import run_unittest
-
-from .py38compat import check_warnings
-
-from distutils.command.bdist_wininst import bdist_wininst
-from distutils.tests import support
-
-
-@unittest.skipIf(sys.platform == 'win32' and platform.machine() == 'ARM64',
-                 'bdist_wininst is not supported in this install')
-@unittest.skipIf(getattr(bdist_wininst, '_unsupported', False),
-                 'bdist_wininst is not supported in this install')
-class BuildWinInstTestCase(support.TempdirManager,
-                           support.LoggingSilencer,
-                           unittest.TestCase):
-
-    def test_get_exe_bytes(self):
-
-        # issue5731: command was broken on non-windows platforms
-        # this test makes sure it works now for every platform
-        # let's create a command
-        pkg_pth, dist = self.create_dist()
-        with check_warnings(("", DeprecationWarning)):
-            cmd = bdist_wininst(dist)
-        cmd.ensure_finalized()
-
-        # let's run the code that finds the right wininst*.exe file
-        # and make sure it finds it and returns its content
-        # no matter what platform we have
-        exe_file = cmd.get_exe_bytes()
-        self.assertGreater(len(exe_file), 10)
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BuildWinInstTestCase)
-
-
-if __name__ == '__main__':
-    run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_build.py b/setuptools/_distutils/tests/test_build.py
index 83a9e4f4dd..66d8af50ac 100644
--- a/setuptools/_distutils/tests/test_build.py
+++ b/setuptools/_distutils/tests/test_build.py
@@ -1,56 +1,45 @@
 """Tests for distutils.command.build."""
-import unittest
 import os
 import sys
-from test.support import run_unittest

 from distutils.command.build import build
 from distutils.tests import support
 from sysconfig import get_platform


-class BuildTestCase(support.TempdirManager,
-                    support.LoggingSilencer,
-                    unittest.TestCase):
-
+class TestBuild(support.TempdirManager):
     def test_finalize_options(self):
         pkg_dir, dist = self.create_dist()
         cmd = build(dist)
         cmd.finalize_options()

         # if not specified, plat_name gets the current platform
-        self.assertEqual(cmd.plat_name, get_platform())
+        assert cmd.plat_name == get_platform()

         # build_purelib is build + lib
         wanted = os.path.join(cmd.build_base, 'lib')
-        self.assertEqual(cmd.build_purelib, wanted)
+        assert cmd.build_purelib == wanted

-        # build_platlib is 'build/lib.platform-x.x[-pydebug]'
+        # build_platlib is 'build/lib.platform-cache_tag[-pydebug]'
         # examples:
-        #   build/lib.macosx-10.3-i386-2.7
-        plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2])
+        #   build/lib.macosx-10.3-i386-cpython39
+        plat_spec = '.{}-{}'.format(cmd.plat_name, sys.implementation.cache_tag)
         if hasattr(sys, 'gettotalrefcount'):
-            self.assertTrue(cmd.build_platlib.endswith('-pydebug'))
+            assert cmd.build_platlib.endswith('-pydebug')
             plat_spec += '-pydebug'
         wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
-        self.assertEqual(cmd.build_platlib, wanted)
+        assert cmd.build_platlib == wanted

         # by default, build_lib = build_purelib
-        self.assertEqual(cmd.build_lib, cmd.build_purelib)
+        assert cmd.build_lib == cmd.build_purelib

         # build_temp is build/temp.<plat>
         wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
-        self.assertEqual(cmd.build_temp, wanted)
+        assert cmd.build_temp == wanted

         # build_scripts is build/scripts-x.x
-        wanted = os.path.join(cmd.build_base,
-                              'scripts-%d.%d' % sys.version_info[:2])
-        self.assertEqual(cmd.build_scripts, wanted)
+        wanted = os.path.join(cmd.build_base, 'scripts-%d.%d' % sys.version_info[:2])
+        assert cmd.build_scripts == wanted

         # executable is os.path.normpath(sys.executable)
-        self.assertEqual(cmd.executable, os.path.normpath(sys.executable))
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase)
-
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
+        assert cmd.executable == os.path.normpath(sys.executable)
diff --git a/setuptools/_distutils/tests/test_build_clib.py b/setuptools/_distutils/tests/test_build_clib.py
index d50ead7c94..b5a392a85f 100644
--- a/setuptools/_distutils/tests/test_build_clib.py
+++ b/setuptools/_distutils/tests/test_build_clib.py
@@ -1,45 +1,44 @@
 """Tests for distutils.command.build_clib."""
-import unittest
 import os
-import sys
-from test.support import run_unittest
+from test.support import missing_compiler_executable

-from .py35compat import missing_compiler_executable
+import pytest

 from distutils.command.build_clib import build_clib
 from distutils.errors import DistutilsSetupError
 from distutils.tests import support


-class BuildCLibTestCase(support.TempdirManager,
-                        support.LoggingSilencer,
-                        unittest.TestCase):
-
+class TestBuildCLib(support.TempdirManager):
     def test_check_library_dist(self):
         pkg_dir, dist = self.create_dist()
         cmd = build_clib(dist)

         # 'libraries' option must be a list
-        self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo')
+        with pytest.raises(DistutilsSetupError):
+            cmd.check_library_list('foo')

         # each element of 'libraries' must a 2-tuple
-        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
-                          ['foo1', 'foo2'])
+        with pytest.raises(DistutilsSetupError):
+            cmd.check_library_list(['foo1', 'foo2'])

         # first element of each tuple in 'libraries'
         # must be a string (the library name)
-        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
-                          [(1, 'foo1'), ('name', 'foo2')])
+        with pytest.raises(DistutilsSetupError):
+            cmd.check_library_list([(1, 'foo1'), ('name', 'foo2')])

         # library name may not contain directory separators
-        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
-                          [('name', 'foo1'),
-                           ('another/name', 'foo2')])
+        with pytest.raises(DistutilsSetupError):
+            cmd.check_library_list(
+                [('name', 'foo1'), ('another/name', 'foo2')],
+            )

         # second element of each tuple must be a dictionary (build info)
-        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
-                          [('name', {}),
-                           ('another', 'foo2')])
+        with pytest.raises(DistutilsSetupError):
+            cmd.check_library_list(
+                [('name', {}), ('another', 'foo2')],
+            )

         # those work
         libs = [('name', {}), ('name', {'ok': 'good'})]
@@ -52,35 +51,41 @@ def test_get_source_files(self):
         # "in 'libraries' option 'sources' must be present and must be
         # a list of source filenames
         cmd.libraries = [('name', {})]
-        self.assertRaises(DistutilsSetupError, cmd.get_source_files)
+        with pytest.raises(DistutilsSetupError):
+            cmd.get_source_files()

         cmd.libraries = [('name', {'sources': 1})]
-        self.assertRaises(DistutilsSetupError, cmd.get_source_files)
+        with pytest.raises(DistutilsSetupError):
+            cmd.get_source_files()

         cmd.libraries = [('name', {'sources': ['a', 'b']})]
-        self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+        assert cmd.get_source_files() == ['a', 'b']

         cmd.libraries = [('name', {'sources': ('a', 'b')})]
-        self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+        assert cmd.get_source_files() == ['a', 'b']

-        cmd.libraries = [('name', {'sources': ('a', 'b')}),
-                         ('name2', {'sources': ['c', 'd']})]
-        self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd'])
+        cmd.libraries = [
+            ('name', {'sources': ('a', 'b')}),
+            ('name2', {'sources': ['c', 'd']}),
+        ]
+        assert cmd.get_source_files() == ['a', 'b', 'c', 'd']

     def test_build_libraries(self):
-
         pkg_dir, dist = self.create_dist()
         cmd = build_clib(dist)
+
         class FakeCompiler:
             def compile(*args, **kw):
                 pass
+
             create_static_lib = compile

         cmd.compiler = FakeCompiler()

         # build_libraries is also doing a bit of typo checking
         lib = [('name', {'sources': 'notvalid'})]
-        self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib)
+        with pytest.raises(DistutilsSetupError):
+            cmd.build_libraries(lib)

         lib = [('name', {'sources': list()})]
         cmd.build_libraries(lib)
@@ -94,16 +99,17 @@ def test_finalize_options(self):
         cmd.include_dirs = 'one-dir'
         cmd.finalize_options()
-        self.assertEqual(cmd.include_dirs, ['one-dir'])
+        assert cmd.include_dirs == ['one-dir']

         cmd.include_dirs = None
         cmd.finalize_options()
-        self.assertEqual(cmd.include_dirs, [])
+        assert cmd.include_dirs == []

         cmd.distribution.libraries = 'WONTWORK'
-        self.assertRaises(DistutilsSetupError, cmd.finalize_options)
+        with pytest.raises(DistutilsSetupError):
+            cmd.finalize_options()

-    @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+    @pytest.mark.skipif('platform.system() == "Windows"')
     def test_run(self):
         pkg_dir, dist = self.create_dist()
         cmd = build_clib(dist)
@@ -127,10 +133,4 @@ def test_run(self):
         cmd.run()

         # let's check the result
-        self.assertIn('libfoo.a', os.listdir(build_temp))
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase)
-
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
+        assert 'libfoo.a' in os.listdir(build_temp)
diff --git a/setuptools/_distutils/tests/test_build_ext.py b/setuptools/_distutils/tests/test_build_ext.py
index 920e4dc870..cb61ad7455 100644
--- a/setuptools/_distutils/tests/test_build_ext.py
+++ b/setuptools/_distutils/tests/test_build_ext.py
@@ -2,62 +2,94 @@
 import os
 from io import StringIO
 import textwrap
+import site
+import contextlib
+import platform
+import tempfile
+import importlib
+import shutil
+import re
+
+import path
+import pytest

 from distutils.core import Distribution
 from distutils.command.build_ext import build_ext
 from distutils import sysconfig
-from distutils.tests.support import (TempdirManager, LoggingSilencer,
-                                     copy_xxmodule_c, fixup_build_ext)
+from distutils.tests.support import (
+    TempdirManager,
+    copy_xxmodule_c,
+    fixup_build_ext,
+)
 from distutils.extension import Extension
 from distutils.errors import (
-    CompileError, DistutilsPlatformError, DistutilsSetupError,
-    UnknownFileError)
+    CompileError,
+    DistutilsPlatformError,
+    DistutilsSetupError,
+    UnknownFileError,
+)

-import unittest
 from test import support
-from . import py38compat as os_helper
-from test.support.script_helper import assert_python_ok
import py38compat as import_helper -# http://bugs.python.org/issue4373 -# Don't load the xx module more than once. -ALREADY_TESTED = False +@pytest.fixture() +def user_site_dir(request): + self = request.instance + self.tmp_dir = self.mkdtemp() + from distutils.command import build_ext + + orig_user_base = site.USER_BASE + + site.USER_BASE = self.mkdtemp() + build_ext.USER_BASE = site.USER_BASE + + # bpo-30132: On Windows, a .pdb file may be created in the current + # working directory. Create a temporary working directory to cleanup + # everything at the end of the test. + with path.Path(self.tmp_dir): + yield + + site.USER_BASE = orig_user_base + build_ext.USER_BASE = orig_user_base -class BuildExtTestCase(TempdirManager, - LoggingSilencer, - unittest.TestCase): - def setUp(self): - # Create a simple test environment - super(BuildExtTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - import site - self.old_user_base = site.USER_BASE - site.USER_BASE = self.mkdtemp() - from distutils.command import build_ext - build_ext.USER_BASE = site.USER_BASE - - # bpo-30132: On Windows, a .pdb file may be created in the current - # working directory. Create a temporary working directory to cleanup - # everything at the end of the test. - change_cwd = os_helper.change_cwd(self.tmp_dir) - change_cwd.__enter__() - self.addCleanup(change_cwd.__exit__, None, None, None) - - def tearDown(self): - import site - site.USER_BASE = self.old_user_base - from distutils.command import build_ext - build_ext.USER_BASE = self.old_user_base - super(BuildExtTestCase, self).tearDown() +@contextlib.contextmanager +def safe_extension_import(name, path): + with import_helper.CleanImport(name): + with extension_redirect(name, path) as new_path: + with import_helper.DirsOnSysPath(new_path): + yield + + +@contextlib.contextmanager +def extension_redirect(mod, path): + """ + Tests will fail to tear down an extension module if it's been imported. + + Before importing, copy the file to a temporary directory that won't + be cleaned up. Yield the new path. + """ + if platform.system() != "Windows" and sys.platform != "cygwin": + yield path + return + with import_helper.DirsOnSysPath(path): + spec = importlib.util.find_spec(mod) + filename = os.path.basename(spec.origin) + trash_dir = tempfile.mkdtemp(prefix='deleteme') + dest = os.path.join(trash_dir, os.path.basename(filename)) + shutil.copy(spec.origin, dest) + yield trash_dir + # TODO: can the file be scheduled for deletion? 
+ + +@pytest.mark.usefixtures('user_site_dir') +class TestBuildExt(TempdirManager): def build_ext(self, *args, **kwargs): return build_ext(*args, **kwargs) def test_build_ext(self): cmd = support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - global ALREADY_TESTED copy_xxmodule_c(self.tmp_dir) xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') xx_ext = Extension('xx', [xx_c]) @@ -78,47 +110,33 @@ def test_build_ext(self): finally: sys.stdout = old_stdout - if ALREADY_TESTED: - self.skipTest('Already tested in %s' % ALREADY_TESTED) - else: - ALREADY_TESTED = type(self).__name__ - - code = textwrap.dedent(""" - tmp_dir = {self.tmp_dir!r} + with safe_extension_import('xx', self.tmp_dir): + self._test_xx() - import sys - import unittest - from test import support + @staticmethod + def _test_xx(): + import xx - sys.path.insert(0, tmp_dir) - import xx + for attr in ('error', 'foo', 'new', 'roj'): + assert hasattr(xx, attr) - class Tests(unittest.TestCase): - def test_xx(self): - for attr in ('error', 'foo', 'new', 'roj'): - self.assertTrue(hasattr(xx, attr)) - - self.assertEqual(xx.foo(2, 5), 7) - self.assertEqual(xx.foo(13,15), 28) - self.assertEqual(xx.new().demo(), None) - if support.HAVE_DOCSTRINGS: - doc = 'This is a template module just for instruction.' - self.assertEqual(xx.__doc__, doc) - self.assertIsInstance(xx.Null(), xx.Null) - self.assertIsInstance(xx.Str(), xx.Str) - - - unittest.main() - """.format(**locals())) - assert_python_ok('-c', code) + assert xx.foo(2, 5) == 7 + assert xx.foo(13, 15) == 28 + assert xx.new().demo() is None + if support.HAVE_DOCSTRINGS: + doc = 'This is a template module just for instruction.' + assert xx.__doc__ == doc + assert isinstance(xx.Null(), xx.Null) + assert isinstance(xx.Str(), xx.Str) def test_solaris_enable_shared(self): dist = Distribution({'name': 'xx'}) cmd = self.build_ext(dist) old = sys.platform - sys.platform = 'sunos' # fooling finalize_options - from distutils.sysconfig import _config_vars + sys.platform = 'sunos' # fooling finalize_options + from distutils.sysconfig import _config_vars + old_var = _config_vars.get('Py_ENABLE_SHARED') _config_vars['Py_ENABLE_SHARED'] = 1 try: @@ -131,17 +149,17 @@ def test_solaris_enable_shared(self): _config_vars['Py_ENABLE_SHARED'] = old_var # make sure we get some library dirs under solaris - self.assertGreater(len(cmd.library_dirs), 0) + assert len(cmd.library_dirs) > 0 def test_user_site(self): import site + dist = Distribution({'name': 'xx'}) cmd = self.build_ext(dist) # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) + options = [name for name, short, label in cmd.user_options] + assert 'user' in options # setting a value cmd.user = 1 @@ -157,20 +175,19 @@ def test_user_site(self): # see if include_dirs and library_dirs # were set - self.assertIn(lib, cmd.library_dirs) - self.assertIn(lib, cmd.rpath) - self.assertIn(incl, cmd.include_dirs) + assert lib in cmd.library_dirs + assert lib in cmd.rpath + assert incl in cmd.include_dirs def test_optional_extension(self): - # this extension will fail, but let's ignore this failure # with the optional argument. 
modules = [Extension('foo', ['xxx'], optional=False)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) cmd = self.build_ext(dist) cmd.ensure_finalized() - self.assertRaises((UnknownFileError, CompileError), - cmd.run) # should raise an error + with pytest.raises((UnknownFileError, CompileError)): + cmd.run() # should raise an error modules = [Extension('foo', ['xxx'], optional=True)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) @@ -188,40 +205,40 @@ def test_finalize_options(self): py_include = sysconfig.get_python_inc() for p in py_include.split(os.path.pathsep): - self.assertIn(p, cmd.include_dirs) + assert p in cmd.include_dirs plat_py_include = sysconfig.get_python_inc(plat_specific=1) for p in plat_py_include.split(os.path.pathsep): - self.assertIn(p, cmd.include_dirs) + assert p in cmd.include_dirs # make sure cmd.libraries is turned into a list # if it's a string cmd = self.build_ext(dist) cmd.libraries = 'my_lib, other_lib lastlib' cmd.finalize_options() - self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib']) + assert cmd.libraries == ['my_lib', 'other_lib', 'lastlib'] # make sure cmd.library_dirs is turned into a list # if it's a string cmd = self.build_ext(dist) cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep cmd.finalize_options() - self.assertIn('my_lib_dir', cmd.library_dirs) - self.assertIn('other_lib_dir', cmd.library_dirs) + assert 'my_lib_dir' in cmd.library_dirs + assert 'other_lib_dir' in cmd.library_dirs # make sure rpath is turned into a list # if it's a string cmd = self.build_ext(dist) cmd.rpath = 'one%stwo' % os.pathsep cmd.finalize_options() - self.assertEqual(cmd.rpath, ['one', 'two']) + assert cmd.rpath == ['one', 'two'] # make sure cmd.link_objects is turned into a list # if it's a string cmd = build_ext(dist) cmd.link_objects = 'one two,three' cmd.finalize_options() - self.assertEqual(cmd.link_objects, ['one', 'two', 'three']) + assert cmd.link_objects == ['one', 'two', 'three'] # XXX more tests to perform for win32 @@ -230,80 +247,92 @@ def test_finalize_options(self): cmd = self.build_ext(dist) cmd.define = 'one,two' cmd.finalize_options() - self.assertEqual(cmd.define, [('one', '1'), ('two', '1')]) + assert cmd.define == [('one', '1'), ('two', '1')] # make sure undef is turned into a list of # strings if they are ','-separated strings cmd = self.build_ext(dist) cmd.undef = 'one,two' cmd.finalize_options() - self.assertEqual(cmd.undef, ['one', 'two']) + assert cmd.undef == ['one', 'two'] # make sure swig_opts is turned into a list cmd = self.build_ext(dist) cmd.swig_opts = None cmd.finalize_options() - self.assertEqual(cmd.swig_opts, []) + assert cmd.swig_opts == [] cmd = self.build_ext(dist) cmd.swig_opts = '1 2' cmd.finalize_options() - self.assertEqual(cmd.swig_opts, ['1', '2']) + assert cmd.swig_opts == ['1', '2'] def test_check_extensions_list(self): dist = Distribution() cmd = self.build_ext(dist) cmd.finalize_options() - #'extensions' option must be a list of Extension instances - self.assertRaises(DistutilsSetupError, - cmd.check_extensions_list, 'foo') + # 'extensions' option must be a list of Extension instances + with pytest.raises(DistutilsSetupError): + cmd.check_extensions_list('foo') # each element of 'ext_modules' option must be an # Extension instance or 2-tuple exts = [('bar', 'foo', 'bar'), 'foo'] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + with pytest.raises(DistutilsSetupError): + cmd.check_extensions_list(exts) # first element of each tuple in 
'ext_modules' # must be the extension name (a string) and match # a python dotted-separated name exts = [('foo-bar', '')] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + with pytest.raises(DistutilsSetupError): + cmd.check_extensions_list(exts) # second element of each tuple in 'ext_modules' # must be a dictionary (build info) exts = [('foo.bar', '')] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + with pytest.raises(DistutilsSetupError): + cmd.check_extensions_list(exts) # ok this one should pass - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar'})] + exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', 'some': 'bar'})] cmd.check_extensions_list(exts) ext = exts[0] - self.assertIsInstance(ext, Extension) + assert isinstance(ext, Extension) # check_extensions_list adds in ext the values passed # when they are in ('include_dirs', 'library_dirs', 'libraries' # 'extra_objects', 'extra_compile_args', 'extra_link_args') - self.assertEqual(ext.libraries, 'foo') - self.assertFalse(hasattr(ext, 'some')) + assert ext.libraries == 'foo' + assert not hasattr(ext, 'some') # 'macros' element of build info dict must be 1- or 2-tuple - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + exts = [ + ( + 'foo.bar', + { + 'sources': [''], + 'libraries': 'foo', + 'some': 'bar', + 'macros': [('1', '2', '3'), 'foo'], + }, + ) + ] + with pytest.raises(DistutilsSetupError): + cmd.check_extensions_list(exts) exts[0][1]['macros'] = [('1', '2'), ('3',)] cmd.check_extensions_list(exts) - self.assertEqual(exts[0].undef_macros, ['3']) - self.assertEqual(exts[0].define_macros, [('1', '2')]) + assert exts[0].undef_macros == ['3'] + assert exts[0].define_macros == [('1', '2')] def test_get_source_files(self): modules = [Extension('foo', ['xxx'], optional=False)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) cmd = self.build_ext(dist) cmd.ensure_finalized() - self.assertEqual(cmd.get_source_files(), ['xxx']) + assert cmd.get_source_files() == ['xxx'] def test_unicode_module_names(self): modules = [ @@ -313,10 +342,10 @@ def test_unicode_module_names(self): dist = Distribution({'name': 'xx', 'ext_modules': modules}) cmd = self.build_ext(dist) cmd.ensure_finalized() - self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*') - self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*') - self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo']) - self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_1gaa']) + assert re.search(r'foo(_d)?\..*', cmd.get_ext_filename(modules[0].name)) + assert re.search(r'föö(_d)?\..*', cmd.get_ext_filename(modules[1].name)) + assert cmd.get_export_symbols(modules[0]) == ['PyInit_foo'] + assert cmd.get_export_symbols(modules[1]) == ['PyInitU_f_1gaa'] def test_compiler_option(self): # cmd.compiler is an option and @@ -327,22 +356,19 @@ def test_compiler_option(self): cmd.compiler = 'unix' cmd.ensure_finalized() cmd.run() - self.assertEqual(cmd.compiler, 'unix') + assert cmd.compiler == 'unix' def test_get_outputs(self): cmd = support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) tmp_dir = self.mkdtemp() c_file = os.path.join(tmp_dir, 'foo.c') self.write_file(c_file, 'void PyInit_foo(void) {}\n') ext = Extension('foo', [c_file], optional=False) - dist = 
Distribution({'name': 'xx', - 'ext_modules': [ext]}) + dist = Distribution({'name': 'xx', 'ext_modules': [ext]}) cmd = self.build_ext(dist) fixup_build_ext(cmd) cmd.ensure_finalized() - self.assertEqual(len(cmd.get_outputs()), 1) + assert len(cmd.get_outputs()) == 1 cmd.build_lib = os.path.join(self.tmp_dir, 'build') cmd.build_temp = os.path.join(self.tmp_dir, 'tempt') @@ -358,20 +384,20 @@ def test_get_outputs(self): so_file = cmd.get_outputs()[0] finally: os.chdir(old_wd) - self.assertTrue(os.path.exists(so_file)) + assert os.path.exists(so_file) ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') - self.assertTrue(so_file.endswith(ext_suffix)) + assert so_file.endswith(ext_suffix) so_dir = os.path.dirname(so_file) - self.assertEqual(so_dir, other_tmp_dir) + assert so_dir == other_tmp_dir cmd.inplace = 0 cmd.compiler = None cmd.run() so_file = cmd.get_outputs()[0] - self.assertTrue(os.path.exists(so_file)) - self.assertTrue(so_file.endswith(ext_suffix)) + assert os.path.exists(so_file) + assert so_file.endswith(ext_suffix) so_dir = os.path.dirname(so_file) - self.assertEqual(so_dir, cmd.build_lib) + assert so_dir == cmd.build_lib # inplace = 0, cmd.package = 'bar' build_py = cmd.get_finalized_command('build_py') @@ -379,7 +405,7 @@ def test_get_outputs(self): path = cmd.get_ext_fullpath('foo') # checking that the last directory is the build_dir path = os.path.split(path)[0] - self.assertEqual(path, cmd.build_lib) + assert path == cmd.build_lib # inplace = 1, cmd.package = 'bar' cmd.inplace = 1 @@ -393,14 +419,14 @@ def test_get_outputs(self): # checking that the last directory is bar path = os.path.split(path)[0] lastdir = os.path.split(path)[-1] - self.assertEqual(lastdir, 'bar') + assert lastdir == 'bar' def test_ext_fullpath(self): ext = sysconfig.get_config_var('EXT_SUFFIX') # building lxml.etree inplace - #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') - #etree_ext = Extension('lxml.etree', [etree_c]) - #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) + # etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') + # etree_ext = Extension('lxml.etree', [etree_c]) + # dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) dist = Distribution() cmd = self.build_ext(dist) cmd.inplace = 1 @@ -409,46 +435,47 @@ def test_ext_fullpath(self): curdir = os.getcwd() wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) + assert wanted == path # building lxml.etree not inplace cmd.inplace = 0 cmd.build_lib = os.path.join(curdir, 'tmpdir') wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) + assert wanted == path # building twisted.runner.portmap not inplace build_py = cmd.get_finalized_command('build_py') build_py.package_dir = {} cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', - 'portmap' + ext) - self.assertEqual(wanted, path) + wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', 'portmap' + ext) + assert wanted == path # building twisted.runner.portmap inplace cmd.inplace = 1 path = cmd.get_ext_fullpath('twisted.runner.portmap') wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) - self.assertEqual(wanted, path) + assert wanted == path - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') + 
@pytest.mark.skipif('platform.system() != "Darwin"') + @pytest.mark.usefixtures('save_env') def test_deployment_target_default(self): # Issue 9516: Test that, in the absence of the environment variable, # an extension module is compiled with the same deployment target as # the interpreter. self._try_compile_deployment_target('==', None) - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') + @pytest.mark.skipif('platform.system() != "Darwin"') + @pytest.mark.usefixtures('save_env') def test_deployment_target_too_low(self): # Issue 9516: Test that an extension module is not allowed to be # compiled with a deployment target less than that of the interpreter. - self.assertRaises(DistutilsPlatformError, - self._try_compile_deployment_target, '>', '10.1') + with pytest.raises(DistutilsPlatformError): + self._try_compile_deployment_target('>', '10.1') - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') + @pytest.mark.skipif('platform.system() != "Darwin"') + @pytest.mark.usefixtures('save_env') def test_deployment_target_higher_ok(self): # Issue 9516: Test that an extension module can be compiled with a # deployment target higher than that of the interpreter: the ext @@ -462,10 +489,6 @@ def test_deployment_target_higher_ok(self): self._try_compile_deployment_target('<', deptarget) def _try_compile_deployment_target(self, operator, target): - orig_environ = os.environ - os.environ = orig_environ.copy() - self.addCleanup(setattr, os, 'environ', orig_environ) - if target is None: if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): del os.environ['MACOSX_DEPLOYMENT_TARGET'] @@ -475,7 +498,9 @@ def _try_compile_deployment_target(self, operator, target): deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') with open(deptarget_c, 'w') as fp: - fp.write(textwrap.dedent('''\ + fp.write( + textwrap.dedent( + '''\ #include <AvailabilityMacros.h> int dummy; @@ -485,7 +510,10 @@ def _try_compile_deployment_target(self, operator, target): #error "Unexpected target" #endif - ''' % operator)) + ''' + % operator + ) + ) # get the deployment target that the interpreter was built with target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') @@ -506,12 +534,9 @@ def _try_compile_deployment_target(self, operator, target): deptarget_ext = Extension( 'deptarget', [deptarget_c], - extra_compile_args=['-DTARGET=%s'%(target,)], + extra_compile_args=['-DTARGET={}'.format(target)], ) - dist = Distribution({ - 'name': 'deptarget', - 'ext_modules': [deptarget_ext] - }) + dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]}) dist.package_dir = self.tmp_dir cmd = self.build_ext(dist) cmd.build_lib = self.tmp_dir @@ -532,19 +557,8 @@ def _try_compile_deployment_target(self, operator, target): self.fail("Wrong deployment target during compilation") -class ParallelBuildExtTestCase(BuildExtTestCase): - +class TestParallelBuildExt(TestBuildExt): def build_ext(self, *args, **kwargs): build_ext = super().build_ext(*args, **kwargs) build_ext.parallel = True return build_ext - - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase)) - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase)) - return suite - -if __name__ == '__main__': - support.run_unittest(__name__) diff --git a/setuptools/_distutils/tests/test_build_py.py b/setuptools/_distutils/tests/test_build_py.py index a590a485a2..3bef9d79ec 100644 --- a/setuptools/_distutils/tests/test_build_py.py +++ 
b/setuptools/_distutils/tests/test_build_py.py @@ -2,20 +2,18 @@ import os import sys -import unittest + +import pytest from distutils.command.build_py import build_py from distutils.core import Distribution from distutils.errors import DistutilsFileError from distutils.tests import support -from test.support import run_unittest - -class BuildPyTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +@support.combine_markers +class TestBuildPy(support.TempdirManager): def test_package_data(self): sources = self.mkdtemp() f = open(os.path.join(sources, "__init__.py"), "w") @@ -31,13 +29,10 @@ def test_package_data(self): destination = self.mkdtemp() - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": sources}}) + dist = Distribution({"packages": ["pkg"], "package_dir": {"pkg": sources}}) # script_name need not exist, it just needs to be initialized dist.script_name = os.path.join(sources, "setup.py") - dist.command_obj["build"] = support.DummyCommand( - force=0, - build_lib=destination) + dist.command_obj["build"] = support.DummyCommand(force=0, build_lib=destination) dist.packages = ["pkg"] dist.package_data = {"pkg": ["README.txt"]} dist.package_dir = {"pkg": sources} @@ -45,25 +40,24 @@ def test_package_data(self): cmd = build_py(dist) cmd.compile = 1 cmd.ensure_finalized() - self.assertEqual(cmd.package_data, dist.package_data) + assert cmd.package_data == dist.package_data cmd.run() # This makes sure the list of outputs includes byte-compiled # files for Python modules but not for package data files # (there shouldn't *be* byte-code files for those!). - self.assertEqual(len(cmd.get_outputs()), 3) + assert len(cmd.get_outputs()) == 3 pkgdest = os.path.join(destination, "pkg") files = os.listdir(pkgdest) pycache_dir = os.path.join(pkgdest, "__pycache__") - self.assertIn("__init__.py", files) - self.assertIn("README.txt", files) + assert "__init__.py" in files + assert "README.txt" in files if sys.dont_write_bytecode: - self.assertFalse(os.path.exists(pycache_dir)) + assert not os.path.exists(pycache_dir) else: pyc_files = os.listdir(pycache_dir) - self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag, - pyc_files) + assert "__init__.%s.pyc" % sys.implementation.cache_tag in pyc_files def test_empty_package_dir(self): # See bugs #1668596/#1720897 @@ -75,9 +69,13 @@ def test_empty_package_dir(self): open(os.path.join(testdir, "testfile"), "w").close() os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": ""}, - "package_data": {"pkg": ["doc/*"]}}) + dist = Distribution( + { + "packages": ["pkg"], + "package_dir": {"pkg": ""}, + "package_data": {"pkg": ["doc/*"]}, + } + ) # script_name need not exist, it just needs to be initialized dist.script_name = os.path.join(sources, "setup.py") dist.script_args = ["build"] @@ -88,7 +86,7 @@ def test_empty_package_dir(self): try: dist.run_commands() except DistutilsFileError: self.fail("failed package_data test when package_dir is ''") - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') + @pytest.mark.skipif('sys.dont_write_bytecode') def test_byte_compile(self): project_dir, dist = self.create_dist(py_modules=['boiledeggs']) os.chdir(project_dir) @@ -100,12 +98,11 @@ def test_byte_compile(self): cmd.run() found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) + assert sorted(found) == ['__pycache__', 'boiledeggs.py'] found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(found, - ['boiledeggs.%s.pyc' % 
sys.implementation.cache_tag]) + assert found == ['boiledeggs.%s.pyc' % sys.implementation.cache_tag] - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') + @pytest.mark.skipif('sys.dont_write_bytecode') def test_byte_compile_optimized(self): project_dir, dist = self.create_dist(py_modules=['boiledeggs']) os.chdir(project_dir) @@ -118,10 +115,10 @@ def test_byte_compile_optimized(self): cmd.run() found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) + assert sorted(found) == ['__pycache__', 'boiledeggs.py'] found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag) - self.assertEqual(sorted(found), [expect]) + expect = f'boiledeggs.{sys.implementation.cache_tag}.opt-1.pyc' + assert sorted(found) == [expect] def test_dir_in_package_data(self): """ @@ -142,8 +139,7 @@ def test_dir_in_package_data(self): os.mkdir(os.path.join(docdir, 'otherdir')) os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_data": {"pkg": ["doc/*"]}}) + dist = Distribution({"packages": ["pkg"], "package_data": {"pkg": ["doc/*"]}}) # script_name need not exist, it just needs to be initialized dist.script_name = os.path.join(sources, "setup.py") dist.script_args = ["build"] @@ -154,7 +150,7 @@ def test_dir_in_package_data(self): except DistutilsFileError: self.fail("failed package_data when data dir includes a dir") - def test_dont_write_bytecode(self): + def test_dont_write_bytecode(self, caplog): # makes sure byte_compile is not used dist = self.create_dist()[1] cmd = build_py(dist) @@ -168,12 +164,40 @@ def test_dont_write_bytecode(self): finally: sys.dont_write_bytecode = old_dont_write_bytecode - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) + assert 'byte-compiling is disabled' in caplog.records[0].message + def test_namespace_package_does_not_warn(self, caplog): + """ + Originally the distutils implementation did not account for PEP 420 + and issued warnings for package directories that did not contain + ``__init__.py`` files. + After the acceptance of PEP 420, these warnings no longer make sense, + so we want to ensure they are not displayed, to avoid confusing users. 
+ """ + # Create a fake project structure with a package namespace: + tmp = self.mkdtemp() + os.chdir(tmp) + os.makedirs("ns/pkg") + open("ns/pkg/module.py", "w").close() + + # Configure the package: + attrs = { + "name": "ns.pkg", + "packages": ["ns", "ns.pkg"], + "script_name": "setup.py", + } + dist = Distribution(attrs) + + # Run code paths that would trigger the trap: + cmd = dist.get_command_obj("build_py") + cmd.finalize_options() + modules = cmd.find_all_modules() + assert len(modules) == 1 + module_path = modules[0][-1] + assert module_path.replace(os.sep, "/") == "ns/pkg/module.py" -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase) + cmd.run() -if __name__ == "__main__": - run_unittest(test_suite()) + assert not any( + "package init file" in msg and "not found" in msg for msg in caplog.messages + ) diff --git a/setuptools/_distutils/tests/test_build_scripts.py b/setuptools/_distutils/tests/test_build_scripts.py index f299e51ef7..1a5753c772 100644 --- a/setuptools/_distutils/tests/test_build_scripts.py +++ b/setuptools/_distutils/tests/test_build_scripts.py @@ -1,73 +1,74 @@ """Tests for distutils.command.build_scripts.""" import os -import unittest from distutils.command.build_scripts import build_scripts from distutils.core import Distribution from distutils import sysconfig from distutils.tests import support -from test.support import run_unittest -class BuildScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class TestBuildScripts(support.TempdirManager): def test_default_settings(self): cmd = self.get_build_scripts_cmd("/foo/bar", []) - self.assertFalse(cmd.force) - self.assertIsNone(cmd.build_dir) + assert not cmd.force + assert cmd.build_dir is None cmd.finalize_options() - self.assertTrue(cmd.force) - self.assertEqual(cmd.build_dir, "/foo/bar") + assert cmd.force + assert cmd.build_dir == "/foo/bar" def test_build(self): source = self.mkdtemp() target = self.mkdtemp() expected = self.write_sample_scripts(source) - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) + cmd = self.get_build_scripts_cmd( + target, [os.path.join(source, fn) for fn in expected] + ) cmd.finalize_options() cmd.run() built = os.listdir(target) for name in expected: - self.assertIn(name, built) + assert name in built def get_build_scripts_cmd(self, target, scripts): import sys + dist = Distribution() dist.scripts = scripts dist.command_obj["build"] = support.DummyCommand( - build_scripts=target, - force=1, - executable=sys.executable - ) + build_scripts=target, force=1, executable=sys.executable + ) return build_scripts(dist) def write_sample_scripts(self, dir): expected = [] expected.append("script1.py") - self.write_script(dir, "script1.py", - ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) + self.write_script( + dir, + "script1.py", + ( + "#! 
/usr/bin/env python2.3\n" + "# bogus script w/ Python sh-bang\n" + "pass\n" + ), + ) expected.append("script2.py") - self.write_script(dir, "script2.py", - ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) + self.write_script( + dir, + "script2.py", + ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n"), + ) expected.append("shell.sh") - self.write_script(dir, "shell.sh", - ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) + self.write_script( + dir, + "shell.sh", + ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n"), + ) return expected def write_script(self, dir, name, text): @@ -82,10 +83,9 @@ def test_version_int(self): target = self.mkdtemp() expected = self.write_sample_scripts(source) - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) + cmd = self.get_build_scripts_cmd( + target, [os.path.join(source, fn) for fn in expected] + ) cmd.finalize_options() # http://bugs.python.org/issue4524 @@ -103,10 +103,4 @@ def test_version_int(self): built = os.listdir(target) for name in expected: - self.assertIn(name, built) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert name in built diff --git a/setuptools/_distutils/tests/test_ccompiler.py b/setuptools/_distutils/tests/test_ccompiler.py new file mode 100644 index 0000000000..49691d4b9b --- /dev/null +++ b/setuptools/_distutils/tests/test_ccompiler.py @@ -0,0 +1,92 @@ +import os +import sys +import platform +import textwrap +import sysconfig + +import pytest + +from distutils import ccompiler + + +def _make_strs(paths): + """ + Convert paths to strings for legacy compatibility. + """ + if sys.version_info > (3, 8) and platform.system() != "Windows": + return paths + return list(map(os.fspath, paths)) + + +@pytest.fixture +def c_file(tmp_path): + c_file = tmp_path / 'foo.c' + gen_headers = ('Python.h',) + is_windows = platform.system() == "Windows" + plat_headers = ('windows.h',) * is_windows + all_headers = gen_headers + plat_headers + headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) + payload = ( + textwrap.dedent( + """ + #headers + void PyInit_foo(void) {} + """ + ) + .lstrip() + .replace('#headers', headers) + ) + c_file.write_text(payload) + return c_file + + +def test_set_include_dirs(c_file): + """ + Extensions should build even if set_include_dirs is invoked. + In particular, compiler-specific paths should not be overridden. + """ + compiler = ccompiler.new_compiler() + python = sysconfig.get_paths()['include'] + compiler.set_include_dirs([python]) + compiler.compile(_make_strs([c_file])) + + # do it again, setting include dirs after any initialization + compiler.set_include_dirs([python]) + compiler.compile(_make_strs([c_file])) + + +def test_has_function_prototype(): + # Issue https://github.com/pypa/setuptools/issues/3648 + # Test prototype-generating behavior. + + compiler = ccompiler.new_compiler() + + # Every C implementation should have these. + assert compiler.has_function('abort') + assert compiler.has_function('exit') + with pytest.deprecated_call(match='includes is deprecated'): + # abort() is a valid expression with the prototype. + assert compiler.has_function('abort', includes=['stdlib.h']) + with pytest.deprecated_call(match='includes is deprecated'): + # But exit() is not valid with the actual prototype in scope. 
+ assert not compiler.has_function('exit', includes=['stdlib.h']) + # And setuptools_does_not_exist is not declared or defined at all. + assert not compiler.has_function('setuptools_does_not_exist') + with pytest.deprecated_call(match='includes is deprecated'): + assert not compiler.has_function( + 'setuptools_does_not_exist', includes=['stdio.h'] + ) + + +def test_include_dirs_after_multiple_compile_calls(c_file): + """ + Calling compile multiple times should not change the include dirs + (regression test for setuptools issue #3591). + """ + compiler = ccompiler.new_compiler() + python = sysconfig.get_paths()['include'] + compiler.set_include_dirs([python]) + compiler.compile(_make_strs([c_file])) + assert compiler.include_dirs == [python] + compiler.compile(_make_strs([c_file])) + assert compiler.include_dirs == [python] diff --git a/setuptools/_distutils/tests/test_check.py b/setuptools/_distutils/tests/test_check.py index 91bcdceb43..6d240b8b2b 100644 --- a/setuptools/_distutils/tests/test_check.py +++ b/setuptools/_distutils/tests/test_check.py @@ -1,10 +1,10 @@ """Tests for distutils.command.check.""" import os import textwrap -import unittest -from test.support import run_unittest -from distutils.command.check import check, HAS_DOCUTILS +import pytest + +from distutils.command.check import check from distutils.tests import support from distutils.errors import DistutilsSetupError @@ -17,10 +17,8 @@ HERE = os.path.dirname(__file__) -class CheckTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): - +@support.combine_markers +class TestCheck(support.TempdirManager): def _run(self, metadata=None, cwd=None, **options): if metadata is None: metadata = {} @@ -43,97 +41,139 @@ def test_check_metadata(self): # by default, check is checking the metadata # should have some warnings cmd = self._run() - self.assertEqual(cmd._warnings, 2) + assert cmd._warnings == 1 # now let's add the required fields # and run it again, to make sure we don't get # any warning anymore - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + } cmd = self._run(metadata) - self.assertEqual(cmd._warnings, 0) + assert cmd._warnings == 0 # now with the strict mode, we should # get an error if there are missing metadata - self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1}) + with pytest.raises(DistutilsSetupError): + self._run({}, **{'strict': 1}) # and of course, no error when all metadata are present cmd = self._run(metadata, strict=1) - self.assertEqual(cmd._warnings, 0) + assert cmd._warnings == 0 # now a test with non-ASCII characters - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things about esszet \u00df'} + metadata = { + 'url': 'xxx', + 'author': '\u00c9ric', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'description': 'Something about esszet \u00df', + 'long_description': 'More things about esszet \u00df', + } cmd = self._run(metadata) - self.assertEqual(cmd._warnings, 0) + assert cmd._warnings == 0 + + def test_check_author_maintainer(self): + for kind in ("author", "maintainer"): + # ensure no warning when author_email or maintainer_email is given + # (the spec allows these fields to take the form "Name <email>") + metadata = { + 'url': 'xxx', + kind + 
'_email': 'Name <name@email.com>', + 'name': 'xxx', + 'version': 'xxx', + } + cmd = self._run(metadata) + assert cmd._warnings == 0 + + # the check should not warn if only email is given + metadata[kind + '_email'] = 'name@email.com' + cmd = self._run(metadata) + assert cmd._warnings == 0 + + # the check should not warn if only the name is given + metadata[kind] = "Name" + del metadata[kind + '_email'] + cmd = self._run(metadata) + assert cmd._warnings == 0 - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") def test_check_document(self): + pytest.importorskip('docutils') pkg_info, dist = self.create_dist() cmd = check(dist) # let's see if it detects broken rest broken_rest = 'title\n===\n\ntest' msgs = cmd._check_rst_data(broken_rest) - self.assertEqual(len(msgs), 1) + assert len(msgs) == 1 # and non-broken rest rest = 'title\n=====\n\ntest' msgs = cmd._check_rst_data(rest) - self.assertEqual(len(msgs), 0) + assert len(msgs) == 0 - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") def test_check_restructuredtext(self): + pytest.importorskip('docutils') # let's see if it detects broken rest in long_description broken_rest = 'title\n===\n\ntest' pkg_info, dist = self.create_dist(long_description=broken_rest) cmd = check(dist) cmd.check_restructuredtext() - self.assertEqual(cmd._warnings, 1) + assert cmd._warnings == 1 # let's see if we have an error with strict=1 - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': broken_rest} - self.assertRaises(DistutilsSetupError, self._run, metadata, - **{'strict': 1, 'restructuredtext': 1}) + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': broken_rest, + } + with pytest.raises(DistutilsSetupError): + self._run(metadata, **{'strict': 1, 'restructuredtext': 1}) # and non-broken rest, including a non-ASCII character to test #12114 metadata['long_description'] = 'title\n=====\n\ntest \u00df' cmd = self._run(metadata, strict=1, restructuredtext=1) - self.assertEqual(cmd._warnings, 0) + assert cmd._warnings == 0 # check that includes work to test #31292 metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst' cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1) - self.assertEqual(cmd._warnings, 0) + assert cmd._warnings == 0 - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") def test_check_restructuredtext_with_syntax_highlight(self): + pytest.importorskip('docutils') # Don't fail if there is a `code` or `code-block` directive - example_rst_docs = [] - example_rst_docs.append(textwrap.dedent("""\ + example_rst_docs = [ + textwrap.dedent( + """\ Here's some code: .. code:: python def foo(): pass - """)) - example_rst_docs.append(textwrap.dedent("""\ + """ + ), + textwrap.dedent( + """\ Here's some code: .. code-block:: python def foo(): pass - """)) + """ + ), + ] for rest_with_code in example_rst_docs: pkg_info, dist = self.create_dist(long_description=rest_with_code) @@ -141,23 +181,14 @@ def foo(): cmd.check_restructuredtext() msgs = cmd._check_rst_data(rest_with_code) if pygments is not None: - self.assertEqual(len(msgs), 0) + assert len(msgs) == 0 else: - self.assertEqual(len(msgs), 1) - self.assertEqual( - str(msgs[0][1]), - 'Cannot analyze code. Pygments package not found.' + assert len(msgs) == 1 + assert ( + str(msgs[0][1]) + == 'Cannot analyze code. Pygments package not found.' 
) def test_check_all(self): - - metadata = {'url': 'xxx', 'author': 'xxx'} - self.assertRaises(DistutilsSetupError, self._run, - {}, **{'strict': 1, - 'restructuredtext': 1}) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + with pytest.raises(DistutilsSetupError): + self._run({}, **{'strict': 1, 'restructuredtext': 1}) diff --git a/setuptools/_distutils/tests/test_clean.py b/setuptools/_distutils/tests/test_clean.py index 92367499ce..157b60a1e9 100644 --- a/setuptools/_distutils/tests/test_clean.py +++ b/setuptools/_distutils/tests/test_clean.py @@ -1,23 +1,26 @@ """Tests for distutils.command.clean.""" import os -import unittest from distutils.command.clean import clean from distutils.tests import support -from test.support import run_unittest -class cleanTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +class TestClean(support.TempdirManager): def test_simple_run(self): pkg_dir, dist = self.create_dist() cmd = clean(dist) # let's add some elements clean should remove - dirs = [(d, os.path.join(pkg_dir, d)) - for d in ('build_temp', 'build_lib', 'bdist_base', - 'build_scripts', 'build_base')] + dirs = [ + (d, os.path.join(pkg_dir, d)) + for d in ( + 'build_temp', + 'build_lib', + 'bdist_base', + 'build_scripts', + 'build_base', + ) + ] for name, path in dirs: os.mkdir(path) @@ -34,16 +37,9 @@ def test_simple_run(self): # make sure the files were removed for name, path in dirs: - self.assertFalse(os.path.exists(path), - '%s was not removed' % path) + assert not os.path.exists(path), '%s was not removed' % path # let's run the command again (should spit warnings but succeed) cmd.all = 1 cmd.ensure_finalized() cmd.run() -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase) -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_cmd.py b/setuptools/_distutils/tests/test_cmd.py index 2319214a9e..cc740d1a8b 100644 --- a/setuptools/_distutils/tests/test_cmd.py +++ b/setuptools/_distutils/tests/test_cmd.py @@ -1,26 +1,25 @@ """Tests for distutils.cmd.""" -import unittest import os -from test.support import captured_stdout, run_unittest from distutils.cmd import Command from distutils.dist import Distribution from distutils.errors import DistutilsOptionError from distutils import debug +import pytest + class MyCmd(Command): def initialize_options(self): pass -class CommandTestCase(unittest.TestCase): - def setUp(self): - dist = Distribution() - self.cmd = MyCmd(dist) +@pytest.fixture +def cmd(request): + return MyCmd(Distribution()) - def test_ensure_string_list(self): - cmd = self.cmd +class TestCommand: + def test_ensure_string_list(self, cmd): cmd.not_string_list = ['one', 2, 'three'] cmd.yes_string_list = ['one', 'two', 'three'] cmd.not_string_list2 = object() @@ -28,99 +27,80 @@ def test_ensure_string_list(self): cmd.ensure_string_list('yes_string_list') cmd.ensure_string_list('yes_string_list2') - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list') + with pytest.raises(DistutilsOptionError): + cmd.ensure_string_list('not_string_list') - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list2') + with pytest.raises(DistutilsOptionError): + cmd.ensure_string_list('not_string_list2') cmd.option1 = 'ok,dok' cmd.ensure_string_list('option1') - self.assertEqual(cmd.option1, ['ok', 'dok']) + assert cmd.option1 == ['ok', 
'dok'] cmd.option2 = ['xxx', 'www'] cmd.ensure_string_list('option2') cmd.option3 = ['ok', 2] - self.assertRaises(DistutilsOptionError, cmd.ensure_string_list, - 'option3') - - - def test_make_file(self): - - cmd = self.cmd + with pytest.raises(DistutilsOptionError): + cmd.ensure_string_list('option3') + def test_make_file(self, cmd): # making sure it raises when infiles is not a string or a list/tuple - self.assertRaises(TypeError, cmd.make_file, - infiles=1, outfile='', func='func', args=()) + with pytest.raises(TypeError): + cmd.make_file(infiles=1, outfile='', func='func', args=()) # making sure execute gets called properly def _execute(func, args, exec_msg, level): - self.assertEqual(exec_msg, 'generating out from in') + assert exec_msg == 'generating out from in' + cmd.force = True cmd.execute = _execute cmd.make_file(infiles='in', outfile='out', func='func', args=()) - def test_dump_options(self): - + def test_dump_options(self, cmd): msgs = [] + def _announce(msg, level): msgs.append(msg) - cmd = self.cmd + cmd.announce = _announce cmd.option1 = 1 cmd.option2 = 1 cmd.user_options = [('option1', '', ''), ('option2', '', '')] cmd.dump_options() - wanted = ["command options for 'MyCmd':", ' option1 = 1', - ' option2 = 1'] - self.assertEqual(msgs, wanted) + wanted = ["command options for 'MyCmd':", ' option1 = 1', ' option2 = 1'] + assert msgs == wanted - def test_ensure_string(self): - cmd = self.cmd + def test_ensure_string(self, cmd): cmd.option1 = 'ok' cmd.ensure_string('option1') cmd.option2 = None cmd.ensure_string('option2', 'xxx') - self.assertTrue(hasattr(cmd, 'option2')) + assert hasattr(cmd, 'option2') cmd.option3 = 1 - self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3') + with pytest.raises(DistutilsOptionError): + cmd.ensure_string('option3') - def test_ensure_filename(self): - cmd = self.cmd + def test_ensure_filename(self, cmd): cmd.option1 = __file__ cmd.ensure_filename('option1') cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2') + with pytest.raises(DistutilsOptionError): + cmd.ensure_filename('option2') - def test_ensure_dirname(self): - cmd = self.cmd + def test_ensure_dirname(self, cmd): cmd.option1 = os.path.dirname(__file__) or os.curdir cmd.ensure_dirname('option1') cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2') - - def test_debug_print(self): - cmd = self.cmd - with captured_stdout() as stdout: - cmd.debug_print('xxx') - stdout.seek(0) - self.assertEqual(stdout.read(), '') - - debug.DEBUG = True - try: - with captured_stdout() as stdout: - cmd.debug_print('xxx') - stdout.seek(0) - self.assertEqual(stdout.read(), 'xxx\n') - finally: - debug.DEBUG = False - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) + with pytest.raises(DistutilsOptionError): + cmd.ensure_dirname('option2') + + def test_debug_print(self, cmd, capsys, monkeypatch): + cmd.debug_print('xxx') + assert capsys.readouterr().out == '' + monkeypatch.setattr(debug, 'DEBUG', True) + cmd.debug_print('xxx') + assert capsys.readouterr().out == 'xxx\n' diff --git a/setuptools/_distutils/tests/test_config.py b/setuptools/_distutils/tests/test_config.py index 27bd9d4435..1ae615db95 100644 --- a/setuptools/_distutils/tests/test_config.py +++ b/setuptools/_distutils/tests/test_config.py @@ -1,14 +1,9 @@ """Tests for distutils.pypirc.pypirc.""" import os -import unittest -from distutils.core import 
PyPIRCCommand -from distutils.core import Distribution -from distutils.log import set_threshold -from distutils.log import WARN +import pytest from distutils.tests import support -from test.support import run_unittest PYPIRC = """\ [distutils] @@ -50,38 +45,13 @@ """ -class BasePyPIRCCommandTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - """Patches the environment.""" - super(BasePyPIRCCommandTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - os.environ['HOME'] = self.tmp_dir - os.environ['USERPROFILE'] = self.tmp_dir - self.rc = os.path.join(self.tmp_dir, '.pypirc') - self.dist = Distribution() - - class command(PyPIRCCommand): - def __init__(self, dist): - super().__init__(dist) - def initialize_options(self): - pass - finalize_options = initialize_options - - self._cmd = command - self.old_threshold = set_threshold(WARN) - - def tearDown(self): - """Removes the patch.""" - set_threshold(self.old_threshold) - super(BasePyPIRCCommandTestCase, self).tearDown() +@support.combine_markers +@pytest.mark.usefixtures('pypirc') +class BasePyPIRCCommandTestCase(support.TempdirManager): + pass class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): - def test_server_registration(self): # This test makes sure PyPIRCCommand knows how to: # 1. handle several sections in .pypirc @@ -93,30 +63,38 @@ def test_server_registration(self): config = cmd._read_pypirc() config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server1'), ('username', 'me')] - self.assertEqual(config, waited) + waited = [ + ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server1'), + ('username', 'me'), + ] + assert config == waited # old format self.write_file(self.rc, PYPIRC_OLD) config = cmd._read_pypirc() config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server-login'), ('username', 'tarek')] - self.assertEqual(config, waited) + waited = [ + ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server-login'), + ('username', 'tarek'), + ] + assert config == waited def test_server_empty_registration(self): cmd = self._cmd(self.dist) rc = cmd._get_rc_file() - self.assertFalse(os.path.exists(rc)) + assert not os.path.exists(rc) cmd._store_pypirc('tarek', 'xxx') - self.assertTrue(os.path.exists(rc)) + assert os.path.exists(rc) f = open(rc) try: content = f.read() - self.assertEqual(content, WANTED) + assert content == WANTED finally: f.close() @@ -128,14 +106,11 @@ def test_config_interpolation(self): config = cmd._read_pypirc() config = list(sorted(config.items())) - waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server3'), ('username', 'cbiggles')] - self.assertEqual(config, waited) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + waited = [ + ('password', 'yh^%#rest-of-my-password'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server3'), + ('username', 'cbiggles'), + ] + assert config == waited diff --git a/setuptools/_distutils/tests/test_config_cmd.py 
b/setuptools/_distutils/tests/test_config_cmd.py index 2c84719aad..e72a7c5ff8 100644 --- a/setuptools/_distutils/tests/test_config_cmd.py +++ b/setuptools/_distutils/tests/test_config_cmd.py @@ -1,33 +1,28 @@ """Tests for distutils.command.config.""" -import unittest import os import sys -from test.support import run_unittest +from test.support import missing_compiler_executable -from .py35compat import missing_compiler_executable +import pytest from distutils.command.config import dump_file, config from distutils.tests import support -from distutils import log +from distutils._log import log -class ConfigTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): +@pytest.fixture(autouse=True) +def info_log(request, monkeypatch): + self = request.instance + self._logs = [] + monkeypatch.setattr(log, 'info', self._info) + + +@support.combine_markers +class TestConfig(support.TempdirManager): def _info(self, msg, *args): for line in msg.splitlines(): self._logs.append(line) - def setUp(self): - super(ConfigTestCase, self).setUp() - self._logs = [] - self.old_log = log.info - log.info = self._info - - def tearDown(self): - log.info = self.old_log - super(ConfigTestCase, self).tearDown() - def test_dump_file(self): this_file = os.path.splitext(__file__)[0] + '.py' f = open(this_file) @@ -37,9 +32,9 @@ def test_dump_file(self): f.close() dump_file(this_file, 'I am the header') - self.assertEqual(len(self._logs), numlines+1) + assert len(self._logs) == numlines + 1 - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + @pytest.mark.skipif('platform.system() == "Windows"') def test_search_cpp(self): cmd = missing_compiler_executable(['preprocessor']) if cmd is not None: @@ -49,14 +44,16 @@ def test_search_cpp(self): cmd._check_compiler() compiler = cmd.compiler if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower(): - self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options') + self.skipTest( + 'xlc: The -E option overrides the -P, -o, and -qsyntaxonly options' + ) # simple pattern searches match = cmd.search_cpp(pattern='xxx', body='/* xxx */') - self.assertEqual(match, 0) + assert match == 0 match = cmd.search_cpp(pattern='_configtest', body='/* xxx */') - self.assertEqual(match, 1) + assert match == 1 def test_finalize_options(self): # finalize_options does a bit of transformation @@ -68,9 +65,9 @@ def test_finalize_options(self): cmd.library_dirs = 'three%sfour' % os.pathsep cmd.ensure_finalized() - self.assertEqual(cmd.include_dirs, ['one', 'two']) - self.assertEqual(cmd.libraries, ['one']) - self.assertEqual(cmd.library_dirs, ['three', 'four']) + assert cmd.include_dirs == ['one', 'two'] + assert cmd.libraries == ['one'] + assert cmd.library_dirs == ['three', 'four'] def test_clean(self): # _clean removes files @@ -82,17 +79,11 @@ def test_clean(self): self.write_file(f2, 'xxx') for f in (f1, f2): - self.assertTrue(os.path.exists(f)) + assert os.path.exists(f) pkg_dir, dist = self.create_dist() cmd = config(dist) cmd._clean(f1, f2) for f in (f1, f2): - self.assertFalse(os.path.exists(f)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert not os.path.exists(f) diff --git a/setuptools/_distutils/tests/test_core.py b/setuptools/_distutils/tests/test_core.py index 7270d699a3..2c11ff769e 100644 --- a/setuptools/_distutils/tests/test_core.py +++ b/setuptools/_distutils/tests/test_core.py @@ -3,13 +3,10 @@ 
import io import distutils.core import os -import shutil import sys -from test.support import captured_stdout, run_unittest -from . import py38compat as os_helper -import unittest -from distutils.tests import support -from distutils import log + +import pytest + from distutils.dist import Distribution # setup script that uses __file__ @@ -56,110 +53,79 @@ def main(): main() """ -class CoreTestCase(support.EnvironGuard, unittest.TestCase): - - def setUp(self): - super(CoreTestCase, self).setUp() - self.old_stdout = sys.stdout - self.cleanup_testfn() - self.old_argv = sys.argv, sys.argv[:] - self.addCleanup(log.set_threshold, log._global_log.threshold) - - def tearDown(self): - sys.stdout = self.old_stdout - self.cleanup_testfn() - sys.argv = self.old_argv[0] - sys.argv[:] = self.old_argv[1] - super(CoreTestCase, self).tearDown() - - def cleanup_testfn(self): - path = os_helper.TESTFN - if os.path.isfile(path): - os.remove(path) - elif os.path.isdir(path): - shutil.rmtree(path) - - def write_setup(self, text, path=os_helper.TESTFN): - f = open(path, "w") - try: - f.write(text) - finally: - f.close() - return path - - def test_run_setup_provides_file(self): + +@pytest.fixture(autouse=True) +def save_stdout(monkeypatch): + monkeypatch.setattr(sys, 'stdout', sys.stdout) + + +@pytest.fixture +def temp_file(tmp_path): + return tmp_path / 'file' + + +@pytest.mark.usefixtures('save_env') +@pytest.mark.usefixtures('save_argv') +class TestCore: + def test_run_setup_provides_file(self, temp_file): # Make sure the script can use __file__; if that's missing, the test # setup.py script will raise NameError. - distutils.core.run_setup( - self.write_setup(setup_using___file__)) + temp_file.write_text(setup_using___file__) + distutils.core.run_setup(temp_file) - def test_run_setup_preserves_sys_argv(self): + def test_run_setup_preserves_sys_argv(self, temp_file): # Make sure run_setup does not clobber sys.argv argv_copy = sys.argv.copy() - distutils.core.run_setup( - self.write_setup(setup_does_nothing)) - self.assertEqual(sys.argv, argv_copy) + temp_file.write_text(setup_does_nothing) + distutils.core.run_setup(temp_file) + assert sys.argv == argv_copy - def test_run_setup_defines_subclass(self): + def test_run_setup_defines_subclass(self, temp_file): # Make sure the script can use __file__; if that's missing, the test # setup.py script will raise NameError. - dist = distutils.core.run_setup( - self.write_setup(setup_defines_subclass)) + temp_file.write_text(setup_defines_subclass) + dist = distutils.core.run_setup(temp_file) install = dist.get_command_obj('install') - self.assertIn('cmd', install.sub_commands) + assert 'cmd' in install.sub_commands - def test_run_setup_uses_current_dir(self): - # This tests that the setup script is run with the current directory - # as its own current directory; this was temporarily broken by a - # previous patch when TESTFN did not use the current directory. + def test_run_setup_uses_current_dir(self, tmp_path): + """ + Test that the setup script is run with the current directory + as its own current directory. 
+ """ sys.stdout = io.StringIO() cwd = os.getcwd() # Create a directory and write the setup.py file there: - os.mkdir(os_helper.TESTFN) - setup_py = os.path.join(os_helper.TESTFN, "setup.py") - distutils.core.run_setup( - self.write_setup(setup_prints_cwd, path=setup_py)) + setup_py = tmp_path / 'setup.py' + setup_py.write_text(setup_prints_cwd) + distutils.core.run_setup(setup_py) output = sys.stdout.getvalue() if output.endswith("\n"): output = output[:-1] - self.assertEqual(cwd, output) + assert cwd == output - def test_run_setup_within_if_main(self): - dist = distutils.core.run_setup( - self.write_setup(setup_within_if_main), stop_after="config") - self.assertIsInstance(dist, Distribution) - self.assertEqual(dist.get_name(), "setup_within_if_main") + def test_run_setup_within_if_main(self, temp_file): + temp_file.write_text(setup_within_if_main) + dist = distutils.core.run_setup(temp_file, stop_after="config") + assert isinstance(dist, Distribution) + assert dist.get_name() == "setup_within_if_main" - def test_run_commands(self): + def test_run_commands(self, temp_file): sys.argv = ['setup.py', 'build'] - dist = distutils.core.run_setup( - self.write_setup(setup_within_if_main), stop_after="commandline") - self.assertNotIn('build', dist.have_run) + temp_file.write_text(setup_within_if_main) + dist = distutils.core.run_setup(temp_file, stop_after="commandline") + assert 'build' not in dist.have_run distutils.core.run_commands(dist) - self.assertIn('build', dist.have_run) + assert 'build' in dist.have_run - def test_debug_mode(self): + def test_debug_mode(self, capsys, monkeypatch): # this covers the code called when DEBUG is set sys.argv = ['setup.py', '--name'] - with captured_stdout() as stdout: - distutils.core.setup(name='bar') - stdout.seek(0) - self.assertEqual(stdout.read(), 'bar\n') - - distutils.core.DEBUG = True - try: - with captured_stdout() as stdout: - distutils.core.setup(name='bar') - finally: - distutils.core.DEBUG = False - stdout.seek(0) + distutils.core.setup(name='bar') + capsys.readouterr().out == 'bar\n' + monkeypatch.setattr(distutils.core, 'DEBUG', True) + distutils.core.setup(name='bar') wanted = "options (after parsing config files):\n" - self.assertEqual(stdout.readlines()[0], wanted) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert capsys.readouterr().out.startswith(wanted) diff --git a/setuptools/_distutils/tests/test_cygwinccompiler.py b/setuptools/_distutils/tests/test_cygwinccompiler.py index 8715a53539..6fb449a6c2 100644 --- a/setuptools/_distutils/tests/test_cygwinccompiler.py +++ b/setuptools/_distutils/tests/test_cygwinccompiler.py @@ -1,96 +1,116 @@ """Tests for distutils.cygwinccompiler.""" -import unittest import sys import os -from test.support import run_unittest -from distutils.cygwinccompiler import (check_config_h, - CONFIG_H_OK, CONFIG_H_NOTOK, - CONFIG_H_UNCERTAIN, - get_msvcr) -from distutils.tests import support +import pytest +from distutils.cygwinccompiler import ( + check_config_h, + CONFIG_H_OK, + CONFIG_H_NOTOK, + CONFIG_H_UNCERTAIN, + get_msvcr, +) +from distutils.tests import support +from distutils import sysconfig -class CygwinCCompilerTestCase(support.TempdirManager, - unittest.TestCase): - def setUp(self): - super(CygwinCCompilerTestCase, self).setUp() - self.version = sys.version - self.python_h = os.path.join(self.mkdtemp(), 'python.h') - from distutils import sysconfig - self.old_get_config_h_filename = 
sysconfig.get_config_h_filename - sysconfig.get_config_h_filename = self._get_config_h_filename +@pytest.fixture(autouse=True) +def stuff(request, monkeypatch, distutils_managed_tempdir): + self = request.instance + self.python_h = os.path.join(self.mkdtemp(), 'python.h') + monkeypatch.setattr(sysconfig, 'get_config_h_filename', self._get_config_h_filename) + monkeypatch.setattr(sys, 'version', sys.version) - def tearDown(self): - sys.version = self.version - from distutils import sysconfig - sysconfig.get_config_h_filename = self.old_get_config_h_filename - super(CygwinCCompilerTestCase, self).tearDown() +class TestCygwinCCompiler(support.TempdirManager): def _get_config_h_filename(self): return self.python_h - def test_check_config_h(self): + @pytest.mark.skipif('sys.platform != "cygwin"') + @pytest.mark.skipif('not os.path.exists("/usr/lib/libbash.dll.a")') + def test_find_library_file(self): + from distutils.cygwinccompiler import CygwinCCompiler + + compiler = CygwinCCompiler() + link_name = "bash" + linkable_file = compiler.find_library_file(["/usr/lib"], link_name) + assert linkable_file is not None + assert os.path.exists(linkable_file) + assert linkable_file == f"/usr/lib/lib{link_name:s}.dll.a" + + @pytest.mark.skipif('sys.platform != "cygwin"') + def test_runtime_library_dir_option(self): + from distutils.cygwinccompiler import CygwinCCompiler + + compiler = CygwinCCompiler() + assert compiler.runtime_library_dir_option('/foo') == [] + def test_check_config_h(self): # check_config_h looks for "GCC" in sys.version first # returns CONFIG_H_OK if found - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' - '4.0.1 (Apple Computer, Inc. build 5370)]') + sys.version = ( + '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' + '4.0.1 (Apple Computer, Inc. build 5370)]' + ) - self.assertEqual(check_config_h()[0], CONFIG_H_OK) + assert check_config_h()[0] == CONFIG_H_OK # then it tries to see if it can find "__GNUC__" in pyconfig.h sys.version = 'something without the *CC word' # if the file doesn't exist it returns CONFIG_H_UNCERTAIN - self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN) + assert check_config_h()[0] == CONFIG_H_UNCERTAIN # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK self.write_file(self.python_h, 'xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK) + assert check_config_h()[0] == CONFIG_H_NOTOK # and CONFIG_H_OK if __GNUC__ is found self.write_file(self.python_h, 'xxx __GNUC__ xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_OK) + assert check_config_h()[0] == CONFIG_H_OK def test_get_msvcr(self): - # none - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]') - self.assertEqual(get_msvcr(), None) + sys.version = ( + '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' + '\n[GCC 4.0.1 (Apple Computer, Inc. 
build 5370)]' + ) + assert get_msvcr() is None # MSVC 7.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1300 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr70']) + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1300 32 bits (Intel)]' + ) + assert get_msvcr() == ['msvcr70'] # MSVC 7.1 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1310 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr71']) + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1310 32 bits (Intel)]' + ) + assert get_msvcr() == ['msvcr71'] # VS2005 / MSVC 8.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1400 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr80']) + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1400 32 bits (Intel)]' + ) + assert get_msvcr() == ['msvcr80'] # VS2008 / MSVC 9.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1500 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr90']) - - sys.version = '3.10.0 (tags/v3.10.0:b494f59, Oct 4 2021, 18:46:30) [MSC v.1929 32 bit (Intel)]' - self.assertEqual(get_msvcr(), ['ucrt', 'vcruntime140']) + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1500 32 bits (Intel)]' + ) + assert get_msvcr() == ['msvcr90'] - # unknown - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.2000 32 bits (Intel)]') - self.assertRaises(ValueError, get_msvcr) + sys.version = ( + '3.10.0 (tags/v3.10.0:b494f59, Oct 4 2021, 18:46:30) ' + '[MSC v.1929 32 bit (Intel)]' + ) + assert get_msvcr() == ['vcruntime140'] -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) + # unknown + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.2000 32 bits (Intel)]' + ) + with pytest.raises(ValueError): + get_msvcr() diff --git a/setuptools/_distutils/tests/test_dep_util.py b/setuptools/_distutils/tests/test_dep_util.py index 0d52740a9e..e5dcad9464 100644 --- a/setuptools/_distutils/tests/test_dep_util.py +++ b/setuptools/_distutils/tests/test_dep_util.py @@ -1,32 +1,31 @@ """Tests for distutils.dep_util.""" -import unittest import os from distutils.dep_util import newer, newer_pairwise, newer_group from distutils.errors import DistutilsFileError from distutils.tests import support -from test.support import run_unittest +import pytest -class DepUtilTestCase(support.TempdirManager, unittest.TestCase): +class TestDepUtil(support.TempdirManager): def test_newer(self): - tmpdir = self.mkdtemp() new_file = os.path.join(tmpdir, 'new') old_file = os.path.abspath(__file__) # Raise DistutilsFileError if 'new_file' does not exist. - self.assertRaises(DistutilsFileError, newer, new_file, old_file) + with pytest.raises(DistutilsFileError): + newer(new_file, old_file) # Return true if 'new_file' exists and is more recently modified than # 'old_file', or if 'new_file' exists and 'old_file' doesn't. self.write_file(new_file) - self.assertTrue(newer(new_file, 'I_dont_exist')) - self.assertTrue(newer(new_file, old_file)) + assert newer(new_file, 'I_dont_exist') + assert newer(new_file, old_file) # Return false if both exist and 'old_file' is the same age or younger # than 'new_file'. 
-        self.assertFalse(newer(old_file, new_file))
+        assert not newer(old_file, new_file)

     def test_newer_pairwise(self):
         tmpdir = self.mkdtemp()
@@ -36,14 +35,13 @@ def test_newer_pairwise(self):
         os.mkdir(targets)
         one = os.path.join(sources, 'one')
         two = os.path.join(sources, 'two')
-        three = os.path.abspath(__file__) # I am the old file
+        three = os.path.abspath(__file__)  # I am the old file
         four = os.path.join(targets, 'four')
         self.write_file(one)
         self.write_file(two)
         self.write_file(four)

-        self.assertEqual(newer_pairwise([one, two], [three, four]),
-                         ([one],[three]))
+        assert newer_pairwise([one, two], [three, four]) == ([one], [three])

     def test_newer_group(self):
         tmpdir = self.mkdtemp()
@@ -59,22 +57,14 @@ def test_newer_group(self):
         self.write_file(one)
         self.write_file(two)
         self.write_file(three)
-        self.assertTrue(newer_group([one, two, three], old_file))
-        self.assertFalse(newer_group([one, two, old_file], three))
+        assert newer_group([one, two, three], old_file)
+        assert not newer_group([one, two, old_file], three)

         # missing handling
         os.remove(one)
-        self.assertRaises(OSError, newer_group, [one, two, old_file], three)
-
-        self.assertFalse(newer_group([one, two, old_file], three,
-                                     missing='ignore'))
-
-        self.assertTrue(newer_group([one, two, old_file], three,
-                                    missing='newer'))
-
+        with pytest.raises(OSError):
+            newer_group([one, two, old_file], three)

-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase)
+        assert not newer_group([one, two, old_file], three, missing='ignore')

-if __name__ == "__main__":
-    run_unittest(test_suite())
+        assert newer_group([one, two, old_file], three, missing='newer')
diff --git a/setuptools/_distutils/tests/test_dir_util.py b/setuptools/_distutils/tests/test_dir_util.py
index 1b1f3bbb02..72aca4ee55 100644
--- a/setuptools/_distutils/tests/test_dir_util.py
+++ b/setuptools/_distutils/tests/test_dir_util.py
@@ -1,89 +1,71 @@
 """Tests for distutils.dir_util."""
-import unittest
 import os
 import stat
-import sys
-from unittest.mock import patch
+import unittest.mock as mock

 from distutils import dir_util, errors
-from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree,
-                                ensure_relative)
+from distutils.dir_util import (
+    mkpath,
+    remove_tree,
+    create_tree,
+    copy_tree,
+    ensure_relative,
+)

-from distutils import log
 from distutils.tests import support
-from test.support import run_unittest
+import pytest

-class DirUtilTestCase(support.TempdirManager, unittest.TestCase):
+@pytest.fixture(autouse=True)
+def stuff(request, monkeypatch, distutils_managed_tempdir):
+    self = request.instance
+    tmp_dir = self.mkdtemp()
+    self.root_target = os.path.join(tmp_dir, 'deep')
+    self.target = os.path.join(self.root_target, 'here')
+    self.target2 = os.path.join(tmp_dir, 'deep2')

-    def _log(self, msg, *args):
-        if len(args) > 0:
-            self._logs.append(msg % args)
-        else:
-            self._logs.append(msg)
-
-    def setUp(self):
-        super(DirUtilTestCase, self).setUp()
-        self._logs = []
-        tmp_dir = self.mkdtemp()
-        self.root_target = os.path.join(tmp_dir, 'deep')
-        self.target = os.path.join(self.root_target, 'here')
-        self.target2 = os.path.join(tmp_dir, 'deep2')
-        self.old_log = log.info
-        log.info = self._log
-
-    def tearDown(self):
-        log.info = self.old_log
-        super(DirUtilTestCase, self).tearDown()
-
-    def test_mkpath_remove_tree_verbosity(self):
+class TestDirUtil(support.TempdirManager):
+    def test_mkpath_remove_tree_verbosity(self, caplog):
         mkpath(self.target, verbose=0)
-        wanted = []
-        self.assertEqual(self._logs, wanted)
+        assert not caplog.records
         remove_tree(self.root_target, verbose=0)

         mkpath(self.target, verbose=1)
-        wanted = ['creating %s' % self.root_target,
-                  'creating %s' % self.target]
-        self.assertEqual(self._logs, wanted)
-        self._logs = []
+        wanted = ['creating %s' % self.root_target, 'creating %s' % self.target]
+        assert caplog.messages == wanted
+        caplog.clear()

         remove_tree(self.root_target, verbose=1)
         wanted = ["removing '%s' (and everything under it)" % self.root_target]
-        self.assertEqual(self._logs, wanted)
+        assert caplog.messages == wanted

-    @unittest.skipIf(sys.platform.startswith('win'),
-                     "This test is only appropriate for POSIX-like systems.")
+    @pytest.mark.skipif("platform.system() == 'Windows'")
     def test_mkpath_with_custom_mode(self):
         # Get and set the current umask value for testing mode bits.
         umask = os.umask(0o002)
         os.umask(umask)
         mkpath(self.target, 0o700)
-        self.assertEqual(
-            stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask)
+        assert stat.S_IMODE(os.stat(self.target).st_mode) == 0o700 & ~umask
         mkpath(self.target2, 0o555)
-        self.assertEqual(
-            stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask)
+        assert stat.S_IMODE(os.stat(self.target2).st_mode) == 0o555 & ~umask

-
-    def test_create_tree_verbosity(self):
+    def test_create_tree_verbosity(self, caplog):
         create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
-        self.assertEqual(self._logs, [])
+        assert caplog.messages == []
         remove_tree(self.root_target, verbose=0)

         wanted = ['creating %s' % self.root_target]
         create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
-        self.assertEqual(self._logs, wanted)
+        assert caplog.messages == wanted

         remove_tree(self.root_target, verbose=0)

-    def test_copy_tree_verbosity(self):
-
+    def test_copy_tree_verbosity(self, caplog):
         mkpath(self.target, verbose=0)

         copy_tree(self.target, self.target2, verbose=0)
-        self.assertEqual(self._logs, [])
+        assert caplog.messages == []

         remove_tree(self.root_target, verbose=0)

@@ -92,9 +74,9 @@ def test_copy_tree_verbosity(self):
         with open(a_file, 'w') as f:
             f.write('some content')

-        wanted = ['copying %s -> %s' % (a_file, self.target2)]
+        wanted = ['copying {} -> {}'.format(a_file, self.target2)]
         copy_tree(self.target, self.target2, verbose=1)
-        self.assertEqual(self._logs, wanted)
+        assert caplog.messages == wanted

         remove_tree(self.root_target, verbose=0)
         remove_tree(self.target2, verbose=0)
@@ -109,31 +91,25 @@ def test_copy_tree_skips_nfs_temp_files(self):
             fh.write('some content')

         copy_tree(self.target, self.target2)
-        self.assertEqual(os.listdir(self.target2), ['ok.txt'])
+        assert os.listdir(self.target2) == ['ok.txt']

         remove_tree(self.root_target, verbose=0)
         remove_tree(self.target2, verbose=0)

     def test_ensure_relative(self):
         if os.sep == '/':
-            self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
-            self.assertEqual(ensure_relative('some/path'), 'some/path')
-        else:   # \\
-            self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo')
-            self.assertEqual(ensure_relative('home\\foo'), 'home\\foo')
+            assert ensure_relative('/home/foo') == 'home/foo'
+            assert ensure_relative('some/path') == 'some/path'
+        else:  # \\
+            assert ensure_relative('c:\\home\\foo') == 'c:home\\foo'
+            assert ensure_relative('home\\foo') == 'home\\foo'

     def test_copy_tree_exception_in_listdir(self):
         """
         An exception in listdir should raise a DistutilsFileError
         """
-        with patch("os.listdir", side_effect=OSError()), \
-             self.assertRaises(errors.DistutilsFileError):
+        with mock.patch("os.listdir", side_effect=OSError()), pytest.raises(
+            errors.DistutilsFileError
+        ):
             src = self.tempdirs[-1]
             dir_util.copy_tree(src, None)
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase)
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_dist.py b/setuptools/_distutils/tests/test_dist.py
index 36155be152..30a6f9ff2e 100644
--- a/setuptools/_distutils/tests/test_dist.py
+++ b/setuptools/_distutils/tests/test_dist.py
@@ -2,21 +2,21 @@
 import os
 import io
 import sys
-import unittest
 import warnings
 import textwrap
+import functools
+import unittest.mock as mock

-from unittest import mock
+import pytest
+import jaraco.path

 from distutils.dist import Distribution, fix_help_options
 from distutils.cmd import Command
-from test.support import (
-    captured_stdout, captured_stderr, run_unittest
-)
-from .py38compat import TESTFN
 from distutils.tests import support
-from distutils import log
+
+
+pydistutils_cfg = '.' * (os.name == 'posix') + 'pydistutils.cfg'


 class test_dist(Command):
@@ -42,21 +42,15 @@ def find_config_files(self):
         return self._config_files


-class DistributionTestCase(support.LoggingSilencer,
-                           support.TempdirManager,
-                           support.EnvironGuard,
-                           unittest.TestCase):
+@pytest.fixture
+def clear_argv():
+    del sys.argv[1:]

-    def setUp(self):
-        super(DistributionTestCase, self).setUp()
-        self.argv = sys.argv, sys.argv[:]
-        del sys.argv[1:]
-
-    def tearDown(self):
-        sys.argv = self.argv[0]
-        sys.argv[:] = self.argv[1]
-        super(DistributionTestCase, self).tearDown()

+@support.combine_markers
+@pytest.mark.usefixtures('save_env')
+@pytest.mark.usefixtures('save_argv')
+class TestDistributionBehavior(support.TempdirManager):
     def create_distribution(self, configfiles=()):
         d = TestDistribution()
         d._config_files = configfiles
@@ -64,57 +58,69 @@ def create_distribution(self, configfiles=()):
         d.parse_command_line()
         return d

-    def test_command_packages_unspecified(self):
+    def test_command_packages_unspecified(self, clear_argv):
         sys.argv.append("build")
         d = self.create_distribution()
-        self.assertEqual(d.get_command_packages(), ["distutils.command"])
+        assert d.get_command_packages() == ["distutils.command"]

-    def test_command_packages_cmdline(self):
+    def test_command_packages_cmdline(self, clear_argv):
         from distutils.tests.test_dist import test_dist
-        sys.argv.extend(["--command-packages",
-                         "foo.bar,distutils.tests",
-                         "test_dist",
-                         "-Ssometext",
-                         ])
+
+        sys.argv.extend(
+            [
+                "--command-packages",
+                "foo.bar,distutils.tests",
+                "test_dist",
+                "-Ssometext",
+            ]
+        )
         d = self.create_distribution()
         # let's actually try to load our test command:
-        self.assertEqual(d.get_command_packages(),
-                         ["distutils.command", "foo.bar", "distutils.tests"])
+        assert d.get_command_packages() == [
+            "distutils.command",
+            "foo.bar",
+            "distutils.tests",
+        ]
         cmd = d.get_command_obj("test_dist")
-        self.assertIsInstance(cmd, test_dist)
-        self.assertEqual(cmd.sample_option, "sometext")
+        assert isinstance(cmd, test_dist)
+        assert cmd.sample_option == "sometext"

-    @unittest.skipIf(
+    @pytest.mark.skipif(
         'distutils' not in Distribution.parse_config_files.__module__,
-        'Cannot test when virtualenv has monkey-patched Distribution.',
+        reason='Cannot test when virtualenv has monkey-patched Distribution',
     )
-    def test_venv_install_options(self):
+    def test_venv_install_options(self, tmp_path):
         sys.argv.append("install")
-        self.addCleanup(os.unlink, TESTFN)
+        file = str(tmp_path / 'file')
         fakepath = '/somedir'

-        with open(TESTFN, "w") as f:
-            print(("[install]\n" "install-base = {0}\n"
-                   "install-platbase = {0}\n"
-                   "install-lib = {0}\n"
-                   "install-platlib = {0}\n"
-                   "install-purelib = {0}\n"
-                   "install-headers = {0}\n"
-                   "install-scripts = {0}\n"
-                   "install-data = {0}\n"
-                   "prefix = {0}\n"
-                   "exec-prefix = {0}\n"
-                   "home = {0}\n"
-                   "user = {0}\n"
-                   "root = {0}").format(fakepath), file=f)
+        jaraco.path.build(
+            {
+                file: f"""
+                [install]
+                install-base = {fakepath}
+                install-platbase = {fakepath}
+                install-lib = {fakepath}
+                install-platlib = {fakepath}
+                install-purelib = {fakepath}
+                install-headers = {fakepath}
+                install-scripts = {fakepath}
+                install-data = {fakepath}
+                prefix = {fakepath}
+                exec-prefix = {fakepath}
+                home = {fakepath}
+                user = {fakepath}
+                root = {fakepath}
+                """,
+            }
+        )

         # Base case: Not in a Virtual Environment
-        with mock.patch.multiple(sys, prefix='/a', base_prefix='/a') as values:
-            d = self.create_distribution([TESTFN])
+        with mock.patch.multiple(sys, prefix='/a', base_prefix='/a'):
+            d = self.create_distribution([file])

-        option_tuple = (TESTFN, fakepath)
+        option_tuple = (file, fakepath)

         result_dict = {
             'install_base': option_tuple,
@@ -132,47 +138,47 @@ def test_venv_install_options(self):
             'root': option_tuple,
         }

-        self.assertEqual(
-            sorted(d.command_options.get('install').keys()),
-            sorted(result_dict.keys()))
+        assert sorted(d.command_options.get('install').keys()) == sorted(
+            result_dict.keys()
+        )

-        for (key, value) in d.command_options.get('install').items():
-            self.assertEqual(value, result_dict[key])
+        for key, value in d.command_options.get('install').items():
+            assert value == result_dict[key]

         # Test case: In a Virtual Environment
-        with mock.patch.multiple(sys, prefix='/a', base_prefix='/b') as values:
-            d = self.create_distribution([TESTFN])
+        with mock.patch.multiple(sys, prefix='/a', base_prefix='/b'):
+            d = self.create_distribution([file])

         for key in result_dict.keys():
-            self.assertNotIn(key, d.command_options.get('install', {}))
+            assert key not in d.command_options.get('install', {})

-    def test_command_packages_configfile(self):
+    def test_command_packages_configfile(self, tmp_path, clear_argv):
         sys.argv.append("build")
-        self.addCleanup(os.unlink, TESTFN)
-        f = open(TESTFN, "w")
-        try:
-            print("[global]", file=f)
-            print("command_packages = foo.bar, splat", file=f)
-        finally:
-            f.close()
-
-        d = self.create_distribution([TESTFN])
-        self.assertEqual(d.get_command_packages(),
-                         ["distutils.command", "foo.bar", "splat"])
+        file = str(tmp_path / "file")
+        jaraco.path.build(
+            {
+                file: """
+                [global]
+                command_packages = foo.bar, splat
+                """,
+            }
+        )
+
+        d = self.create_distribution([file])
+        assert d.get_command_packages() == ["distutils.command", "foo.bar", "splat"]

         # ensure command line overrides config:
         sys.argv[1:] = ["--command-packages", "spork", "build"]
-        d = self.create_distribution([TESTFN])
-        self.assertEqual(d.get_command_packages(),
-                         ["distutils.command", "spork"])
+        d = self.create_distribution([file])
+        assert d.get_command_packages() == ["distutils.command", "spork"]

         # Setting --command-packages to '' should cause the default to
         # be used even if a config file specified something else:
         sys.argv[1:] = ["--command-packages", "", "build"]
-        d = self.create_distribution([TESTFN])
-        self.assertEqual(d.get_command_packages(), ["distutils.command"])
+        d = self.create_distribution([file])
+        assert d.get_command_packages() == ["distutils.command"]

-    def test_empty_options(self):
+    def test_empty_options(self, request):
         # an empty options dictionary should not stay in the
         # list of attributes

@@ -182,328 +188,310 @@ def test_empty_options(self):
         def _warn(msg):
             warns.append(msg)

-        self.addCleanup(setattr, warnings, 'warn', warnings.warn)
+        request.addfinalizer(
+            functools.partial(setattr, warnings, 'warn', warnings.warn)
+        )
         warnings.warn = _warn
-        dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx',
-                                   'version': 'xxx', 'url': 'xxxx',
-                                   'options': {}})
-
-        self.assertEqual(len(warns), 0)
-        self.assertNotIn('options', dir(dist))
+        dist = Distribution(
+            attrs={
+                'author': 'xxx',
+                'name': 'xxx',
+                'version': 'xxx',
+                'url': 'xxxx',
+                'options': {},
+            }
+        )
+
+        assert len(warns) == 0
+        assert 'options' not in dir(dist)

     def test_finalize_options(self):
-        attrs = {'keywords': 'one,two',
-                 'platforms': 'one,two'}
+        attrs = {'keywords': 'one,two', 'platforms': 'one,two'}
         dist = Distribution(attrs=attrs)
         dist.finalize_options()

         # finalize_option splits platforms and keywords
-        self.assertEqual(dist.metadata.platforms, ['one', 'two'])
-        self.assertEqual(dist.metadata.keywords, ['one', 'two'])
+        assert dist.metadata.platforms == ['one', 'two']
+        assert dist.metadata.keywords == ['one', 'two']

-        attrs = {'keywords': 'foo bar',
-                 'platforms': 'foo bar'}
+        attrs = {'keywords': 'foo bar', 'platforms': 'foo bar'}
         dist = Distribution(attrs=attrs)
         dist.finalize_options()
-        self.assertEqual(dist.metadata.platforms, ['foo bar'])
-        self.assertEqual(dist.metadata.keywords, ['foo bar'])
+        assert dist.metadata.platforms == ['foo bar']
+        assert dist.metadata.keywords == ['foo bar']

     def test_get_command_packages(self):
         dist = Distribution()
-        self.assertEqual(dist.command_packages, None)
+        assert dist.command_packages is None

         cmds = dist.get_command_packages()
-        self.assertEqual(cmds, ['distutils.command'])
-        self.assertEqual(dist.command_packages,
-                         ['distutils.command'])
+        assert cmds == ['distutils.command']
+        assert dist.command_packages == ['distutils.command']

         dist.command_packages = 'one,two'
         cmds = dist.get_command_packages()
-        self.assertEqual(cmds, ['distutils.command', 'one', 'two'])
+        assert cmds == ['distutils.command', 'one', 'two']

     def test_announce(self):
         # make sure the level is known
         dist = Distribution()
-        args = ('ok',)
-        kwargs = {'level': 'ok2'}
-        self.assertRaises(ValueError, dist.announce, args, kwargs)
-
+        with pytest.raises(TypeError):
+            dist.announce('ok', level='ok2')

-    def test_find_config_files_disable(self):
+    def test_find_config_files_disable(self, temp_home):
         # Ticket #1180: Allow user to disable their home config file.
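The rewrite that follows leans on the suite's temp_home fixture and jaraco.path.build instead of hand-rolled expanduser patching. A rough standalone equivalent of the behavior under test, assuming a POSIX home lookup via $HOME (on Windows, expanduser consults other variables, so this sketch would not apply there):

    from distutils.dist import Distribution

    def test_no_user_cfg_drops_user_config(tmp_path, monkeypatch):
        # place a user config in a stand-in home directory
        (tmp_path / '.pydistutils.cfg').write_text('[distutils]\n')
        monkeypatch.setenv('HOME', str(tmp_path))

        all_files = Distribution().find_config_files()
        files = Distribution(attrs={'script_args': ['--no-user-cfg']}).find_config_files()
        # --no-user-cfg should drop exactly the user config file
        assert len(all_files) - 1 == len(files)
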
-        temp_home = self.mkdtemp()
-        if os.name == 'posix':
-            user_filename = os.path.join(temp_home, ".pydistutils.cfg")
-        else:
-            user_filename = os.path.join(temp_home, "pydistutils.cfg")
-
-        with open(user_filename, 'w') as f:
-            f.write('[distutils]\n')
-
-        def _expander(path):
-            return temp_home
+        jaraco.path.build({pydistutils_cfg: '[distutils]\n'}, temp_home)

-        old_expander = os.path.expanduser
-        os.path.expanduser = _expander
-        try:
-            d = Distribution()
-            all_files = d.find_config_files()
+        d = Distribution()
+        all_files = d.find_config_files()

-            d = Distribution(attrs={'script_args': ['--no-user-cfg']})
-            files = d.find_config_files()
-        finally:
-            os.path.expanduser = old_expander
+        d = Distribution(attrs={'script_args': ['--no-user-cfg']})
+        files = d.find_config_files()

         # make sure --no-user-cfg disables the user cfg file
-        self.assertEqual(len(all_files)-1, len(files))
-
-class MetadataTestCase(support.TempdirManager, support.EnvironGuard,
-                       unittest.TestCase):
-
-    def setUp(self):
-        super(MetadataTestCase, self).setUp()
-        self.argv = sys.argv, sys.argv[:]
-
-    def tearDown(self):
-        sys.argv = self.argv[0]
-        sys.argv[:] = self.argv[1]
-        super(MetadataTestCase, self).tearDown()
+        assert len(all_files) - 1 == len(files)

+    @pytest.mark.skipif(
+        'platform.system() == "Windows"',
+        reason='Windows does not honor chmod 000',
+    )
+    def test_find_config_files_permission_error(self, fake_home):
+        """
+        Finding config files should not fail when directory is inaccessible.
+        """
+        fake_home.joinpath(pydistutils_cfg).write_text('')
+        fake_home.chmod(0o000)
+        Distribution().find_config_files()
+
+
+@pytest.mark.usefixtures('save_env')
+@pytest.mark.usefixtures('save_argv')
+class TestMetadata(support.TempdirManager):
     def format_metadata(self, dist):
         sio = io.StringIO()
         dist.metadata.write_pkg_file(sio)
         return sio.getvalue()

     def test_simple_metadata(self):
-        attrs = {"name": "package",
-                 "version": "1.0"}
+        attrs = {"name": "package", "version": "1.0"}
         dist = Distribution(attrs)
         meta = self.format_metadata(dist)
-        self.assertIn("Metadata-Version: 1.0", meta)
-        self.assertNotIn("provides:", meta.lower())
-        self.assertNotIn("requires:", meta.lower())
-        self.assertNotIn("obsoletes:", meta.lower())
+        assert "Metadata-Version: 1.0" in meta
+        assert "provides:" not in meta.lower()
+        assert "requires:" not in meta.lower()
+        assert "obsoletes:" not in meta.lower()

     def test_provides(self):
-        attrs = {"name": "package",
-                 "version": "1.0",
-                 "provides": ["package", "package.sub"]}
+        attrs = {
+            "name": "package",
+            "version": "1.0",
+            "provides": ["package", "package.sub"],
+        }
         dist = Distribution(attrs)
-        self.assertEqual(dist.metadata.get_provides(),
-                         ["package", "package.sub"])
-        self.assertEqual(dist.get_provides(),
-                         ["package", "package.sub"])
+        assert dist.metadata.get_provides() == ["package", "package.sub"]
+        assert dist.get_provides() == ["package", "package.sub"]
         meta = self.format_metadata(dist)
-        self.assertIn("Metadata-Version: 1.1", meta)
-        self.assertNotIn("requires:", meta.lower())
-        self.assertNotIn("obsoletes:", meta.lower())
+        assert "Metadata-Version: 1.1" in meta
+        assert "requires:" not in meta.lower()
+        assert "obsoletes:" not in meta.lower()

     def test_provides_illegal(self):
-        self.assertRaises(ValueError, Distribution,
-                          {"name": "package",
-                           "version": "1.0",
-                           "provides": ["my.pkg (splat)"]})
+        with pytest.raises(ValueError):
+            Distribution(
+                {"name": "package", "version": "1.0", "provides": ["my.pkg (splat)"]},
+            )

     def test_requires(self):
-        attrs = {"name": "package",
-                 "version": "1.0",
-                 "requires": ["other", "another (==1.0)"]}
+        attrs = {
+            "name": "package",
+            "version": "1.0",
+            "requires": ["other", "another (==1.0)"],
+        }
         dist = Distribution(attrs)
-        self.assertEqual(dist.metadata.get_requires(),
-                         ["other", "another (==1.0)"])
-        self.assertEqual(dist.get_requires(),
-                         ["other", "another (==1.0)"])
+        assert dist.metadata.get_requires() == ["other", "another (==1.0)"]
+        assert dist.get_requires() == ["other", "another (==1.0)"]
         meta = self.format_metadata(dist)
-        self.assertIn("Metadata-Version: 1.1", meta)
-        self.assertNotIn("provides:", meta.lower())
-        self.assertIn("Requires: other", meta)
-        self.assertIn("Requires: another (==1.0)", meta)
-        self.assertNotIn("obsoletes:", meta.lower())
+        assert "Metadata-Version: 1.1" in meta
+        assert "provides:" not in meta.lower()
+        assert "Requires: other" in meta
+        assert "Requires: another (==1.0)" in meta
+        assert "obsoletes:" not in meta.lower()

     def test_requires_illegal(self):
-        self.assertRaises(ValueError, Distribution,
-                          {"name": "package",
-                           "version": "1.0",
-                           "requires": ["my.pkg (splat)"]})
+        with pytest.raises(ValueError):
+            Distribution(
+                {"name": "package", "version": "1.0", "requires": ["my.pkg (splat)"]},
+            )

     def test_requires_to_list(self):
-        attrs = {"name": "package",
-                 "requires": iter(["other"])}
+        attrs = {"name": "package", "requires": iter(["other"])}
         dist = Distribution(attrs)
-        self.assertIsInstance(dist.metadata.requires, list)
-
+        assert isinstance(dist.metadata.requires, list)

     def test_obsoletes(self):
-        attrs = {"name": "package",
-                 "version": "1.0",
-                 "obsoletes": ["other", "another (<1.0)"]}
+        attrs = {
+            "name": "package",
+            "version": "1.0",
+            "obsoletes": ["other", "another (<1.0)"],
+        }
         dist = Distribution(attrs)
-        self.assertEqual(dist.metadata.get_obsoletes(),
-                         ["other", "another (<1.0)"])
-        self.assertEqual(dist.get_obsoletes(),
-                         ["other", "another (<1.0)"])
+        assert dist.metadata.get_obsoletes() == ["other", "another (<1.0)"]
+        assert dist.get_obsoletes() == ["other", "another (<1.0)"]
         meta = self.format_metadata(dist)
-        self.assertIn("Metadata-Version: 1.1", meta)
-        self.assertNotIn("provides:", meta.lower())
-        self.assertNotIn("requires:", meta.lower())
-        self.assertIn("Obsoletes: other", meta)
-        self.assertIn("Obsoletes: another (<1.0)", meta)
+        assert "Metadata-Version: 1.1" in meta
+        assert "provides:" not in meta.lower()
+        assert "requires:" not in meta.lower()
+        assert "Obsoletes: other" in meta
+        assert "Obsoletes: another (<1.0)" in meta

     def test_obsoletes_illegal(self):
-        self.assertRaises(ValueError, Distribution,
-                          {"name": "package",
-                           "version": "1.0",
-                           "obsoletes": ["my.pkg (splat)"]})
+        with pytest.raises(ValueError):
+            Distribution(
+                {"name": "package", "version": "1.0", "obsoletes": ["my.pkg (splat)"]},
+            )

     def test_obsoletes_to_list(self):
-        attrs = {"name": "package",
-                 "obsoletes": iter(["other"])}
+        attrs = {"name": "package", "obsoletes": iter(["other"])}
         dist = Distribution(attrs)
-        self.assertIsInstance(dist.metadata.obsoletes, list)
+        assert isinstance(dist.metadata.obsoletes, list)

     def test_classifier(self):
-        attrs = {'name': 'Boa', 'version': '3.0',
-                 'classifiers': ['Programming Language :: Python :: 3']}
+        attrs = {
+            'name': 'Boa',
+            'version': '3.0',
+            'classifiers': ['Programming Language :: Python :: 3'],
+        }
         dist = Distribution(attrs)
-        self.assertEqual(dist.get_classifiers(),
-                         ['Programming Language :: Python :: 3'])
+        assert dist.get_classifiers() == ['Programming Language :: Python :: 3']
         meta = self.format_metadata(dist)
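These metadata tests all go through the format_metadata helper defined earlier in this hunk, and the behavior they pin is the version bump: write_pkg_file() emits Metadata-Version 1.0 for bare name/version metadata and 1.1 once fields such as classifiers, provides, requires, obsoletes, or download_url appear. A condensed standalone sketch of that check:

    import io
    from distutils.dist import Distribution

    def format_metadata(dist):
        # serialize PKG-INFO to a string, as the tests' helper does
        sio = io.StringIO()
        dist.metadata.write_pkg_file(sio)
        return sio.getvalue()

    assert 'Metadata-Version: 1.0' in format_metadata(
        Distribution({'name': 'package', 'version': '1.0'})
    )
    assert 'Metadata-Version: 1.1' in format_metadata(
        Distribution({'name': 'package', 'version': '1.0',
                      'download_url': 'http://example.org/boa'})
    )
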
-        self.assertIn('Metadata-Version: 1.1', meta)
+        assert 'Metadata-Version: 1.1' in meta

-    def test_classifier_invalid_type(self):
-        attrs = {'name': 'Boa', 'version': '3.0',
-                 'classifiers': ('Programming Language :: Python :: 3',)}
-        with captured_stderr() as error:
-            d = Distribution(attrs)
+    def test_classifier_invalid_type(self, caplog):
+        attrs = {
+            'name': 'Boa',
+            'version': '3.0',
+            'classifiers': ('Programming Language :: Python :: 3',),
+        }
+        d = Distribution(attrs)
         # should have warning about passing a non-list
-        self.assertIn('should be a list', error.getvalue())
+        assert 'should be a list' in caplog.messages[0]
         # should be converted to a list
-        self.assertIsInstance(d.metadata.classifiers, list)
-        self.assertEqual(d.metadata.classifiers,
-                         list(attrs['classifiers']))
+        assert isinstance(d.metadata.classifiers, list)
+        assert d.metadata.classifiers == list(attrs['classifiers'])

     def test_keywords(self):
-        attrs = {'name': 'Monty', 'version': '1.0',
-                 'keywords': ['spam', 'eggs', 'life of brian']}
+        attrs = {
+            'name': 'Monty',
+            'version': '1.0',
+            'keywords': ['spam', 'eggs', 'life of brian'],
+        }
         dist = Distribution(attrs)
-        self.assertEqual(dist.get_keywords(),
-                         ['spam', 'eggs', 'life of brian'])
-
-    def test_keywords_invalid_type(self):
-        attrs = {'name': 'Monty', 'version': '1.0',
-                 'keywords': ('spam', 'eggs', 'life of brian')}
-        with captured_stderr() as error:
-            d = Distribution(attrs)
+        assert dist.get_keywords() == ['spam', 'eggs', 'life of brian']
+
+    def test_keywords_invalid_type(self, caplog):
+        attrs = {
+            'name': 'Monty',
+            'version': '1.0',
+            'keywords': ('spam', 'eggs', 'life of brian'),
+        }
+        d = Distribution(attrs)
         # should have warning about passing a non-list
-        self.assertIn('should be a list', error.getvalue())
+        assert 'should be a list' in caplog.messages[0]
         # should be converted to a list
-        self.assertIsInstance(d.metadata.keywords, list)
-        self.assertEqual(d.metadata.keywords, list(attrs['keywords']))
+        assert isinstance(d.metadata.keywords, list)
+        assert d.metadata.keywords == list(attrs['keywords'])

     def test_platforms(self):
-        attrs = {'name': 'Monty', 'version': '1.0',
-                 'platforms': ['GNU/Linux', 'Some Evil Platform']}
+        attrs = {
+            'name': 'Monty',
+            'version': '1.0',
+            'platforms': ['GNU/Linux', 'Some Evil Platform'],
+        }
         dist = Distribution(attrs)
-        self.assertEqual(dist.get_platforms(),
-                         ['GNU/Linux', 'Some Evil Platform'])
-
-    def test_platforms_invalid_types(self):
-        attrs = {'name': 'Monty', 'version': '1.0',
-                 'platforms': ('GNU/Linux', 'Some Evil Platform')}
-        with captured_stderr() as error:
-            d = Distribution(attrs)
+        assert dist.get_platforms() == ['GNU/Linux', 'Some Evil Platform']
+
+    def test_platforms_invalid_types(self, caplog):
+        attrs = {
+            'name': 'Monty',
+            'version': '1.0',
+            'platforms': ('GNU/Linux', 'Some Evil Platform'),
+        }
+        d = Distribution(attrs)
         # should have warning about passing a non-list
-        self.assertIn('should be a list', error.getvalue())
+        assert 'should be a list' in caplog.messages[0]
         # should be converted to a list
-        self.assertIsInstance(d.metadata.platforms, list)
-        self.assertEqual(d.metadata.platforms, list(attrs['platforms']))
+        assert isinstance(d.metadata.platforms, list)
+        assert d.metadata.platforms == list(attrs['platforms'])

     def test_download_url(self):
-        attrs = {'name': 'Boa', 'version': '3.0',
-                 'download_url': 'http://example.org/boa'}
+        attrs = {
+            'name': 'Boa',
+            'version': '3.0',
+            'download_url': 'http://example.org/boa',
+        }
         dist = Distribution(attrs)
         meta = self.format_metadata(dist)
-        self.assertIn('Metadata-Version: 1.1', meta)
+        assert 'Metadata-Version: 1.1' in meta

     def test_long_description(self):
-        long_desc = textwrap.dedent("""\
+        long_desc = textwrap.dedent(
+            """\
             example::
                 We start here
             and continue here
-            and end here.""")
-        attrs = {"name": "package",
-                 "version": "1.0",
-                 "long_description": long_desc}
+            and end here."""
+        )
+        attrs = {"name": "package", "version": "1.0", "long_description": long_desc}

         dist = Distribution(attrs)
         meta = self.format_metadata(dist)
         meta = meta.replace('\n' + 8 * ' ', '\n')
-        self.assertIn(long_desc, meta)
-
-    def test_custom_pydistutils(self):
-        # fixes #2166
-        # make sure pydistutils.cfg is found
-        if os.name == 'posix':
-            user_filename = ".pydistutils.cfg"
-        else:
-            user_filename = "pydistutils.cfg"
-
-        temp_dir = self.mkdtemp()
-        user_filename = os.path.join(temp_dir, user_filename)
-        f = open(user_filename, 'w')
-        try:
-            f.write('.')
-        finally:
-            f.close()
-
-        try:
-            dist = Distribution()
-
-            # linux-style
-            if sys.platform in ('linux', 'darwin'):
-                os.environ['HOME'] = temp_dir
-                files = dist.find_config_files()
-                self.assertIn(user_filename, files)
-
-            # win32-style
-            if sys.platform == 'win32':
-                # home drive should be found
-                os.environ['USERPROFILE'] = temp_dir
-                files = dist.find_config_files()
-                self.assertIn(user_filename, files,
-                              '%r not found in %r' % (user_filename, files))
-        finally:
-            os.remove(user_filename)
+        assert long_desc in meta
+
+    def test_custom_pydistutils(self, temp_home):
+        """
+        pydistutils.cfg is found
+        """
+        jaraco.path.build({pydistutils_cfg: ''}, temp_home)
+        config_path = temp_home / pydistutils_cfg
+
+        assert str(config_path) in Distribution().find_config_files()
+
+    def test_extra_pydistutils(self, monkeypatch, tmp_path):
+        jaraco.path.build({'overrides.cfg': ''}, tmp_path)
+        filename = tmp_path / 'overrides.cfg'
+        monkeypatch.setenv('DIST_EXTRA_CONFIG', str(filename))
+        assert str(filename) in Distribution().find_config_files()

     def test_fix_help_options(self):
         help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)]
         fancy_options = fix_help_options(help_tuples)
-        self.assertEqual(fancy_options[0], ('a', 'b', 'c'))
-        self.assertEqual(fancy_options[1], (1, 2, 3))
+        assert fancy_options[0] == ('a', 'b', 'c')
+        assert fancy_options[1] == (1, 2, 3)

-    def test_show_help(self):
+    def test_show_help(self, request, capsys):
         # smoke test, just makes sure some help is displayed
-        self.addCleanup(log.set_threshold, log._global_log.threshold)
         dist = Distribution()
         sys.argv = []
         dist.help = 1
         dist.script_name = 'setup.py'
-        with captured_stdout() as s:
-            dist.parse_command_line()
-
-        output = [line for line in s.getvalue().split('\n')
-                  if line.strip() != '']
-        self.assertTrue(output)
+        dist.parse_command_line()

+        output = [
+            line for line in capsys.readouterr().out.split('\n') if line.strip() != ''
+        ]
+        assert output

     def test_read_metadata(self):
-        attrs = {"name": "package",
-                 "version": "1.0",
-                 "long_description": "desc",
-                 "description": "xxx",
-                 "download_url": "http://example.com",
-                 "keywords": ['one', 'two'],
-                 "requires": ['foo']}
+        attrs = {
+            "name": "package",
+            "version": "1.0",
+            "long_description": "desc",
+            "description": "xxx",
+            "download_url": "http://example.com",
+            "keywords": ['one', 'two'],
+            "requires": ['foo'],
+        }

         dist = Distribution(attrs)
         metadata = dist.metadata
@@ -514,20 +502,11 @@ def test_read_metadata(self):
         PKG_INFO.seek(0)
         metadata.read_pkg_file(PKG_INFO)

-        self.assertEqual(metadata.name, "package")
-        self.assertEqual(metadata.version, "1.0")
-        self.assertEqual(metadata.description, "xxx")
-        self.assertEqual(metadata.download_url, 'http://example.com')
-        self.assertEqual(metadata.keywords, ['one', 'two'])
-        self.assertEqual(metadata.platforms, ['UNKNOWN'])
-        self.assertEqual(metadata.obsoletes, None)
-        self.assertEqual(metadata.requires, ['foo'])
-
-def test_suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase))
-    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase))
-    return suite
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
+        assert metadata.name == "package"
+        assert metadata.version == "1.0"
+        assert metadata.description == "xxx"
+        assert metadata.download_url == 'http://example.com'
+        assert metadata.keywords == ['one', 'two']
+        assert metadata.platforms is None
+        assert metadata.obsoletes is None
+        assert metadata.requires == ['foo']
diff --git a/setuptools/_distutils/tests/test_extension.py b/setuptools/_distutils/tests/test_extension.py
index 78a55daa08..f86af07376 100644
--- a/setuptools/_distutils/tests/test_extension.py
+++ b/setuptools/_distutils/tests/test_extension.py
@@ -1,15 +1,14 @@
 """Tests for distutils.extension."""
-import unittest
 import os
 import warnings

-from test.support import run_unittest
 from distutils.extension import read_setup_file, Extension

 from .py38compat import check_warnings

+import pytest

-class ExtensionTestCase(unittest.TestCase):
+class TestExtension:
     def test_read_setup_file(self):
         # trying to read a Setup file
         # (sample extracted from the PyGame project)
@@ -21,51 +20,85 @@ def test_read_setup_file(self):

         # here are the extensions read_setup_file should have created
         # out of the file
-        wanted = ['_arraysurfarray', '_camera', '_numericsndarray',
-                  '_numericsurfarray', 'base', 'bufferproxy', 'cdrom',
-                  'color', 'constants', 'display', 'draw', 'event',
-                  'fastevent', 'font', 'gfxdraw', 'image', 'imageext',
-                  'joystick', 'key', 'mask', 'mixer', 'mixer_music',
-                  'mouse', 'movie', 'overlay', 'pixelarray', 'pypm',
-                  'rect', 'rwobject', 'scrap', 'surface', 'surflock',
-                  'time', 'transform']
+        wanted = [
+            '_arraysurfarray',
+            '_camera',
+            '_numericsndarray',
+            '_numericsurfarray',
+            'base',
+            'bufferproxy',
+            'cdrom',
+            'color',
+            'constants',
+            'display',
+            'draw',
+            'event',
+            'fastevent',
+            'font',
+            'gfxdraw',
+            'image',
+            'imageext',
+            'joystick',
+            'key',
+            'mask',
+            'mixer',
+            'mixer_music',
+            'mouse',
+            'movie',
+            'overlay',
+            'pixelarray',
+            'pypm',
+            'rect',
+            'rwobject',
+            'scrap',
+            'surface',
+            'surflock',
+            'time',
+            'transform',
+        ]

-        self.assertEqual(names, wanted)
+        assert names == wanted

     def test_extension_init(self):
         # the first argument, which is the name, must be a string
-        self.assertRaises(AssertionError, Extension, 1, [])
+        with pytest.raises(AssertionError):
+            Extension(1, [])
         ext = Extension('name', [])
-        self.assertEqual(ext.name, 'name')
+        assert ext.name == 'name'

         # the second argument, which is the list of files, must
         # be a list of strings
-        self.assertRaises(AssertionError, Extension, 'name', 'file')
-        self.assertRaises(AssertionError, Extension, 'name', ['file', 1])
+        with pytest.raises(AssertionError):
+            Extension('name', 'file')
+        with pytest.raises(AssertionError):
+            Extension('name', ['file', 1])
         ext = Extension('name', ['file1', 'file2'])
-        self.assertEqual(ext.sources, ['file1', 'file2'])
+        assert ext.sources == ['file1', 'file2']

         # others arguments have defaults
-        for attr in ('include_dirs', 'define_macros', 'undef_macros',
-                     'library_dirs', 'libraries', 'runtime_library_dirs',
-                     'extra_objects', 'extra_compile_args', 'extra_link_args',
-                     'export_symbols', 'swig_opts', 'depends'):
-            self.assertEqual(getattr(ext, attr), [])
+        for attr in (
+            'include_dirs',
+            'define_macros',
+            'undef_macros',
+            'library_dirs',
+            'libraries',
+            'runtime_library_dirs',
+            'extra_objects',
+            'extra_compile_args',
+            'extra_link_args',
+            'export_symbols',
+            'swig_opts',
+            'depends',
+        ):
+            assert getattr(ext, attr) == []

-        self.assertEqual(ext.language, None)
-        self.assertEqual(ext.optional, None)
+        assert ext.language is None
+        assert ext.optional is None

         # if there are unknown keyword options, warn about them
         with check_warnings() as w:
             warnings.simplefilter('always')
             ext = Extension('name', ['file1', 'file2'], chic=True)

-        self.assertEqual(len(w.warnings), 1)
-        self.assertEqual(str(w.warnings[0].message),
-                         "Unknown Extension options: 'chic'")
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase)
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
+        assert len(w.warnings) == 1
+        assert str(w.warnings[0].message) == "Unknown Extension options: 'chic'"
diff --git a/setuptools/_distutils/tests/test_file_util.py b/setuptools/_distutils/tests/test_file_util.py
index 81b90d6c8a..9f44f91dfa 100644
--- a/setuptools/_distutils/tests/test_file_util.py
+++ b/setuptools/_distutils/tests/test_file_util.py
@@ -1,40 +1,26 @@
 """Tests for distutils.file_util."""
-import unittest
 import os
 import errno
-from unittest.mock import patch
+import unittest.mock as mock

 from distutils.file_util import move_file, copy_file
-from distutils import log
 from distutils.tests import support
 from distutils.errors import DistutilsFileError
-from test.support import run_unittest
 from .py38compat import unlink

+import pytest

-class FileUtilTestCase(support.TempdirManager, unittest.TestCase):
+@pytest.fixture(autouse=True)
+def stuff(request, monkeypatch, distutils_managed_tempdir):
+    self = request.instance
+    tmp_dir = self.mkdtemp()
+    self.source = os.path.join(tmp_dir, 'f1')
+    self.target = os.path.join(tmp_dir, 'f2')
+    self.target_dir = os.path.join(tmp_dir, 'd1')

-    def _log(self, msg, *args):
-        if len(args) > 0:
-            self._logs.append(msg % args)
-        else:
-            self._logs.append(msg)
-
-    def setUp(self):
-        super(FileUtilTestCase, self).setUp()
-        self._logs = []
-        self.old_log = log.info
-        log.info = self._log
-        tmp_dir = self.mkdtemp()
-        self.source = os.path.join(tmp_dir, 'f1')
-        self.target = os.path.join(tmp_dir, 'f2')
-        self.target_dir = os.path.join(tmp_dir, 'd1')
-
-    def tearDown(self):
-        log.info = self.old_log
-        super(FileUtilTestCase, self).tearDown()

-    def test_move_file_verbosity(self):
+class TestFileUtil(support.TempdirManager):
+    def test_move_file_verbosity(self, caplog):
         f = open(self.source, 'w')
         try:
             f.write('some content')
@@ -42,39 +28,41 @@ def test_move_file_verbosity(self):
             f.close()

         move_file(self.source, self.target, verbose=0)
-        wanted = []
-        self.assertEqual(self._logs, wanted)
+        assert not caplog.messages

         # back to original state
         move_file(self.target, self.source, verbose=0)

         move_file(self.source, self.target, verbose=1)
-        wanted = ['moving %s -> %s' % (self.source, self.target)]
-        self.assertEqual(self._logs, wanted)
+        wanted = ['moving {} -> {}'.format(self.source, self.target)]
+        assert caplog.messages == wanted

         # back to original state
         move_file(self.target, self.source, verbose=0)

-        self._logs = []
+        caplog.clear()
         # now the target is a dir
         os.mkdir(self.target_dir)
         move_file(self.source, self.target_dir, verbose=1)
-        wanted = ['moving %s -> %s' % (self.source, self.target_dir)]
-        self.assertEqual(self._logs, wanted)
+        wanted = ['moving {} -> {}'.format(self.source, self.target_dir)]
+        assert caplog.messages == wanted

     def test_move_file_exception_unpacking_rename(self):
         # see issue 22182
-        with patch("os.rename", side_effect=OSError("wrong", 1)), \
-             self.assertRaises(DistutilsFileError):
+        with mock.patch("os.rename", side_effect=OSError("wrong", 1)), pytest.raises(
+            DistutilsFileError
+        ):
             with open(self.source, 'w') as fobj:
                 fobj.write('spam eggs')
             move_file(self.source, self.target, verbose=0)

     def test_move_file_exception_unpacking_unlink(self):
         # see issue 22182
-        with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \
-             patch("os.unlink", side_effect=OSError("wrong", 1)), \
-             self.assertRaises(DistutilsFileError):
+        with mock.patch(
+            "os.rename", side_effect=OSError(errno.EXDEV, "wrong")
+        ), mock.patch("os.unlink", side_effect=OSError("wrong", 1)), pytest.raises(
+            DistutilsFileError
+        ):
             with open(self.source, 'w') as fobj:
                 fobj.write('spam eggs')
             move_file(self.source, self.target, verbose=0)
@@ -94,10 +82,10 @@ def test_copy_file_hard_link(self):
         copy_file(self.source, self.target, link='hard')
         st2 = os.stat(self.source)
         st3 = os.stat(self.target)
-        self.assertTrue(os.path.samestat(st, st2), (st, st2))
-        self.assertTrue(os.path.samestat(st2, st3), (st2, st3))
-        with open(self.source, 'r') as f:
-            self.assertEqual(f.read(), 'some content')
+        assert os.path.samestat(st, st2), (st, st2)
+        assert os.path.samestat(st2, st3), (st2, st3)
+        with open(self.source) as f:
+            assert f.read() == 'some content'

     def test_copy_file_hard_link_failure(self):
         # If hard linking fails, copy_file() falls back on copying file
@@ -106,19 +94,12 @@ def test_copy_file_hard_link_failure(self):
         with open(self.source, 'w') as f:
             f.write('some content')
         st = os.stat(self.source)
-        with patch("os.link", side_effect=OSError(0, "linking unsupported")):
+        with mock.patch("os.link", side_effect=OSError(0, "linking unsupported")):
             copy_file(self.source, self.target, link='hard')
         st2 = os.stat(self.source)
         st3 = os.stat(self.target)
-        self.assertTrue(os.path.samestat(st, st2), (st, st2))
-        self.assertFalse(os.path.samestat(st2, st3), (st2, st3))
+        assert os.path.samestat(st, st2), (st, st2)
+        assert not os.path.samestat(st2, st3), (st2, st3)
         for fn in (self.source, self.target):
-            with open(fn, 'r') as f:
-                self.assertEqual(f.read(), 'some content')
-
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase)
-
-if __name__ == "__main__":
-    run_unittest(test_suite())
+            with open(fn) as f:
+                assert f.read() == 'some content'
diff --git a/setuptools/_distutils/tests/test_filelist.py b/setuptools/_distutils/tests/test_filelist.py
index a90edcf138..2cee42cddd 100644
--- a/setuptools/_distutils/tests/test_filelist.py
+++ b/setuptools/_distutils/tests/test_filelist.py
@@ -1,17 +1,16 @@
 """Tests for distutils.filelist."""
 import os
 import re
-import unittest
+import logging
+
 from distutils import debug
-from distutils.log import WARN
 from distutils.errors import DistutilsTemplateError
 from distutils.filelist import glob_to_re, translate_pattern, FileList
 from distutils import filelist

-from test.support import captured_stdout, run_unittest
-from distutils.tests import support
+import pytest
+import jaraco.path

-from .py35compat import adapt_glob
 from . import py38compat as os_helper
@@ -36,16 +35,16 @@ def make_local_path(s):
     return s.replace('/', os.sep)


-class FileListTestCase(support.LoggingSilencer,
-                       unittest.TestCase):
-
-    def assertNoWarnings(self):
-        self.assertEqual(self.get_logs(WARN), [])
-        self.clear_logs()
+class TestFileList:
+    def assertNoWarnings(self, caplog):
+        warnings = [rec for rec in caplog.records if rec.levelno == logging.WARNING]
+        assert not warnings
+        caplog.clear()

-    def assertWarnings(self):
-        self.assertGreater(len(self.get_logs(WARN)), 0)
-        self.clear_logs()
+    def assertWarnings(self, caplog):
+        warnings = [rec for rec in caplog.records if rec.levelno == logging.WARNING]
+        assert warnings
+        caplog.clear()

     def test_glob_to_re(self):
         sep = os.sep
@@ -61,69 +60,70 @@ def test_glob_to_re(self):
             (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'),
             (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'),
             ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'),
-            (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')):
+            (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z'),
+        ):
             regex = regex % {'sep': sep}
-            self.assertEqual(glob_to_re(glob), adapt_glob(regex))
+            assert glob_to_re(glob) == regex

     def test_process_template_line(self):
         # testing all MANIFEST.in template patterns
         file_list = FileList()
-        l = make_local_path
+        mlp = make_local_path

         # simulated file list
-        file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt',
-                              'buildout.cfg',
-                              # filelist does not filter out VCS directories,
-                              # it's sdist that does
-                              l('.hg/last-message.txt'),
-                              l('global/one.txt'),
-                              l('global/two.txt'),
-                              l('global/files.x'),
-                              l('global/here.tmp'),
-                              l('f/o/f.oo'),
-                              l('dir/graft-one'),
-                              l('dir/dir2/graft2'),
-                              l('dir3/ok'),
-                              l('dir3/sub/ok.txt'),
-                              ]
+        file_list.allfiles = [
+            'foo.tmp',
+            'ok',
+            'xo',
+            'four.txt',
+            'buildout.cfg',
+            # filelist does not filter out VCS directories,
+            # it's sdist that does
+            mlp('.hg/last-message.txt'),
+            mlp('global/one.txt'),
+            mlp('global/two.txt'),
+            mlp('global/files.x'),
+            mlp('global/here.tmp'),
+            mlp('f/o/f.oo'),
+            mlp('dir/graft-one'),
+            mlp('dir/dir2/graft2'),
+            mlp('dir3/ok'),
+            mlp('dir3/sub/ok.txt'),
+        ]

         for line in MANIFEST_IN.split('\n'):
             if line.strip() == '':
                 continue
             file_list.process_template_line(line)

-        wanted = ['ok',
-                  'buildout.cfg',
-                  'four.txt',
-                  l('.hg/last-message.txt'),
-                  l('global/one.txt'),
-                  l('global/two.txt'),
-                  l('f/o/f.oo'),
-                  l('dir/graft-one'),
-                  l('dir/dir2/graft2'),
-                  ]
-
-        self.assertEqual(file_list.files, wanted)
-
-    def test_debug_print(self):
+        wanted = [
+            'ok',
+            'buildout.cfg',
+            'four.txt',
+            mlp('.hg/last-message.txt'),
+            mlp('global/one.txt'),
+            mlp('global/two.txt'),
+            mlp('f/o/f.oo'),
+            mlp('dir/graft-one'),
+            mlp('dir/dir2/graft2'),
+        ]
+
+        assert file_list.files == wanted
+
+    def test_debug_print(self, capsys, monkeypatch):
         file_list = FileList()
-        with captured_stdout() as stdout:
-            file_list.debug_print('xxx')
-        self.assertEqual(stdout.getvalue(), '')
-
-        debug.DEBUG = True
-        try:
-            with captured_stdout() as stdout:
-                file_list.debug_print('xxx')
-            self.assertEqual(stdout.getvalue(), 'xxx\n')
-        finally:
-            debug.DEBUG = False
+        file_list.debug_print('xxx')
+        assert capsys.readouterr().out == ''
+
+        monkeypatch.setattr(debug, 'DEBUG', True)
+        file_list.debug_print('xxx')
+        assert capsys.readouterr().out == 'xxx\n'

     def test_set_allfiles(self):
         file_list = FileList()
         files = ['a', 'b', 'c']
         file_list.set_allfiles(files)
-        self.assertEqual(file_list.allfiles, files)
+        assert file_list.allfiles == files

     def test_remove_duplicates(self):
         file_list = FileList()
@@ -131,223 +131,204 @@
         # files must be sorted beforehand (sdist does it)
         file_list.sort()
         file_list.remove_duplicates()
-        self.assertEqual(file_list.files, ['a', 'b', 'c', 'g'])
+        assert file_list.files == ['a', 'b', 'c', 'g']

     def test_translate_pattern(self):
         # not regex
-        self.assertTrue(hasattr(
-            translate_pattern('a', anchor=True, is_regex=False),
-            'search'))
+        assert hasattr(translate_pattern('a', anchor=True, is_regex=False), 'search')

         # is a regex
         regex = re.compile('a')
-        self.assertEqual(
-            translate_pattern(regex, anchor=True, is_regex=True),
-            regex)
+        assert translate_pattern(regex, anchor=True, is_regex=True) == regex

         # plain string flagged as regex
-        self.assertTrue(hasattr(
-            translate_pattern('a', anchor=True, is_regex=True),
-            'search'))
+        assert hasattr(translate_pattern('a', anchor=True, is_regex=True), 'search')

         # glob support
-        self.assertTrue(translate_pattern(
-            '*.py', anchor=True, is_regex=False).search('filelist.py'))
+        assert translate_pattern('*.py', anchor=True, is_regex=False).search(
+            'filelist.py'
+        )

     def test_exclude_pattern(self):
         # return False if no match
         file_list = FileList()
-        self.assertFalse(file_list.exclude_pattern('*.py'))
+        assert not file_list.exclude_pattern('*.py')

         # return True if files match
         file_list = FileList()
         file_list.files = ['a.py', 'b.py']
-        self.assertTrue(file_list.exclude_pattern('*.py'))
+        assert file_list.exclude_pattern('*.py')

         # test excludes
         file_list = FileList()
         file_list.files = ['a.py', 'a.txt']
         file_list.exclude_pattern('*.py')
-        self.assertEqual(file_list.files, ['a.txt'])
+        assert file_list.files == ['a.txt']

     def test_include_pattern(self):
         # return False if no match
         file_list = FileList()
         file_list.set_allfiles([])
-        self.assertFalse(file_list.include_pattern('*.py'))
+        assert not file_list.include_pattern('*.py')

         # return True if files match
         file_list = FileList()
         file_list.set_allfiles(['a.py', 'b.txt'])
-        self.assertTrue(file_list.include_pattern('*.py'))
+        assert file_list.include_pattern('*.py')

         # test * matches all files
         file_list = FileList()
-        self.assertIsNone(file_list.allfiles)
+        assert file_list.allfiles is None
         file_list.set_allfiles(['a.py', 'b.txt'])
         file_list.include_pattern('*')
-        self.assertEqual(file_list.allfiles, ['a.py', 'b.txt'])
+        assert file_list.allfiles == ['a.py', 'b.txt']

-    def test_process_template(self):
-        l = make_local_path
+    def test_process_template(self, caplog):
+        mlp = make_local_path
         # invalid lines
         file_list = FileList()
-        for action in ('include', 'exclude', 'global-include',
-                       'global-exclude', 'recursive-include',
-                       'recursive-exclude', 'graft', 'prune', 'blarg'):
-            self.assertRaises(DistutilsTemplateError,
-                              file_list.process_template_line, action)
+        for action in (
+            'include',
+            'exclude',
+            'global-include',
+            'global-exclude',
+            'recursive-include',
+            'recursive-exclude',
+            'graft',
+            'prune',
+            'blarg',
+        ):
+            with pytest.raises(DistutilsTemplateError):
+                file_list.process_template_line(action)

         # include
         file_list = FileList()
-        file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])
+        file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')])

         file_list.process_template_line('include *.py')
-        self.assertEqual(file_list.files, ['a.py'])
-        self.assertNoWarnings()
+        assert file_list.files == ['a.py']
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('include *.rb')
-        self.assertEqual(file_list.files, ['a.py'])
-        self.assertWarnings()
+        assert file_list.files == ['a.py']
+        self.assertWarnings(caplog)

         # exclude
         file_list = FileList()
-        file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+        file_list.files = ['a.py', 'b.txt', mlp('d/c.py')]

         file_list.process_template_line('exclude *.py')
-        self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
-        self.assertNoWarnings()
+        assert file_list.files == ['b.txt', mlp('d/c.py')]
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('exclude *.rb')
-        self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
-        self.assertWarnings()
+        assert file_list.files == ['b.txt', mlp('d/c.py')]
+        self.assertWarnings(caplog)

         # global-include
         file_list = FileList()
-        file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])
+        file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')])

         file_list.process_template_line('global-include *.py')
-        self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
-        self.assertNoWarnings()
+        assert file_list.files == ['a.py', mlp('d/c.py')]
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('global-include *.rb')
-        self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
-        self.assertWarnings()
+        assert file_list.files == ['a.py', mlp('d/c.py')]
+        self.assertWarnings(caplog)

         # global-exclude
         file_list = FileList()
-        file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+        file_list.files = ['a.py', 'b.txt', mlp('d/c.py')]

         file_list.process_template_line('global-exclude *.py')
-        self.assertEqual(file_list.files, ['b.txt'])
-        self.assertNoWarnings()
+        assert file_list.files == ['b.txt']
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('global-exclude *.rb')
-        self.assertEqual(file_list.files, ['b.txt'])
-        self.assertWarnings()
+        assert file_list.files == ['b.txt']
+        self.assertWarnings(caplog)

         # recursive-include
         file_list = FileList()
-        file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'),
-                                l('d/d/e.py')])
+        file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')])

         file_list.process_template_line('recursive-include d *.py')
-        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
-        self.assertNoWarnings()
+        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('recursive-include e *.py')
-        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
-        self.assertWarnings()
+        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
+        self.assertWarnings(caplog)

         # recursive-exclude
         file_list = FileList()
-        file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+        file_list.files = ['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')]

         file_list.process_template_line('recursive-exclude d *.py')
-        self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
-        self.assertNoWarnings()
+        assert file_list.files == ['a.py', mlp('d/c.txt')]
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('recursive-exclude e *.py')
-        self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
-        self.assertWarnings()
+        assert file_list.files == ['a.py', mlp('d/c.txt')]
+        self.assertWarnings(caplog)

         # graft
         file_list = FileList()
-        file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'),
-                                l('f/f.py')])
+        file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')])

         file_list.process_template_line('graft d')
-        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
-        self.assertNoWarnings()
+        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('graft e')
-        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
-        self.assertWarnings()
+        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
+        self.assertWarnings(caplog)

         # prune
         file_list = FileList()
-        file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+        file_list.files = ['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')]

         file_list.process_template_line('prune d')
-        self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
-        self.assertNoWarnings()
+        assert file_list.files == ['a.py', mlp('f/f.py')]
+        self.assertNoWarnings(caplog)

         file_list.process_template_line('prune e')
-        self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
-        self.assertWarnings()
+        assert file_list.files == ['a.py', mlp('f/f.py')]
+        self.assertWarnings(caplog)


-class FindAllTestCase(unittest.TestCase):
+class TestFindAll:
     @os_helper.skip_unless_symlink
-    def test_missing_symlink(self):
-        with os_helper.temp_cwd():
-            os.symlink('foo', 'bar')
-            self.assertEqual(filelist.findall(), [])
+    def test_missing_symlink(self, temp_cwd):
+        os.symlink('foo', 'bar')
+        assert filelist.findall() == []

-    def test_basic_discovery(self):
+    def test_basic_discovery(self, temp_cwd):
         """
         When findall is called with no parameters or with
         '.' as the parameter, the dot should be omitted from
         the results.
         """
-        with os_helper.temp_cwd():
-            os.mkdir('foo')
-            file1 = os.path.join('foo', 'file1.txt')
-            os_helper.create_empty_file(file1)
-            os.mkdir('bar')
-            file2 = os.path.join('bar', 'file2.txt')
-            os_helper.create_empty_file(file2)
-            expected = [file2, file1]
-            self.assertEqual(sorted(filelist.findall()), expected)
-
-    def test_non_local_discovery(self):
+        jaraco.path.build({'foo': {'file1.txt': ''}, 'bar': {'file2.txt': ''}})
+        file1 = os.path.join('foo', 'file1.txt')
+        file2 = os.path.join('bar', 'file2.txt')
+        expected = [file2, file1]
+        assert sorted(filelist.findall()) == expected

+    def test_non_local_discovery(self, tmp_path):
         """
         When findall is called with another path, the full
         path name should be returned.
""" - with os_helper.temp_dir() as temp_dir: - file1 = os.path.join(temp_dir, 'file1.txt') - os_helper.create_empty_file(file1) - expected = [file1] - self.assertEqual(filelist.findall(temp_dir), expected) + filename = tmp_path / 'file1.txt' + filename.write_text('') + expected = [str(filename)] + assert filelist.findall(tmp_path) == expected @os_helper.skip_unless_symlink - def test_symlink_loop(self): - with os_helper.temp_dir() as temp_dir: - link = os.path.join(temp_dir, 'link-to-parent') - content = os.path.join(temp_dir, 'somefile') - os_helper.create_empty_file(content) - os.symlink('.', link) - files = filelist.findall(temp_dir) - assert len(files) == 1 - - -def test_suite(): - return unittest.TestSuite([ - unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), - unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), - ]) - - -if __name__ == "__main__": - run_unittest(test_suite()) + def test_symlink_loop(self, tmp_path): + tmp_path.joinpath('link-to-parent').symlink_to('.') + tmp_path.joinpath('somefile').write_text('') + files = filelist.findall(tmp_path) + assert len(files) == 1 diff --git a/setuptools/_distutils/tests/test_install.py b/setuptools/_distutils/tests/test_install.py index 5dbc06b090..3f525db42a 100644 --- a/setuptools/_distutils/tests/test_install.py +++ b/setuptools/_distutils/tests/test_install.py @@ -2,10 +2,11 @@ import os import sys -import unittest import site +import pathlib +import logging -from test.support import captured_stdout, run_unittest +import pytest from distutils import sysconfig from distutils.command.install import install @@ -24,11 +25,15 @@ def _make_ext_name(modname): return modname + sysconfig.get_config_var('EXT_SUFFIX') -class InstallTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): - +@support.combine_markers +@pytest.mark.usefixtures('save_env') +class TestInstall( + support.TempdirManager, +): + @pytest.mark.xfail( + 'platform.system() == "Windows" and sys.version_info > (3, 11)', + reason="pypa/distutils#148", + ) def test_home_installation_scheme(self): # This ensure two things: # - that --home generates the desired set of directory names @@ -42,98 +47,88 @@ def test_home_installation_scheme(self): dist.command_obj["build"] = support.DummyCommand( build_base=builddir, build_lib=os.path.join(builddir, "lib"), - ) + ) cmd = install(dist) cmd.home = destination cmd.ensure_finalized() - self.assertEqual(cmd.install_base, destination) - self.assertEqual(cmd.install_platbase, destination) + assert cmd.install_base == destination + assert cmd.install_platbase == destination def check_path(got, expected): got = os.path.normpath(got) expected = os.path.normpath(expected) - self.assertEqual(got, expected) + assert got == expected - libdir = os.path.join(destination, "lib", "python") + impl_name = sys.implementation.name.replace("cpython", "python") + libdir = os.path.join(destination, "lib", impl_name) check_path(cmd.install_lib, libdir) _platlibdir = getattr(sys, "platlibdir", "lib") - platlibdir = os.path.join(destination, _platlibdir, "python") + platlibdir = os.path.join(destination, _platlibdir, impl_name) check_path(cmd.install_platlib, platlibdir) check_path(cmd.install_purelib, libdir) - check_path(cmd.install_headers, - os.path.join(destination, "include", "python", "foopkg")) + check_path( + cmd.install_headers, + os.path.join(destination, "include", impl_name, "foopkg"), + ) check_path(cmd.install_scripts, os.path.join(destination, "bin")) 
check_path(cmd.install_data, destination) - def test_user_site(self): + def test_user_site(self, monkeypatch): # test install with --user # preparing the environment for the test - self.old_user_base = site.USER_BASE - self.old_user_site = site.USER_SITE self.tmpdir = self.mkdtemp() - self.user_base = os.path.join(self.tmpdir, 'B') - self.user_site = os.path.join(self.tmpdir, 'S') - site.USER_BASE = self.user_base - site.USER_SITE = self.user_site - install_module.USER_BASE = self.user_base - install_module.USER_SITE = self.user_site + orig_site = site.USER_SITE + orig_base = site.USER_BASE + monkeypatch.setattr(site, 'USER_BASE', os.path.join(self.tmpdir, 'B')) + monkeypatch.setattr(site, 'USER_SITE', os.path.join(self.tmpdir, 'S')) + monkeypatch.setattr(install_module, 'USER_BASE', site.USER_BASE) + monkeypatch.setattr(install_module, 'USER_SITE', site.USER_SITE) def _expanduser(path): if path.startswith('~'): return os.path.normpath(self.tmpdir + path[1:]) return path - self.old_expand = os.path.expanduser - os.path.expanduser = _expanduser - - def cleanup(): - site.USER_BASE = self.old_user_base - site.USER_SITE = self.old_user_site - install_module.USER_BASE = self.old_user_base - install_module.USER_SITE = self.old_user_site - os.path.expanduser = self.old_expand - self.addCleanup(cleanup) + monkeypatch.setattr(os.path, 'expanduser', _expanduser) for key in ('nt_user', 'posix_user'): - self.assertIn(key, INSTALL_SCHEMES) + assert key in INSTALL_SCHEMES dist = Distribution({'name': 'xx'}) cmd = install(dist) # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) + options = [name for name, short, label in cmd.user_options] + assert 'user' in options # setting a value cmd.user = 1 # user base and site shouldn't be created yet - self.assertFalse(os.path.exists(self.user_base)) - self.assertFalse(os.path.exists(self.user_site)) + assert not os.path.exists(site.USER_BASE) + assert not os.path.exists(site.USER_SITE) # let's run finalize cmd.ensure_finalized() # now they should - self.assertTrue(os.path.exists(self.user_base)) - self.assertTrue(os.path.exists(self.user_site)) + assert os.path.exists(site.USER_BASE) + assert os.path.exists(site.USER_SITE) - self.assertIn('userbase', cmd.config_vars) - self.assertIn('usersite', cmd.config_vars) + assert 'userbase' in cmd.config_vars + assert 'usersite' in cmd.config_vars - actual_headers = os.path.relpath(cmd.install_headers, self.user_base) + actual_headers = os.path.relpath(cmd.install_headers, site.USER_BASE) if os.name == 'nt': - site_path = os.path.relpath( - os.path.dirname(self.old_user_site), self.old_user_base) + site_path = os.path.relpath(os.path.dirname(orig_site), orig_base) include = os.path.join(site_path, 'Include') else: include = sysconfig.get_python_inc(0, '') expect_headers = os.path.join(include, 'xx') - self.assertEqual(os.path.normcase(actual_headers), os.path.normcase(expect_headers)) + assert os.path.normcase(actual_headers) == os.path.normcase(expect_headers) def test_handle_extra_path(self): dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) @@ -141,27 +136,28 @@ def test_handle_extra_path(self): # two elements cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path', 'dirs']) - self.assertEqual(cmd.extra_dirs, 'dirs') - self.assertEqual(cmd.path_file, 'path') + assert cmd.extra_path == ['path', 'dirs'] + assert cmd.extra_dirs == 'dirs' + assert cmd.path_file == 'path' # one element cmd.extra_path = ['path'] 
cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path']) - self.assertEqual(cmd.extra_dirs, 'path') - self.assertEqual(cmd.path_file, 'path') + assert cmd.extra_path == ['path'] + assert cmd.extra_dirs == 'path' + assert cmd.path_file == 'path' # none dist.extra_path = cmd.extra_path = None cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, None) - self.assertEqual(cmd.extra_dirs, '') - self.assertEqual(cmd.path_file, None) + assert cmd.extra_path is None + assert cmd.extra_dirs == '' + assert cmd.path_file is None # three elements (no way !) cmd.extra_path = 'path,dirs,again' - self.assertRaises(DistutilsOptionError, cmd.handle_extra_path) + with pytest.raises(DistutilsOptionError): + cmd.handle_extra_path() def test_finalize_options(self): dist = Distribution({'name': 'xx'}) @@ -171,23 +167,25 @@ def test_finalize_options(self): # install-base/install-platbase -- not both cmd.prefix = 'prefix' cmd.install_base = 'base' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() # must supply either home or prefix/exec-prefix -- not both cmd.install_base = None cmd.home = 'home' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() # can't combine user with prefix/exec_prefix/home or # install_(plat)base cmd.prefix = None cmd.user = 'user' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() def test_record(self): install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(py_modules=['hello'], - scripts=['sayhi']) + project_dir, dist = self.create_dist(py_modules=['hello'], scripts=['sayhi']) os.chdir(project_dir) self.write_file('hello.py', "def main(): print('o hai')") self.write_file('sayhi', 'from hello import main; main()') @@ -206,18 +204,22 @@ def test_record(self): f.close() found = [os.path.basename(line) for line in content.splitlines()] - expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, - 'sayhi', - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] - self.assertEqual(found, expected) + expected = [ + 'hello.py', + 'hello.%s.pyc' % sys.implementation.cache_tag, + 'sayhi', + 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2], + ] + assert found == expected def test_record_extensions(self): cmd = test_support.missing_compiler_executable() if cmd is not None: - self.skipTest('The %r command is not found' % cmd) + pytest.skip('The %r command is not found' % cmd) install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(ext_modules=[ - Extension('xx', ['xxmodule.c'])]) + project_dir, dist = self.create_dist( + ext_modules=[Extension('xx', ['xxmodule.c'])] + ) os.chdir(project_dir) support.copy_xxmodule_c(project_dir) @@ -233,31 +235,18 @@ def test_record_extensions(self): cmd.ensure_finalized() cmd.run() - f = open(cmd.record) - try: - content = f.read() - finally: - f.close() + content = pathlib.Path(cmd.record).read_text() found = [os.path.basename(line) for line in content.splitlines()] - expected = [_make_ext_name('xx'), - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] - self.assertEqual(found, expected) + expected = [ + _make_ext_name('xx'), + 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2], + ] + assert found == expected - def test_debug_mode(self): + def test_debug_mode(self, caplog, monkeypatch): # this covers the code called when DEBUG is set - old_logs_len 
= len(self.logs) - install_module.DEBUG = True - try: - with captured_stdout(): - self.test_record() - finally: - install_module.DEBUG = False - self.assertGreater(len(self.logs), old_logs_len) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + monkeypatch.setattr(install_module, 'DEBUG', True) + caplog.set_level(logging.DEBUG) + self.test_record() + assert any(rec for rec in caplog.records if rec.levelno == logging.DEBUG) diff --git a/setuptools/_distutils/tests/test_install_data.py b/setuptools/_distutils/tests/test_install_data.py index 6191d2fa6e..9badbc264f 100644 --- a/setuptools/_distutils/tests/test_install_data.py +++ b/setuptools/_distutils/tests/test_install_data.py @@ -1,16 +1,16 @@ """Tests for distutils.command.install_data.""" import os -import unittest + +import pytest from distutils.command.install_data import install_data from distutils.tests import support -from test.support import run_unittest -class InstallDataTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): +@pytest.mark.usefixtures('save_env') +class TestInstallData( + support.TempdirManager, +): def test_simple_run(self): pkg_dir, dist = self.create_dist() cmd = install_data(dist) @@ -26,18 +26,18 @@ def test_simple_run(self): self.write_file(two, 'xxx') cmd.data_files = [one, (inst2, [two])] - self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])]) + assert cmd.get_inputs() == [one, (inst2, [two])] # let's run the command cmd.ensure_finalized() cmd.run() # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) + assert len(cmd.get_outputs()) == 2 rtwo = os.path.split(two)[-1] - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) + assert os.path.exists(os.path.join(inst2, rtwo)) rone = os.path.split(one)[-1] - self.assertTrue(os.path.exists(os.path.join(inst, rone))) + assert os.path.exists(os.path.join(inst, rone)) cmd.outfiles = [] # let's try with warn_dir one @@ -46,30 +46,21 @@ def test_simple_run(self): cmd.run() # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) + assert len(cmd.get_outputs()) == 2 + assert os.path.exists(os.path.join(inst2, rtwo)) + assert os.path.exists(os.path.join(inst, rone)) cmd.outfiles = [] # now using root and empty dir cmd.root = os.path.join(pkg_dir, 'root') - inst3 = os.path.join(cmd.install_dir, 'inst3') inst4 = os.path.join(pkg_dir, 'inst4') three = os.path.join(cmd.install_dir, 'three') self.write_file(three, 'xx') - cmd.data_files = [one, (inst2, [two]), - ('inst3', [three]), - (inst4, [])] + cmd.data_files = [one, (inst2, [two]), ('inst3', [three]), (inst4, [])] cmd.ensure_finalized() cmd.run() # let's check the result - self.assertEqual(len(cmd.get_outputs()), 4) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert len(cmd.get_outputs()) == 4 + assert os.path.exists(os.path.join(inst2, rtwo)) + assert os.path.exists(os.path.join(inst, rone)) diff --git a/setuptools/_distutils/tests/test_install_headers.py b/setuptools/_distutils/tests/test_install_headers.py index 1aa4d09cde..1e8ccf7991 100644 --- 
a/setuptools/_distutils/tests/test_install_headers.py +++ b/setuptools/_distutils/tests/test_install_headers.py @@ -1,16 +1,16 @@ """Tests for distutils.command.install_headers.""" import os -import unittest + +import pytest from distutils.command.install_headers import install_headers from distutils.tests import support -from test.support import run_unittest -class InstallHeadersTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): +@pytest.mark.usefixtures('save_env') +class TestInstallHeaders( + support.TempdirManager, +): def test_simple_run(self): # we have two headers header_list = self.mkdtemp() @@ -22,7 +22,7 @@ def test_simple_run(self): pkg_dir, dist = self.create_dist(headers=headers) cmd = install_headers(dist) - self.assertEqual(cmd.get_inputs(), headers) + assert cmd.get_inputs() == headers # let's run the command cmd.install_dir = os.path.join(pkg_dir, 'inst') @@ -30,10 +30,4 @@ def test_simple_run(self): cmd.run() # let's check the results - self.assertEqual(len(cmd.get_outputs()), 2) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert len(cmd.get_outputs()) == 2 diff --git a/setuptools/_distutils/tests/test_install_lib.py b/setuptools/_distutils/tests/test_install_lib.py index 652653f2b2..0bd67cd04d 100644 --- a/setuptools/_distutils/tests/test_install_lib.py +++ b/setuptools/_distutils/tests/test_install_lib.py @@ -2,39 +2,41 @@ import sys import os import importlib.util -import unittest + +import pytest from distutils.command.install_lib import install_lib from distutils.extension import Extension from distutils.tests import support from distutils.errors import DistutilsOptionError -from test.support import run_unittest - -class InstallLibTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): +@support.combine_markers +@pytest.mark.usefixtures('save_env') +class TestInstallLib( + support.TempdirManager, +): def test_finalize_options(self): dist = self.create_dist()[1] cmd = install_lib(dist) cmd.finalize_options() - self.assertEqual(cmd.compile, 1) - self.assertEqual(cmd.optimize, 0) + assert cmd.compile == 1 + assert cmd.optimize == 0 # optimize must be 0, 1, or 2 cmd.optimize = 'foo' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() cmd.optimize = '4' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() cmd.optimize = '2' cmd.finalize_options() - self.assertEqual(cmd.optimize, 2) + assert cmd.optimize == 2 - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') + @pytest.mark.skipif('sys.dont_write_bytecode') def test_byte_compile(self): project_dir, dist = self.create_dist() os.chdir(project_dir) @@ -45,10 +47,11 @@ def test_byte_compile(self): self.write_file(f, '# python file') cmd.byte_compile([f]) pyc_file = importlib.util.cache_from_source('foo.py', optimization='') - pyc_opt_file = importlib.util.cache_from_source('foo.py', - optimization=cmd.optimize) - self.assertTrue(os.path.exists(pyc_file)) - self.assertTrue(os.path.exists(pyc_opt_file)) + pyc_opt_file = importlib.util.cache_from_source( + 'foo.py', optimization=cmd.optimize + ) + assert os.path.exists(pyc_file) + assert os.path.exists(pyc_opt_file) def test_get_outputs(self): project_dir, dist = self.create_dist() @@ 
-68,7 +71,7 @@ def test_get_outputs(self): # get_outputs should return 4 elements: spam/__init__.py and .pyc, # foo.import-tag-abiflags.so / foo.pyd outputs = cmd.get_outputs() - self.assertEqual(len(outputs), 4, outputs) + assert len(outputs) == 4, outputs def test_get_inputs(self): project_dir, dist = self.create_dist() @@ -88,9 +91,9 @@ def test_get_inputs(self): # get_inputs should return 2 elements: spam/__init__.py and # foo.import-tag-abiflags.so / foo.pyd inputs = cmd.get_inputs() - self.assertEqual(len(inputs), 2, inputs) + assert len(inputs) == 2, inputs - def test_dont_write_bytecode(self): + def test_dont_write_bytecode(self, caplog): # makes sure byte_compile is not used dist = self.create_dist()[1] cmd = install_lib(dist) @@ -104,12 +107,4 @@ def test_dont_write_bytecode(self): finally: sys.dont_write_bytecode = old_dont_write_bytecode - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert 'byte-compiling is disabled' in caplog.messages[0] diff --git a/setuptools/_distutils/tests/test_install_scripts.py b/setuptools/_distutils/tests/test_install_scripts.py index 648db3b11d..58313f2864 100644 --- a/setuptools/_distutils/tests/test_install_scripts.py +++ b/setuptools/_distutils/tests/test_install_scripts.py @@ -1,40 +1,34 @@ """Tests for distutils.command.install_scripts.""" import os -import unittest from distutils.command.install_scripts import install_scripts from distutils.core import Distribution from distutils.tests import support -from test.support import run_unittest -class InstallScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class TestInstallScripts(support.TempdirManager): def test_default_settings(self): dist = Distribution() - dist.command_obj["build"] = support.DummyCommand( - build_scripts="/foo/bar") + dist.command_obj["build"] = support.DummyCommand(build_scripts="/foo/bar") dist.command_obj["install"] = support.DummyCommand( install_scripts="/splat/funk", force=1, skip_build=1, - ) + ) cmd = install_scripts(dist) - self.assertFalse(cmd.force) - self.assertFalse(cmd.skip_build) - self.assertIsNone(cmd.build_dir) - self.assertIsNone(cmd.install_dir) + assert not cmd.force + assert not cmd.skip_build + assert cmd.build_dir is None + assert cmd.install_dir is None cmd.finalize_options() - self.assertTrue(cmd.force) - self.assertTrue(cmd.skip_build) - self.assertEqual(cmd.build_dir, "/foo/bar") - self.assertEqual(cmd.install_dir, "/splat/funk") + assert cmd.force + assert cmd.skip_build + assert cmd.build_dir == "/foo/bar" + assert cmd.install_dir == "/splat/funk" def test_installation(self): source = self.mkdtemp() @@ -48,15 +42,21 @@ def write_script(name, text): finally: f.close() - write_script("script1.py", ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("script2.py", ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("shell.sh", ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) + write_script( + "script1.py", + ( + "#! 
/usr/bin/env python2.3\n" + "# bogus script w/ Python sh-bang\n" + "pass\n" + ), + ) + write_script( + "script2.py", + ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n"), + ) + write_script( + "shell.sh", ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n") + ) target = self.mkdtemp() dist = Distribution() @@ -65,18 +65,11 @@ def write_script(name, text): install_scripts=target, force=1, skip_build=1, - ) + ) cmd = install_scripts(dist) cmd.finalize_options() cmd.run() installed = os.listdir(target) for name in expected: - self.assertIn(name, installed) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert name in installed diff --git a/setuptools/_distutils/tests/test_log.py b/setuptools/_distutils/tests/test_log.py index ec2ae028de..ec6a0c8051 100644 --- a/setuptools/_distutils/tests/test_log.py +++ b/setuptools/_distutils/tests/test_log.py @@ -1,46 +1,13 @@ """Tests for distutils.log""" -import io -import sys -import unittest -from test.support import swap_attr, run_unittest +import logging -from distutils import log +from distutils._log import log -class TestLog(unittest.TestCase): - def test_non_ascii(self): - # Issues #8663, #34421: test that non-encodable text is escaped with - # backslashreplace error handler and encodable non-ASCII text is - # output as is. - for errors in ('strict', 'backslashreplace', 'surrogateescape', - 'replace', 'ignore'): - with self.subTest(errors=errors): - stdout = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - stderr = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - old_threshold = log.set_threshold(log.DEBUG) - try: - with swap_attr(sys, 'stdout', stdout), \ - swap_attr(sys, 'stderr', stderr): - log.debug('Dεbug\tMėssãge') - log.fatal('Fαtal\tÈrrōr') - finally: - log.set_threshold(old_threshold) - stdout.seek(0) - self.assertEqual(stdout.read().rstrip(), - 'Dεbug\tM?ss?ge' if errors == 'replace' else - 'Dεbug\tMssge' if errors == 'ignore' else - 'Dεbug\tM\\u0117ss\\xe3ge') - stderr.seek(0) - self.assertEqual(stderr.read().rstrip(), - 'Fαtal\t?rr?r' if errors == 'replace' else - 'Fαtal\trrr' if errors == 'ignore' else - 'Fαtal\t\\xc8rr\\u014dr') - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(TestLog) - -if __name__ == "__main__": - run_unittest(test_suite()) +class TestLog: + def test_non_ascii(self, caplog): + caplog.set_level(logging.DEBUG) + log.debug('Dεbug\tMėssãge') + log.fatal('Fαtal\tÈrrōr') + assert caplog.messages == ['Dεbug\tMėssãge', 'Fαtal\tÈrrōr'] diff --git a/setuptools/_distutils/tests/test_msvc9compiler.py b/setuptools/_distutils/tests/test_msvc9compiler.py index 6235405e31..fe5693e1d8 100644 --- a/setuptools/_distutils/tests/test_msvc9compiler.py +++ b/setuptools/_distutils/tests/test_msvc9compiler.py @@ -1,11 +1,10 @@ """Tests for distutils.msvc9compiler.""" import sys -import unittest import os from distutils.errors import DistutilsPlatformError from distutils.tests import support -from test.support import run_unittest +import pytest # A manifest with the only assembly reference being the msvcrt assembly, so # should have the assembly completely stripped. 
Note that although the @@ -90,56 +89,62 @@ """ -if sys.platform=="win32": +if sys.platform == "win32": from distutils.msvccompiler import get_build_version - if get_build_version()>=8.0: + + if get_build_version() >= 8.0: SKIP_MESSAGE = None else: SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" else: SKIP_MESSAGE = "These tests are only for win32" -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvc9compilerTestCase(support.TempdirManager, - unittest.TestCase): +@pytest.mark.skipif('SKIP_MESSAGE', reason=SKIP_MESSAGE) +class Testmsvc9compiler(support.TempdirManager): def test_no_compiler(self): # makes sure query_vcvarsall raises # a DistutilsPlatformError if the compiler # is not found from distutils.msvc9compiler import query_vcvarsall + def _find_vcvarsall(version): return None from distutils import msvc9compiler + old_find_vcvarsall = msvc9compiler.find_vcvarsall msvc9compiler.find_vcvarsall = _find_vcvarsall try: - self.assertRaises(DistutilsPlatformError, query_vcvarsall, - 'wont find this version') + with pytest.raises(DistutilsPlatformError): + query_vcvarsall('wont find this version') finally: msvc9compiler.find_vcvarsall = old_find_vcvarsall def test_reg_class(self): from distutils.msvc9compiler import Reg - self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx') + + with pytest.raises(KeyError): + Reg.get_value('xxx', 'xxx') # looking for values that should exist on all # windows registry versions. path = r'Control Panel\Desktop' v = Reg.get_value(path, 'dragfullwindows') - self.assertIn(v, ('0', '1', '2')) + assert v in ('0', '1', '2') import winreg + HKCU = winreg.HKEY_CURRENT_USER keys = Reg.read_keys(HKCU, 'xxxx') - self.assertEqual(keys, None) + assert keys is None keys = Reg.read_keys(HKCU, r'Control Panel') - self.assertIn('Desktop', keys) + assert 'Desktop' in keys def test_remove_visual_c_ref(self): from distutils.msvc9compiler import MSVCCompiler + tempdir = self.mkdtemp() manifest = os.path.join(tempdir, 'manifest') f = open(manifest, 'w') @@ -160,10 +165,11 @@ def test_remove_visual_c_ref(self): f.close() # makes sure the manifest was properly cleaned - self.assertEqual(content, _CLEANED_MANIFEST) + assert content == _CLEANED_MANIFEST def test_remove_entire_manifest(self): from distutils.msvc9compiler import MSVCCompiler + tempdir = self.mkdtemp() manifest = os.path.join(tempdir, 'manifest') f = open(manifest, 'w') @@ -174,11 +180,4 @@ def test_remove_entire_manifest(self): compiler = MSVCCompiler() got = compiler._remove_visual_c_ref(manifest) - self.assertIsNone(got) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert got is None diff --git a/setuptools/_distutils/tests/test_msvccompiler.py b/setuptools/_distutils/tests/test_msvccompiler.py index 846e5bb80a..f63537b8e5 100644 --- a/setuptools/_distutils/tests/test_msvccompiler.py +++ b/setuptools/_distutils/tests/test_msvccompiler.py @@ -1,23 +1,21 @@ """Tests for distutils._msvccompiler.""" import sys -import unittest import os import threading +import unittest.mock as mock + +import pytest from distutils.errors import DistutilsPlatformError from distutils.tests import support -from test.support import run_unittest +from distutils import _msvccompiler -SKIP_MESSAGE = (None if sys.platform == "win32" else - "These tests are only for win32") +needs_winreg = pytest.mark.skipif('not hasattr(_msvccompiler, "winreg")') -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) 
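# Editor's note, not part of the patch: markers like
# ``skipif('SKIP_MESSAGE')`` above use pytest's string conditions.  The
# string is evaluated at collection time with ``os``, ``sys`` and
# ``platform`` (plus the test module's globals) in scope, which is why
# hunks elsewhere in this patch can reference platform.system() without
# importing platform.  Minimal sketch:
import pytest

FEATURE_MISSING = None  # or a reason string computed at import time

@pytest.mark.skipif('FEATURE_MISSING', reason=FEATURE_MISSING)
def test_feature():
    pass  # runs only when FEATURE_MISSING is falsy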
-class msvccompilerTestCase(support.TempdirManager, - unittest.TestCase): +class Testmsvccompiler(support.TempdirManager): def test_no_compiler(self): - import distutils._msvccompiler as _msvccompiler # makes sure query_vcvarsall raises # a DistutilsPlatformError if the compiler # is not found @@ -27,15 +25,15 @@ def _find_vcvarsall(plat_spec): old_find_vcvarsall = _msvccompiler._find_vcvarsall _msvccompiler._find_vcvarsall = _find_vcvarsall try: - self.assertRaises(DistutilsPlatformError, - _msvccompiler._get_vc_env, - 'wont find this version') + with pytest.raises(DistutilsPlatformError): + _msvccompiler._get_vc_env( + 'wont find this version', + ) finally: _msvccompiler._find_vcvarsall = old_find_vcvarsall + @needs_winreg def test_get_vc_env_unicode(self): - import distutils._msvccompiler as _msvccompiler - test_var = 'ṰḖṤṪ┅ṼẨṜ' test_value = '₃⁴₅' @@ -44,36 +42,25 @@ def test_get_vc_env_unicode(self): os.environ[test_var] = test_value try: env = _msvccompiler._get_vc_env('x86') - self.assertIn(test_var.lower(), env) - self.assertEqual(test_value, env[test_var.lower()]) + assert test_var.lower() in env + assert test_value == env[test_var.lower()] finally: os.environ.pop(test_var) if old_distutils_use_sdk: os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk - def test_get_vc2017(self): - import distutils._msvccompiler as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2017 - # and mark it skipped if we do not. - version, path = _msvccompiler._find_vc2017() - if version: - self.assertGreaterEqual(version, 15) - self.assertTrue(os.path.isdir(path)) - else: - raise unittest.SkipTest("VS 2017 is not installed") - - def test_get_vc2015(self): - import distutils._msvccompiler as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2015 - # and mark it skipped if we do not. - version, path = _msvccompiler._find_vc2015() - if version: - self.assertGreaterEqual(version, 14) - self.assertTrue(os.path.isdir(path)) - else: - raise unittest.SkipTest("VS 2015 is not installed") + @needs_winreg + @pytest.mark.parametrize('ver', (2015, 2017)) + def test_get_vc(self, ver): + # This function cannot be mocked, so pass if VC is found + # and skip otherwise. + lookup = getattr(_msvccompiler, f'_find_vc{ver}') + expected_version = {2015: 14, 2017: 15}[ver] + version, path = lookup() + if not version: + pytest.skip(f"VS {ver} is not installed") + assert version >= expected_version + assert os.path.isdir(path) class CheckThread(threading.Thread): @@ -89,20 +76,18 @@ def __bool__(self): return not self.exc_info -class TestSpawn(unittest.TestCase): +class TestSpawn: def test_concurrent_safe(self): """ Concurrent calls to spawn should have consistent results. """ - import distutils._msvccompiler as _msvccompiler compiler = _msvccompiler.MSVCCompiler() compiler._paths = "expected" inner_cmd = 'import os; assert os.environ["PATH"] == "expected"' command = [sys.executable, '-c', inner_cmd] threads = [ - CheckThread(target=compiler.spawn, args=[command]) - for n in range(100) + CheckThread(target=compiler.spawn, args=[command]) for n in range(100) ] for thread in threads: thread.start() @@ -115,8 +100,8 @@ def test_concurrent_safe_fallback(self): If CCompiler.spawn has been monkey-patched without support for an env, it should still execute. 
""" - import distutils._msvccompiler as _msvccompiler from distutils import ccompiler + compiler = _msvccompiler.MSVCCompiler() compiler._paths = "expected" @@ -124,15 +109,7 @@ def CCompiler_spawn(self, cmd): "A spawn without an env argument." assert os.environ["PATH"] == "expected" - with unittest.mock.patch.object( - ccompiler.CCompiler, 'spawn', CCompiler_spawn): + with mock.patch.object(ccompiler.CCompiler, 'spawn', CCompiler_spawn): compiler.spawn(["n/a"]) assert os.environ.get("PATH") != "expected" - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_register.py b/setuptools/_distutils/tests/test_register.py index 5770ed58ae..34e593244e 100644 --- a/setuptools/_distutils/tests/test_register.py +++ b/setuptools/_distutils/tests/test_register.py @@ -1,20 +1,14 @@ """Tests for distutils.command.register.""" import os -import unittest import getpass import urllib -import warnings - -from test.support import run_unittest - -from .py38compat import check_warnings from distutils.command import register as register_module from distutils.command.register import register from distutils.errors import DistutilsSetupError -from distutils.log import INFO from distutils.tests.test_config import BasePyPIRCCommandTestCase +import pytest try: import docutils @@ -41,8 +35,10 @@ password:password """ -class Inputs(object): + +class Inputs: """Fakes user inputs.""" + def __init__(self, *answers): self.answers = answers self.index = 0 @@ -53,8 +49,10 @@ def __call__(self, prompt=''): finally: self.index += 1 -class FakeOpener(object): + +class FakeOpener: """Fakes a PyPI server""" + def __init__(self): self.reqs = [] @@ -71,33 +69,33 @@ def read(self): def getheader(self, name, default=None): return { 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) + }.get(name.lower(), default) + +@pytest.fixture(autouse=True) +def autopass(monkeypatch): + monkeypatch.setattr(getpass, 'getpass', lambda prompt: 'password') -class RegisterTestCase(BasePyPIRCCommandTestCase): - def setUp(self): - super(RegisterTestCase, self).setUp() - # patching the password prompt - self._old_getpass = getpass.getpass - def _getpass(prompt): - return 'password' - getpass.getpass = _getpass - urllib.request._opener = None - self.old_opener = urllib.request.build_opener - self.conn = urllib.request.build_opener = FakeOpener() +@pytest.fixture(autouse=True) +def fake_opener(monkeypatch, request): + opener = FakeOpener() + monkeypatch.setattr(urllib.request, 'build_opener', opener) + monkeypatch.setattr(urllib.request, '_opener', None) + request.instance.conn = opener - def tearDown(self): - getpass.getpass = self._old_getpass - urllib.request._opener = None - urllib.request.build_opener = self.old_opener - super(RegisterTestCase, self).tearDown() +class TestRegister(BasePyPIRCCommandTestCase): def _get_cmd(self, metadata=None): if metadata is None: - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': 'xxx', + } pkg_info, dist = self.create_dist(**metadata) return register(dist) @@ -109,7 +107,7 @@ def test_create_pypirc(self): cmd = self._get_cmd() # we shouldn't have a .pypirc file yet - self.assertFalse(os.path.exists(self.rc)) + assert not os.path.exists(self.rc) # patching input 
and getpass.getpass # so register gets happy @@ -128,13 +126,13 @@ def test_create_pypirc(self): del register_module.input # we should have a brand new .pypirc file - self.assertTrue(os.path.exists(self.rc)) + assert os.path.exists(self.rc) # with the content similar to WANTED_PYPIRC f = open(self.rc) try: content = f.read() - self.assertEqual(content, WANTED_PYPIRC) + assert content == WANTED_PYPIRC finally: f.close() @@ -143,6 +141,7 @@ def test_create_pypirc(self): # if we run the command again def _no_way(prompt=''): raise AssertionError(prompt) + register_module.input = _no_way cmd.show_response = 1 @@ -150,16 +149,15 @@ def _no_way(prompt=''): # let's see what the server received : we should # have 2 similar requests - self.assertEqual(len(self.conn.reqs), 2) + assert len(self.conn.reqs) == 2 req1 = dict(self.conn.reqs[0].headers) req2 = dict(self.conn.reqs[1].headers) - self.assertEqual(req1['Content-length'], '1374') - self.assertEqual(req2['Content-length'], '1374') - self.assertIn(b'xxx', self.conn.reqs[1].data) + assert req1['Content-length'] == '1358' + assert req2['Content-length'] == '1358' + assert b'xxx' in self.conn.reqs[1].data def test_password_not_in_file(self): - self.write_file(self.rc, PYPIRC_NOPASSWORD) cmd = self._get_cmd() cmd._set_config() @@ -168,7 +166,7 @@ def test_password_not_in_file(self): # dist.password should be set # therefore used afterwards by other commands - self.assertEqual(cmd.distribution.password, 'password') + assert cmd.distribution.password == 'password' def test_registering(self): # this test runs choice 2 @@ -182,11 +180,11 @@ def test_registering(self): del register_module.input # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) + assert len(self.conn.reqs) == 1 req = self.conn.reqs[0] headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '608') - self.assertIn(b'tarek', req.data) + assert headers['Content-length'] == '608' + assert b'tarek' in req.data def test_password_reset(self): # this test runs choice 3 @@ -200,35 +198,42 @@ def test_password_reset(self): del register_module.input # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) + assert len(self.conn.reqs) == 1 req = self.conn.reqs[0] headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '290') - self.assertIn(b'tarek', req.data) + assert headers['Content-length'] == '290' + assert b'tarek' in req.data - @unittest.skipUnless(docutils is not None, 'needs docutils') def test_strict(self): - # testing the script option + # testing the strict option # when on, the register command stops if # the metadata is incomplete or if # long_description is not reSt compliant + pytest.importorskip('docutils') + # empty metadata cmd = self._get_cmd({}) cmd.ensure_finalized() cmd.strict = 1 - self.assertRaises(DistutilsSetupError, cmd.run) + with pytest.raises(DistutilsSetupError): + cmd.run() # metadata are OK but long_description is broken - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'éxéxé', - 'name': 'xxx', 'version': 'xxx', - 'long_description': 'title\n==\n\ntext'} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'éxéxé', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': 'title\n==\n\ntext', + } cmd = self._get_cmd(metadata) cmd.ensure_finalized() cmd.strict = 1 - self.assertRaises(DistutilsSetupError, cmd.run) + with pytest.raises(DistutilsSetupError): + cmd.run() # now something that works metadata['long_description'] = 'title\n=====\n\ntext' @@ -255,11 
+260,15 @@ def test_strict(self): del register_module.input # and finally a Unicode test (bug #12114) - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things about esszet \u00df'} + metadata = { + 'url': 'xxx', + 'author': '\u00c9ric', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'description': 'Something about esszet \u00df', + 'long_description': 'More things about esszet \u00df', + } cmd = self._get_cmd(metadata) cmd.ensure_finalized() @@ -272,38 +281,33 @@ def test_strict(self): finally: del register_module.input - @unittest.skipUnless(docutils is not None, 'needs docutils') - def test_register_invalid_long_description(self): + def test_register_invalid_long_description(self, monkeypatch): + pytest.importorskip('docutils') description = ':funkie:`str`' # mimic Sphinx-specific markup - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': description} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': description, + } cmd = self._get_cmd(metadata) cmd.ensure_finalized() cmd.strict = True inputs = Inputs('2', 'tarek', 'tarek@ziade.org') - register_module.input = inputs - self.addCleanup(delattr, register_module, 'input') + monkeypatch.setattr(register_module, 'input', inputs, raising=False) - self.assertRaises(DistutilsSetupError, cmd.run) - - def test_check_metadata_deprecated(self): - # makes sure make_metadata is deprecated - cmd = self._get_cmd() - with check_warnings() as w: - warnings.simplefilter("always") - cmd.check_metadata() - self.assertEqual(len(w.warnings), 1) + with pytest.raises(DistutilsSetupError): + cmd.run() - def test_list_classifiers(self): + def test_list_classifiers(self, caplog): cmd = self._get_cmd() cmd.list_classifiers = 1 cmd.run() - results = self.get_logs(INFO) - self.assertEqual(results, ['running check', 'xxx']) + assert caplog.messages == ['running check', 'xxx'] - def test_show_response(self): + def test_show_response(self, caplog): # test that the --show-response option return a well formatted response cmd = self._get_cmd() inputs = Inputs('1', 'tarek', 'y') @@ -314,12 +318,4 @@ def test_show_response(self): finally: del register_module.input - results = self.get_logs(INFO) - self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert caplog.messages[3] == 75 * '-' + '\nxxx\n' + 75 * '-' diff --git a/setuptools/_distutils/tests/test_sdist.py b/setuptools/_distutils/tests/test_sdist.py index 4c51717ce6..fdb768e73f 100644 --- a/setuptools/_distutils/tests/test_sdist.py +++ b/setuptools/_distutils/tests/test_sdist.py @@ -1,28 +1,23 @@ """Tests for distutils.command.sdist.""" import os import tarfile -import unittest import warnings import zipfile from os.path import join from textwrap import dedent -from test.support import captured_stdout, run_unittest from .unix_compat import require_unix_id, require_uid_0, pwd, grp -from .py38compat import check_warnings +import pytest +import path +import jaraco.path -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False +from .py38compat import check_warnings from distutils.command.sdist import sdist, show_formats from 
distutils.core import Distribution from distutils.tests.test_config import BasePyPIRCCommandTestCase from distutils.errors import DistutilsOptionError -from distutils.spawn import find_executable -from distutils.log import WARN +from distutils.spawn import find_executable # noqa: F401 from distutils.filelist import FileList from distutils.archive_util import ARCHIVE_FORMATS @@ -48,33 +43,35 @@ somecode%(sep)sdoc.txt """ -class SDistTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - # PyPIRCCommandTestCase creates a temp dir already - # and put it in self.tmp_dir - super(SDistTestCase, self).setUp() - # setting up an environment - self.old_path = os.getcwd() - os.mkdir(join(self.tmp_dir, 'somecode')) - os.mkdir(join(self.tmp_dir, 'dist')) - # a package, and a README - self.write_file((self.tmp_dir, 'README'), 'xxx') - self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#') - self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY) - os.chdir(self.tmp_dir) - - def tearDown(self): - # back to normal - os.chdir(self.old_path) - super(SDistTestCase, self).tearDown() +@pytest.fixture(autouse=True) +def project_dir(request, pypirc): + self = request.instance + jaraco.path.build( + { + 'somecode': { + '__init__.py': '#', + }, + 'README': 'xxx', + 'setup.py': SETUP_PY, + }, + self.tmp_dir, + ) + with path.Path(self.tmp_dir): + yield + + +class TestSDist(BasePyPIRCCommandTestCase): def get_cmd(self, metadata=None): """Returns a cmd""" if metadata is None: - metadata = {'name': 'fake', 'version': '1.0', - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'} + metadata = { + 'name': 'fake', + 'version': '1.0', + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + } dist = Distribution(metadata) dist.script_name = 'setup.py' dist.packages = ['somecode'] @@ -83,7 +80,7 @@ def get_cmd(self, metadata=None): cmd.dist_dir = 'dist' return dist, cmd - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_prune_file_list(self): # this test creates a project with some VCS dirs and an NFS rename # file, then launches sdist to check they get pruned on all systems @@ -93,12 +90,10 @@ def test_prune_file_list(self): self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) - self.write_file((self.tmp_dir, 'somecode', '.hg', - 'ok'), 'xxx') + self.write_file((self.tmp_dir, 'somecode', '.hg', 'ok'), 'xxx') os.mkdir(join(self.tmp_dir, 'somecode', '.git')) - self.write_file((self.tmp_dir, 'somecode', '.git', - 'ok'), 'xxx') + self.write_file((self.tmp_dir, 'somecode', '.git', 'ok'), 'xxx') self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx') @@ -115,7 +110,7 @@ def test_prune_file_list(self): # now let's check what we have dist_folder = join(self.tmp_dir, 'dist') files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) + assert files == ['fake-1.0.zip'] zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) try: @@ -124,15 +119,19 @@ def test_prune_file_list(self): zip_file.close() # making sure everything has been pruned correctly - expected = ['', 'PKG-INFO', 'README', 'setup.py', - 'somecode/', 'somecode/__init__.py'] - self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") + expected = [ + 
'', + 'PKG-INFO', + 'README', + 'setup.py', + 'somecode/', + 'somecode/__init__.py', + ] + assert sorted(content) == ['fake-1.0/' + x for x in expected] + + @pytest.mark.usefixtures('needs_zlib') + @pytest.mark.skipif("not find_executable('tar')") + @pytest.mark.skipif("not find_executable('gzip')") def test_make_distribution(self): # now building a sdist dist, cmd = self.get_cmd() @@ -146,7 +145,7 @@ def test_make_distribution(self): dist_folder = join(self.tmp_dir, 'dist') result = os.listdir(dist_folder) result.sort() - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) + assert result == ['fake-1.0.tar', 'fake-1.0.tar.gz'] os.remove(join(dist_folder, 'fake-1.0.tar')) os.remove(join(dist_folder, 'fake-1.0.tar.gz')) @@ -159,11 +158,10 @@ def test_make_distribution(self): result = os.listdir(dist_folder) result.sort() - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) + assert result == ['fake-1.0.tar', 'fake-1.0.tar.gz'] - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_add_defaults(self): - # http://bugs.python.org/issue2279 # add_default should also include @@ -172,8 +170,7 @@ def test_add_defaults(self): # filling data_files by pointing files # in package_data - dist.package_data = {'': ['*.cfg', '*.dat'], - 'somecode': ['*.txt']} + dist.package_data = {'': ['*.cfg', '*.dat'], 'somecode': ['*.txt']} self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#') @@ -193,12 +190,11 @@ def test_add_defaults(self): self.write_file((some_dir, 'file.txt'), '#') self.write_file((some_dir, 'other_file.txt'), '#') - dist.data_files = [('data', ['data/data.dt', - 'buildout.cfg', - 'inroot.txt', - 'notexisting']), - 'some/file.txt', - 'some/other_file.txt'] + dist.data_files = [ + ('data', ['data/data.dt', 'buildout.cfg', 'inroot.txt', 'notexisting']), + 'some/file.txt', + 'some/other_file.txt', + ] # adding a script script_dir = join(self.tmp_dir, 'scripts') @@ -215,7 +211,7 @@ def test_add_defaults(self): # now let's check what we have dist_folder = join(self.tmp_dir, 'dist') files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) + assert files == ['fake-1.0.zip'] zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) try: @@ -224,13 +220,26 @@ def test_add_defaults(self): zip_file.close() # making sure everything was added - expected = ['', 'PKG-INFO', 'README', 'buildout.cfg', - 'data/', 'data/data.dt', 'inroot.txt', - 'scripts/', 'scripts/script.py', 'setup.py', - 'some/', 'some/file.txt', 'some/other_file.txt', - 'somecode/', 'somecode/__init__.py', 'somecode/doc.dat', - 'somecode/doc.txt'] - self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) + expected = [ + '', + 'PKG-INFO', + 'README', + 'buildout.cfg', + 'data/', + 'data/data.dt', + 'inroot.txt', + 'scripts/', + 'scripts/script.py', + 'setup.py', + 'some/', + 'some/file.txt', + 'some/other_file.txt', + 'somecode/', + 'somecode/__init__.py', + 'somecode/doc.dat', + 'somecode/doc.txt', + ] + assert sorted(content) == ['fake-1.0/' + x for x in expected] # checking the MANIFEST f = open(join(self.tmp_dir, 'MANIFEST')) @@ -238,10 +247,14 @@ def test_add_defaults(self): manifest = f.read() finally: f.close() - self.assertEqual(manifest, MANIFEST % {'sep': os.sep}) + assert manifest == MANIFEST % {'sep': os.sep} + + @staticmethod + def warnings(messages, prefix='warning: '): + return [msg for msg in messages if msg.startswith(prefix)] - 
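# Editor's sketch, not part of the patch: the warnings() helper above
# pairs with pytest's caplog fixture to replace self.get_logs(WARN) from
# the removed LoggingSilencer machinery; distutils now logs through the
# stdlib logging module, so captured records can be filtered directly.
import logging

def test_warning_count_example(caplog):
    logging.getLogger('distutils-example').warning('warning: check: oops')
    warnings = [m for m in caplog.messages if m.startswith('warning: ')]
    assert len(warnings) == 1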
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_metadata_check_option(self): + @pytest.mark.usefixtures('needs_zlib') + def test_metadata_check_option(self, caplog): # testing the `medata-check` option dist, cmd = self.get_cmd(metadata={}) @@ -249,19 +262,15 @@ def test_metadata_check_option(self): # with the `check` subcommand cmd.ensure_finalized() cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] - self.assertEqual(len(warnings), 2) + assert len(self.warnings(caplog.messages, 'warning: check: ')) == 1 # trying with a complete set of metadata - self.clear_logs() + caplog.clear() dist, cmd = self.get_cmd() cmd.ensure_finalized() cmd.metadata_check = 0 cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] - self.assertEqual(len(warnings), 0) + assert len(self.warnings(caplog.messages, 'warning: check: ')) == 0 def test_check_metadata_deprecated(self): # makes sure make_metadata is deprecated @@ -269,65 +278,68 @@ def test_check_metadata_deprecated(self): with check_warnings() as w: warnings.simplefilter("always") cmd.check_metadata() - self.assertEqual(len(w.warnings), 1) + assert len(w.warnings) == 1 - def test_show_formats(self): - with captured_stdout() as stdout: - show_formats() + def test_show_formats(self, capsys): + show_formats() # the output should be a header line + one line per format num_formats = len(ARCHIVE_FORMATS.keys()) - output = [line for line in stdout.getvalue().split('\n') - if line.strip().startswith('--formats=')] - self.assertEqual(len(output), num_formats) + output = [ + line + for line in capsys.readouterr().out.split('\n') + if line.strip().startswith('--formats=') + ] + assert len(output) == num_formats def test_finalize_options(self): dist, cmd = self.get_cmd() cmd.finalize_options() # default options set by finalize - self.assertEqual(cmd.manifest, 'MANIFEST') - self.assertEqual(cmd.template, 'MANIFEST.in') - self.assertEqual(cmd.dist_dir, 'dist') + assert cmd.manifest == 'MANIFEST' + assert cmd.template == 'MANIFEST.in' + assert cmd.dist_dir == 'dist' # formats has to be a string splitable on (' ', ',') or # a stringlist cmd.formats = 1 - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() cmd.formats = ['zip'] cmd.finalize_options() # formats has to be known cmd.formats = 'supazipa' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + with pytest.raises(DistutilsOptionError): + cmd.finalize_options() # the following tests make sure there is a nice error message instead # of a traceback when parsing an invalid manifest template - def _check_template(self, content): + def _check_template(self, content, caplog): dist, cmd = self.get_cmd() os.chdir(self.tmp_dir) self.write_file('MANIFEST.in', content) cmd.ensure_finalized() cmd.filelist = FileList() cmd.read_template() - warnings = self.get_logs(WARN) - self.assertEqual(len(warnings), 1) + assert len(self.warnings(caplog.messages)) == 1 - def test_invalid_template_unknown_command(self): - self._check_template('taunt knights *') + def test_invalid_template_unknown_command(self, caplog): + self._check_template('taunt knights *', caplog) - def test_invalid_template_wrong_arguments(self): + def test_invalid_template_wrong_arguments(self, caplog): # this manifest command takes one argument - self._check_template('prune') + self._check_template('prune', caplog) - @unittest.skipIf(os.name != 'nt', 'test 
relevant for Windows only') - def test_invalid_template_wrong_path(self): + @pytest.mark.skipif("platform.system() != 'Windows'") + def test_invalid_template_wrong_path(self, caplog): # on Windows, trailing slashes are not allowed # this used to crash instead of raising a warning: #8286 - self._check_template('include examples/') + self._check_template('include examples/', caplog) - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_get_file_list(self): # make sure MANIFEST is recalculated dist, cmd = self.get_cmd() @@ -341,12 +353,13 @@ def test_get_file_list(self): f = open(cmd.manifest) try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() - self.assertEqual(len(manifest), 5) + assert len(manifest) == 5 # adding a file self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') @@ -360,16 +373,17 @@ def test_get_file_list(self): f = open(cmd.manifest) try: - manifest2 = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest2 = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() # do we have the new file in MANIFEST ? - self.assertEqual(len(manifest2), 6) - self.assertIn('doc2.txt', manifest2[-1]) + assert len(manifest2) == 6 + assert 'doc2.txt' in manifest2[-1] - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_manifest_marker(self): # check that autogenerated MANIFESTs have a marker dist, cmd = self.get_cmd() @@ -378,22 +392,24 @@ def test_manifest_marker(self): f = open(cmd.manifest) try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() - self.assertEqual(manifest[0], - '# file GENERATED by distutils, do NOT edit') + assert manifest[0] == '# file GENERATED by distutils, do NOT edit' - @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") + @pytest.mark.usefixtures('needs_zlib') def test_manifest_comments(self): # make sure comments don't cause exceptions or wrong includes - contents = dedent("""\ + contents = dedent( + """\ # bad.py #bad.py good.py - """) + """ + ) dist, cmd = self.get_cmd() cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), contents) @@ -401,28 +417,31 @@ def test_manifest_comments(self): self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") cmd.run() - self.assertEqual(cmd.filelist.files, ['good.py']) + assert cmd.filelist.files == ['good.py'] - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @pytest.mark.usefixtures('needs_zlib') def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.formats = ['gztar'] cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') - self.write_file((self.tmp_dir, 'README.manual'), - 'This project maintains its MANIFEST file itself.') + self.write_file( + (self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.', + ) cmd.run() - self.assertEqual(cmd.filelist.files, ['README.manual']) + assert cmd.filelist.files == ['README.manual'] f = open(cmd.manifest) try: - manifest = [line.strip() for line in f.read().split('\n') - if 
line.strip() != ''] + manifest = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() - self.assertEqual(manifest, ['README.manual']) + assert manifest == ['README.manual'] archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') archive = tarfile.open(archive_name) @@ -430,16 +449,17 @@ def test_manual_manifest(self): filenames = [tarinfo.name for tarinfo in archive] finally: archive.close() - self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', - 'fake-1.0/README.manual']) + assert sorted(filenames) == [ + 'fake-1.0', + 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual', + ] - @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib") + @pytest.mark.usefixtures('needs_zlib') @require_unix_id @require_uid_0 - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") + @pytest.mark.skipif("not find_executable('tar')") + @pytest.mark.skipif("not find_executable('gzip')") def test_make_distribution_owner_group(self): # now building a sdist dist, cmd = self.get_cmd() @@ -456,8 +476,8 @@ def test_make_distribution_owner_group(self): archive = tarfile.open(archive_name) try: for member in archive.getmembers(): - self.assertEqual(member.uid, 0) - self.assertEqual(member.gid, 0) + assert member.uid == 0 + assert member.gid == 0 finally: archive.close() @@ -478,12 +498,6 @@ def test_make_distribution_owner_group(self): # rights (see #7408) try: for member in archive.getmembers(): - self.assertEqual(member.uid, os.getuid()) + assert member.uid == os.getuid() finally: archive.close() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_spawn.py b/setuptools/_distutils/tests/test_spawn.py index c5ed8e2b45..08a34ee2b8 100644 --- a/setuptools/_distutils/tests/test_spawn.py +++ b/setuptools/_distutils/tests/test_spawn.py @@ -2,23 +2,23 @@ import os import stat import sys -import unittest.mock -from test.support import run_unittest +import unittest.mock as mock + +from test.support import unix_shell + +import path -from .py35compat import unix_shell from . import py38compat as os_helper from distutils.spawn import find_executable from distutils.spawn import spawn from distutils.errors import DistutilsExecError from distutils.tests import support +import pytest -class SpawnTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - @unittest.skipUnless(os.name in ('nt', 'posix'), - 'Runs only under posix or nt') +class TestSpawn(support.TempdirManager): + @pytest.mark.skipif("os.name not in ('nt', 'posix')") def test_spawn(self): tmpdir = self.mkdtemp() @@ -32,7 +32,8 @@ def test_spawn(self): self.write_file(exe, 'exit 1') os.chmod(exe, 0o777) - self.assertRaises(DistutilsExecError, spawn, [exe]) + with pytest.raises(DistutilsExecError): + spawn([exe]) # now something that works if sys.platform != 'win32': @@ -45,95 +46,84 @@ def test_spawn(self): os.chmod(exe, 0o777) spawn([exe]) # should work without any error - def test_find_executable(self): - with os_helper.temp_dir() as tmp_dir: - # use TESTFN to get a pseudo-unique filename - program_noeext = os_helper.TESTFN - # Give the temporary program an ".exe" suffix for all. - # It's needed on Windows and not harmful on other platforms. 
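# Editor's sketch, not part of the patch: the rewrite below trades
# os_helper.temp_dir() and TESTFN for pytest's tmp_path fixture, which
# provides a unique, pre-created pathlib.Path directory per test and
# cleans it up without any explicit context manager.
import stat

def test_executable_example(tmp_path):
    exe = tmp_path / 'program.exe'
    exe.write_text('')                            # create an empty file
    exe.chmod(exe.stat().st_mode | stat.S_IXUSR)  # mark it executable
    assert exe.exists()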
- program = program_noeext + ".exe" - - filename = os.path.join(tmp_dir, program) - with open(filename, "wb"): - pass - os.chmod(filename, stat.S_IXUSR) - - # test path parameter - rv = find_executable(program, path=tmp_dir) - self.assertEqual(rv, filename) - - if sys.platform == 'win32': - # test without ".exe" extension - rv = find_executable(program_noeext, path=tmp_dir) - self.assertEqual(rv, filename) - - # test find in the current directory - with os_helper.change_cwd(tmp_dir): + def test_find_executable(self, tmp_path): + program_noeext = 'program' + # Give the temporary program an ".exe" suffix for all. + # It's needed on Windows and not harmful on other platforms. + program = program_noeext + ".exe" + + program_path = tmp_path / program + program_path.write_text("") + program_path.chmod(stat.S_IXUSR) + filename = str(program_path) + tmp_dir = path.Path(tmp_path) + + # test path parameter + rv = find_executable(program, path=tmp_dir) + assert rv == filename + + if sys.platform == 'win32': + # test without ".exe" extension + rv = find_executable(program_noeext, path=tmp_dir) + assert rv == filename + + # test find in the current directory + with tmp_dir: + rv = find_executable(program) + assert rv == program + + # test non-existent program + dont_exist_program = "dontexist_" + program + rv = find_executable(dont_exist_program, path=tmp_dir) + assert rv is None + + # PATH='': no match, except in the current directory + with os_helper.EnvironmentVarGuard() as env: + env['PATH'] = '' + with mock.patch( + 'distutils.spawn.os.confstr', return_value=tmp_dir, create=True + ), mock.patch('distutils.spawn.os.defpath', tmp_dir): rv = find_executable(program) - self.assertEqual(rv, program) - - # test non-existent program - dont_exist_program = "dontexist_" + program - rv = find_executable(dont_exist_program , path=tmp_dir) - self.assertIsNone(rv) - - # PATH='': no match, except in the current directory - with os_helper.EnvironmentVarGuard() as env: - env['PATH'] = '' - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): - rv = find_executable(program) - self.assertIsNone(rv) - - # look in current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # PATH=':': explicitly looks in the current directory - with os_helper.EnvironmentVarGuard() as env: - env['PATH'] = os.pathsep - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value='', create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): - rv = find_executable(program) - self.assertIsNone(rv) - - # look in current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # missing PATH: test os.confstr("CS_PATH") and os.defpath - with os_helper.EnvironmentVarGuard() as env: - env.pop('PATH', None) - - # without confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - side_effect=ValueError, - create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): + assert rv is None + + # look in current directory + with tmp_dir: rv = find_executable(program) - self.assertEqual(rv, filename) + assert rv == program + + # PATH=':': explicitly looks in the current directory + with os_helper.EnvironmentVarGuard() as env: + env['PATH'] = os.pathsep + with mock.patch( + 'distutils.spawn.os.confstr', return_value='', create=True + ), mock.patch('distutils.spawn.os.defpath', 
''): + rv = find_executable(program) + assert rv is None - # with confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): + # look in current directory + with tmp_dir: rv = find_executable(program) - self.assertEqual(rv, filename) + assert rv == program - def test_spawn_missing_exe(self): - with self.assertRaises(DistutilsExecError) as ctx: - spawn(['does-not-exist']) - self.assertIn("command 'does-not-exist' failed", str(ctx.exception)) + # missing PATH: test os.confstr("CS_PATH") and os.defpath + with os_helper.EnvironmentVarGuard() as env: + env.pop('PATH', None) + # without confstr + with mock.patch( + 'distutils.spawn.os.confstr', side_effect=ValueError, create=True + ), mock.patch('distutils.spawn.os.defpath', tmp_dir): + rv = find_executable(program) + assert rv == filename -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase) + # with confstr + with mock.patch( + 'distutils.spawn.os.confstr', return_value=tmp_dir, create=True + ), mock.patch('distutils.spawn.os.defpath', ''): + rv = find_executable(program) + assert rv == filename -if __name__ == "__main__": - run_unittest(test_suite()) + def test_spawn_missing_exe(self): + with pytest.raises(DistutilsExecError) as ctx: + spawn(['does-not-exist']) + assert "command 'does-not-exist' failed" in str(ctx.value) diff --git a/setuptools/_distutils/tests/test_sysconfig.py b/setuptools/_distutils/tests/test_sysconfig.py index 9de3cb7018..bfeaf9a6b9 100644 --- a/setuptools/_distutils/tests/test_sysconfig.py +++ b/setuptools/_distutils/tests/test_sysconfig.py @@ -1,91 +1,79 @@ """Tests for distutils.sysconfig.""" import contextlib import os -import shutil import subprocess import sys -import textwrap -import unittest +import pathlib +import pytest +import jaraco.envs +import path +from jaraco.text import trim + +import distutils from distutils import sysconfig -from distutils.ccompiler import get_default_compiler +from distutils.ccompiler import get_default_compiler # noqa: F401 from distutils.unixccompiler import UnixCCompiler -from distutils.tests import support -from test.support import run_unittest, swap_item - -from .py38compat import TESTFN -from .py38compat import check_warnings - +from test.support import swap_item -class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): - def setUp(self): - super(SysconfigTestCase, self).setUp() - self.makefile = None +from . 
import py37compat - def tearDown(self): - if self.makefile is not None: - os.unlink(self.makefile) - self.cleanup_testfn() - super(SysconfigTestCase, self).tearDown() - - def cleanup_testfn(self): - if os.path.isfile(TESTFN): - os.remove(TESTFN) - elif os.path.isdir(TESTFN): - shutil.rmtree(TESTFN) +@pytest.mark.usefixtures('save_env') +class TestSysconfig: def test_get_config_h_filename(self): config_h = sysconfig.get_config_h_filename() - self.assertTrue(os.path.isfile(config_h), config_h) + assert os.path.isfile(config_h) - @unittest.skipIf(sys.platform == 'win32', - 'Makefile only exists on Unix like systems') + @pytest.mark.skipif("platform.system() == 'Windows'") + @pytest.mark.skipif("sys.implementation.name != 'cpython'") def test_get_makefile_filename(self): makefile = sysconfig.get_makefile_filename() - self.assertTrue(os.path.isfile(makefile), makefile) + assert os.path.isfile(makefile) - def test_get_python_lib(self): - # XXX doesn't work on Linux when Python was never installed before - #self.assertTrue(os.path.isdir(lib_dir), lib_dir) - # test for pythonxx.lib? - self.assertNotEqual(sysconfig.get_python_lib(), - sysconfig.get_python_lib(prefix=TESTFN)) + def test_get_python_lib(self, tmp_path): + assert sysconfig.get_python_lib() != sysconfig.get_python_lib(prefix=tmp_path) def test_get_config_vars(self): cvars = sysconfig.get_config_vars() - self.assertIsInstance(cvars, dict) - self.assertTrue(cvars) - - @unittest.skip('sysconfig.IS_PYPY') - def test_srcdir(self): - # See Issues #15322, #15364. - srcdir = sysconfig.get_config_var('srcdir') - - self.assertTrue(os.path.isabs(srcdir), srcdir) - self.assertTrue(os.path.isdir(srcdir), srcdir) - - if sysconfig.python_build: - # The python executable has not been installed so srcdir - # should be a full source checkout. - Python_h = os.path.join(srcdir, 'Include', 'Python.h') - self.assertTrue(os.path.exists(Python_h), Python_h) - self.assertTrue(sysconfig._is_python_source_dir(srcdir)) - elif os.name == 'posix': - self.assertEqual( - os.path.dirname(sysconfig.get_makefile_filename()), - srcdir) + assert isinstance(cvars, dict) + assert cvars + + @pytest.mark.skipif('sysconfig.IS_PYPY') + @pytest.mark.skipif('sysconfig.python_build') + @pytest.mark.xfail('platform.system() == "Windows"') + def test_srcdir_simple(self): + # See #15364. + srcdir = pathlib.Path(sysconfig.get_config_var('srcdir')) + + assert srcdir.absolute() + assert srcdir.is_dir() + + makefile = pathlib.Path(sysconfig.get_makefile_filename()) + assert makefile.parent.samefile(srcdir) + + @pytest.mark.skipif('sysconfig.IS_PYPY') + @pytest.mark.skipif('not sysconfig.python_build') + def test_srcdir_python_build(self): + # See #15364. + srcdir = pathlib.Path(sysconfig.get_config_var('srcdir')) + + # The python executable has not been installed so srcdir + # should be a full source checkout. + Python_h = srcdir.joinpath('Include', 'Python.h') + assert Python_h.is_file() + assert sysconfig._is_python_source_dir(srcdir) + assert sysconfig._is_python_source_dir(str(srcdir)) def test_srcdir_independent_of_cwd(self): - # srcdir should be independent of the current working directory - # See Issues #15322, #15364. + """ + srcdir should be independent of the current working directory + """ + # See #15364. 
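
The hunk below swaps the manual os.chdir()/try/finally sequence for `path.Path` used as a context manager, which changes into the directory on entry and restores the previous working directory on exit. A minimal sketch of the behavior being relied on, assuming the chdir-on-enter semantics of the third-party `path` package this file already imports:

    import os
    import path

    before = os.getcwd()
    with path.Path('..'):
        # inside the block, the cwd is the parent of `before`
        assert os.getcwd() == os.path.dirname(before)
    # on exit, the previous cwd is restored even if the body raised
    assert os.getcwd() == before
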
srcdir = sysconfig.get_config_var('srcdir') - cwd = os.getcwd() - try: - os.chdir('..') + with path.Path('..'): srcdir2 = sysconfig.get_config_var('srcdir') - finally: - os.chdir(cwd) - self.assertEqual(srcdir, srcdir2) + assert srcdir == srcdir2 def customize_compiler(self): # make sure AR gets caught @@ -109,7 +97,6 @@ def set_executables(self, **kw): 'CCSHARED': '--sc-ccshared', 'LDSHARED': 'sc_ldshared', 'SHLIB_SUFFIX': 'sc_shutil_suffix', - # On macOS, disable _osx_support.customize_compiler() 'CUSTOMIZED_OSX_COMPILER': 'True', } @@ -122,8 +109,7 @@ def set_executables(self, **kw): return comp - @unittest.skipUnless(get_default_compiler() == 'unix', - 'not testing if default compiler is not unix') + @pytest.mark.skipif("get_default_compiler() != 'unix'") def test_customize_compiler(self): # Make sure that sysconfig._config_vars is initialized sysconfig.get_config_vars() @@ -140,29 +126,23 @@ def test_customize_compiler(self): os.environ['RANLIB'] = 'env_ranlib' comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'env_ar --env-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'env_cpp --env-cppflags') - self.assertEqual(comp.exes['compiler'], - 'env_cc --sc-cflags --env-cflags --env-cppflags') - self.assertEqual(comp.exes['compiler_so'], - ('env_cc --sc-cflags ' - '--env-cflags ''--env-cppflags --sc-ccshared')) - self.assertEqual(comp.exes['compiler_cxx'], - 'env_cxx --env-cxx-flags') - self.assertEqual(comp.exes['linker_exe'], - 'env_cc') - self.assertEqual(comp.exes['linker_so'], - ('env_ldshared --env-ldflags --env-cflags' - ' --env-cppflags')) - self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') + assert comp.exes['archiver'] == 'env_ar --env-arflags' + assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags' + assert comp.exes['compiler'] == 'env_cc --sc-cflags --env-cflags --env-cppflags' + assert comp.exes['compiler_so'] == ( + 'env_cc --sc-cflags ' '--env-cflags ' '--env-cppflags --sc-ccshared' + ) + assert comp.exes['compiler_cxx'] == 'env_cxx --env-cxx-flags' + assert comp.exes['linker_exe'] == 'env_cc' + assert comp.exes['linker_so'] == ( + 'env_ldshared --env-ldflags --env-cflags' ' --env-cppflags' + ) + assert comp.shared_lib_extension == 'sc_shutil_suffix' if sys.platform == "darwin": - self.assertEqual(comp.exes['ranlib'], - 'env_ranlib') + assert comp.exes['ranlib'] == 'env_ranlib' else: - self.assertTrue('ranlib' not in comp.exes) + assert 'ranlib' not in comp.exes del os.environ['AR'] del os.environ['CC'] @@ -176,57 +156,55 @@ def test_customize_compiler(self): del os.environ['RANLIB'] comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'sc_ar --sc-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'sc_cc -E') - self.assertEqual(comp.exes['compiler'], - 'sc_cc --sc-cflags') - self.assertEqual(comp.exes['compiler_so'], - 'sc_cc --sc-cflags --sc-ccshared') - self.assertEqual(comp.exes['compiler_cxx'], - 'sc_cxx') - self.assertEqual(comp.exes['linker_exe'], - 'sc_cc') - self.assertEqual(comp.exes['linker_so'], - 'sc_ldshared') - self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') - self.assertTrue('ranlib' not in comp.exes) - - def test_parse_makefile_base(self): - self.makefile = TESTFN - fd = open(self.makefile, 'w') - try: - fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n') - fd.write('VAR=$OTHER\nOTHER=foo') - finally: - fd.close() - d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", - 'OTHER': 'foo'}) - - 
def test_parse_makefile_literal_dollar(self): - self.makefile = TESTFN - fd = open(self.makefile, 'w') - try: - fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n') - fd.write('VAR=$OTHER\nOTHER=foo') - finally: - fd.close() - d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", - 'OTHER': 'foo'}) - + assert comp.exes['archiver'] == 'sc_ar --sc-arflags' + assert comp.exes['preprocessor'] == 'sc_cc -E' + assert comp.exes['compiler'] == 'sc_cc --sc-cflags' + assert comp.exes['compiler_so'] == 'sc_cc --sc-cflags --sc-ccshared' + assert comp.exes['compiler_cxx'] == 'sc_cxx' + assert comp.exes['linker_exe'] == 'sc_cc' + assert comp.exes['linker_so'] == 'sc_ldshared' + assert comp.shared_lib_extension == 'sc_shutil_suffix' + assert 'ranlib' not in comp.exes + + def test_parse_makefile_base(self, tmp_path): + makefile = tmp_path / 'Makefile' + makefile.write_text( + trim( + """ + CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB' + VAR=$OTHER + OTHER=foo + """ + ) + ) + d = sysconfig.parse_makefile(makefile) + assert d == {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", 'OTHER': 'foo'} + + def test_parse_makefile_literal_dollar(self, tmp_path): + makefile = tmp_path / 'Makefile' + makefile.write_text( + trim( + """ + CONFIG_ARGS= '--arg1=optarg1' 'ENV=\\$$LIB' + VAR=$OTHER + OTHER=foo + """ + ) + ) + d = sysconfig.parse_makefile(makefile) + assert d == {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", 'OTHER': 'foo'} def test_sysconfig_module(self): import sysconfig as global_sysconfig - self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), - sysconfig.get_config_var('CFLAGS')) - self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), - sysconfig.get_config_var('LDFLAGS')) - @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'), - 'compiler flags customized') + assert global_sysconfig.get_config_var('CFLAGS') == sysconfig.get_config_var( + 'CFLAGS' + ) + assert global_sysconfig.get_config_var('LDFLAGS') == sysconfig.get_config_var( + 'LDFLAGS' + ) + + @pytest.mark.skipif("sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER')") def test_sysconfig_compiler_vars(self): # On OS X, binary installers support extension module building on # various levels of the operating system with differing Xcode @@ -243,67 +221,98 @@ def test_sysconfig_compiler_vars(self): # The longer-term solution is to only have one version of sysconfig. 
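
The rewritten Makefile tests above build their fixture files with `jaraco.text.trim`, which dedents a triple-quoted literal and strips the blank lines the quoting introduces, so the file written to disk starts at column zero. A rough stdlib-only stand-in (this local `trim` is illustrative, not the real implementation):

    import textwrap

    def trim(s: str) -> str:
        # dedent the common leading whitespace, then drop the
        # leading/trailing blank lines from the triple-quoted form
        return textwrap.dedent(s).strip()

    assert trim(
        """
        VAR=$OTHER
        OTHER=foo
        """
    ) == 'VAR=$OTHER\nOTHER=foo'
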
import sysconfig as global_sysconfig + if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'): - self.skipTest('compiler flags customized') - self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), - sysconfig.get_config_var('LDSHARED')) - self.assertEqual(global_sysconfig.get_config_var('CC'), - sysconfig.get_config_var('CC')) - - @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None, - 'EXT_SUFFIX required for this test') + pytest.skip('compiler flags customized') + assert global_sysconfig.get_config_var('LDSHARED') == sysconfig.get_config_var( + 'LDSHARED' + ) + assert global_sysconfig.get_config_var('CC') == sysconfig.get_config_var('CC') + + @pytest.mark.skipif("not sysconfig.get_config_var('EXT_SUFFIX')") def test_SO_deprecation(self): - self.assertWarns(DeprecationWarning, - sysconfig.get_config_var, 'SO') - - @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None, - 'EXT_SUFFIX required for this test') - def test_SO_value(self): - with check_warnings(('', DeprecationWarning)): - self.assertEqual(sysconfig.get_config_var('SO'), - sysconfig.get_config_var('EXT_SUFFIX')) - - @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None, - 'EXT_SUFFIX required for this test') - def test_SO_in_vars(self): - vars = sysconfig.get_config_vars() - self.assertIsNotNone(vars['SO']) - self.assertEqual(vars['SO'], vars['EXT_SUFFIX']) - - def test_customize_compiler_before_get_config_vars(self): + with pytest.warns(DeprecationWarning): + sysconfig.get_config_var('SO') + + def test_customize_compiler_before_get_config_vars(self, tmp_path): # Issue #21923: test that a Distribution compiler # instance can be called without an explicit call to # get_config_vars(). - with open(TESTFN, 'w') as f: - f.writelines(textwrap.dedent('''\ + file = tmp_path / 'file' + file.write_text( + trim( + """ from distutils.core import Distribution config = Distribution().get_command_obj('config') # try_compile may pass or it may fail if no compiler # is found but it should not raise an exception. 
rc = config.try_compile('int x;')
-            '''))
-        p = subprocess.Popen([str(sys.executable), TESTFN],
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.STDOUT,
-                             universal_newlines=True)
+                """
+            )
+        )
+        p = subprocess.Popen(
+            py37compat.subprocess_args(sys.executable, file),
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            universal_newlines=True,
+        )
         outs, errs = p.communicate()
-        self.assertEqual(0, p.returncode, "Subprocess failed: " + outs)
+        assert 0 == p.returncode, "Subprocess failed: " + outs
 
     def test_parse_config_h(self):
         config_h = sysconfig.get_config_h_filename()
         input = {}
         with open(config_h, encoding="utf-8") as f:
             result = sysconfig.parse_config_h(f, g=input)
-        self.assertTrue(input is result)
+        assert input is result
         with open(config_h, encoding="utf-8") as f:
             result = sysconfig.parse_config_h(f)
-        self.assertTrue(isinstance(result, dict))
-
-def test_suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(SysconfigTestCase))
-    return suite
-
-
-if __name__ == '__main__':
-    run_unittest(test_suite())
+        assert isinstance(result, dict)
+
+    @pytest.mark.skipif("platform.system() != 'Windows'")
+    @pytest.mark.skipif("sys.implementation.name != 'cpython'")
+    def test_win_ext_suffix(self):
+        assert sysconfig.get_config_var("EXT_SUFFIX").endswith(".pyd")
+        assert sysconfig.get_config_var("EXT_SUFFIX") != ".pyd"
+
+    @pytest.mark.skipif("platform.system() != 'Windows'")
+    @pytest.mark.skipif("sys.implementation.name != 'cpython'")
+    @pytest.mark.skipif(
+        '\\PCbuild\\'.casefold() not in sys.executable.casefold(),
+        reason='Need sys.executable to be in a source tree',
+    )
+    def test_win_build_venv_from_source_tree(self, tmp_path):
+        """Ensure distutils.sysconfig detects venvs from source tree builds."""
+        env = jaraco.envs.VEnv()
+        env.create_opts = env.clean_opts
+        env.root = tmp_path
+        env.ensure_env()
+        cmd = [
+            env.exe(),
+            "-c",
+            "import distutils.sysconfig; print(distutils.sysconfig.python_build)",
+        ]
+        distutils_path = os.path.dirname(os.path.dirname(distutils.__file__))
+        out = subprocess.check_output(
+            cmd, env={**os.environ, "PYTHONPATH": distutils_path}
+        )
+        # check_output returns bytes and print() appends a newline,
+        # so decode and strip before comparing
+        assert out.decode().strip() == "True"
+
+    def test_get_python_inc_missing_config_dir(self, monkeypatch):
+        """
+        In portable Python installations, sysconfig will be broken,
+        pointing to the directories where the installation was built and
+        not where it currently is. In this case, ensure that the missing
+        directory isn't used for get_python_inc.
+
+        See pypa/distutils#178. 
+ """ + + def override(name): + if name == 'INCLUDEPY': + return '/does-not-exist' + return sysconfig.get_config_var(name) + + monkeypatch.setattr(sysconfig, 'get_config_var', override) + + assert os.path.exists(sysconfig.get_python_inc()) diff --git a/setuptools/_distutils/tests/test_text_file.py b/setuptools/_distutils/tests/test_text_file.py index ebac3d52f9..7c8dc5be54 100644 --- a/setuptools/_distutils/tests/test_text_file.py +++ b/setuptools/_distutils/tests/test_text_file.py @@ -1,9 +1,7 @@ """Tests for distutils.text_file.""" import os -import unittest from distutils.text_file import TextFile from distutils.tests import support -from test.support import run_unittest TEST_DATA = """# test file @@ -12,32 +10,35 @@ continues on next line """ -class TextFileTestCase(support.TempdirManager, unittest.TestCase): +class TestTextFile(support.TempdirManager): def test_class(self): # old tests moved from text_file.__main__ # so they are really called by the buildbots # result 1: no fancy options - result1 = ['# test file\n', '\n', 'line 3 \\\n', - '# intervening comment\n', - ' continues on next line\n'] + result1 = [ + '# test file\n', + '\n', + 'line 3 \\\n', + '# intervening comment\n', + ' continues on next line\n', + ] # result 2: just strip comments - result2 = ["\n", - "line 3 \\\n", - " continues on next line\n"] + result2 = ["\n", "line 3 \\\n", " continues on next line\n"] # result 3: just strip blank lines - result3 = ["# test file\n", - "line 3 \\\n", - "# intervening comment\n", - " continues on next line\n"] + result3 = [ + "# test file\n", + "line 3 \\\n", + "# intervening comment\n", + " continues on next line\n", + ] # result 4: default, strip comments, blank lines, # and trailing whitespace - result4 = ["line 3 \\", - " continues on next line"] + result4 = ["line 3 \\", " continues on next line"] # result 5: strip comments and blanks, plus join lines (but don't # "collapse" joined lines @@ -49,7 +50,7 @@ def test_class(self): def test_input(count, description, file, expected_result): result = file.readlines() - self.assertEqual(result, expected_result) + assert result == expected_result tmpdir = self.mkdtemp() filename = os.path.join(tmpdir, "test.txt") @@ -59,22 +60,25 @@ def test_input(count, description, file, expected_result): finally: out_file.close() - in_file = TextFile(filename, strip_comments=0, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) + in_file = TextFile( + filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0 + ) try: test_input(1, "no processing", in_file, result1) finally: in_file.close() - in_file = TextFile(filename, strip_comments=1, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) + in_file = TextFile( + filename, strip_comments=1, skip_blanks=0, lstrip_ws=0, rstrip_ws=0 + ) try: test_input(2, "strip comments", in_file, result2) finally: in_file.close() - in_file = TextFile(filename, strip_comments=0, skip_blanks=1, - lstrip_ws=0, rstrip_ws=0) + in_file = TextFile( + filename, strip_comments=0, skip_blanks=1, lstrip_ws=0, rstrip_ws=0 + ) try: test_input(3, "strip blanks", in_file, result3) finally: @@ -86,22 +90,23 @@ def test_input(count, description, file, expected_result): finally: in_file.close() - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1) + in_file = TextFile( + filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1 + ) try: test_input(5, "join lines without collapsing", in_file, result5) finally: in_file.close() - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - 
join_lines=1, rstrip_ws=1, collapse_join=1) + in_file = TextFile( + filename, + strip_comments=1, + skip_blanks=1, + join_lines=1, + rstrip_ws=1, + collapse_join=1, + ) try: test_input(6, "join lines with collapsing", in_file, result6) finally: in_file.close() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_unixccompiler.py b/setuptools/_distutils/tests/test_unixccompiler.py index 4574f77fb4..a018442459 100644 --- a/setuptools/_distutils/tests/test_unixccompiler.py +++ b/setuptools/_distutils/tests/test_unixccompiler.py @@ -1,8 +1,7 @@ """Tests for distutils.unixccompiler.""" import os import sys -import unittest -from test.support import run_unittest +import unittest.mock as mock from .py38compat import EnvironmentVarGuard @@ -12,27 +11,28 @@ from distutils.util import _clear_cached_macosx_ver from . import support +import pytest -class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): - - def setUp(self): - super().setUp() - self._backup_platform = sys.platform - self._backup_get_config_var = sysconfig.get_config_var - self._backup_get_config_vars = sysconfig.get_config_vars - class CompilerWrapper(UnixCCompiler): - def rpath_foo(self): - return self.runtime_library_dir_option('/foo') - self.cc = CompilerWrapper() - - def tearDown(self): - super().tearDown() - sys.platform = self._backup_platform - sysconfig.get_config_var = self._backup_get_config_var - sysconfig.get_config_vars = self._backup_get_config_vars - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - def test_runtime_libdir_option(self): + +@pytest.fixture(autouse=True) +def save_values(monkeypatch): + monkeypatch.setattr(sys, 'platform', sys.platform) + monkeypatch.setattr(sysconfig, 'get_config_var', sysconfig.get_config_var) + monkeypatch.setattr(sysconfig, 'get_config_vars', sysconfig.get_config_vars) + + +@pytest.fixture(autouse=True) +def compiler_wrapper(request): + class CompilerWrapper(UnixCCompiler): + def rpath_foo(self): + return self.runtime_library_dir_option('/foo') + + request.instance.cc = CompilerWrapper() + + +class TestUnixCCompiler(support.TempdirManager): + @pytest.mark.skipif('platform.system == "Windows"') # noqa: C901 + def test_runtime_libdir_option(self): # noqa: C901 # Issue #5900; GitHub Issue #37 # # Ensure RUNPATH is added to extension modules with RPATH if @@ -48,18 +48,18 @@ def test_runtime_libdir_option(self): # Version value of None generates two tests: as None and as empty string # Expected flag value of None means an mismatch exception is expected darwin_test_cases = [ - ((None , None ), darwin_lib_flag), - ((None , '11' ), darwin_rpath_flag), - (('10' , None ), darwin_lib_flag), - (('10.3' , None ), darwin_lib_flag), - (('10.3.1', None ), darwin_lib_flag), - (('10.5' , None ), darwin_rpath_flag), - (('10.5.1', None ), darwin_rpath_flag), - (('10.3' , '10.3' ), darwin_lib_flag), - (('10.3' , '10.5' ), darwin_rpath_flag), - (('10.5' , '10.3' ), darwin_lib_flag), - (('10.5' , '11' ), darwin_rpath_flag), - (('10.4' , '10' ), None), + ((None, None), darwin_lib_flag), + ((None, '11'), darwin_rpath_flag), + (('10', None), darwin_lib_flag), + (('10.3', None), darwin_lib_flag), + (('10.3.1', None), darwin_lib_flag), + (('10.5', None), darwin_rpath_flag), + (('10.5.1', None), darwin_rpath_flag), + (('10.3', '10.3'), darwin_lib_flag), + (('10.3', '10.5'), darwin_rpath_flag), + (('10.5', '10.3'), darwin_lib_flag), + 
(('10.5', '11'), darwin_rpath_flag), + (('10.4', '10'), None), ] def make_darwin_gcv(syscfg_macosx_ver): @@ -67,12 +67,15 @@ def gcv(var): if var == darwin_ver_var: return syscfg_macosx_ver return "xxx" + return gcv def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag): env = os.environ - msg = "macOS version = (sysconfig=%r, env=%r)" % \ - (syscfg_macosx_ver, env_macosx_ver) + msg = "macOS version = (sysconfig={!r}, env={!r})".format( + syscfg_macosx_ver, + env_macosx_ver, + ) # Save old_gcv = sysconfig.get_config_var @@ -88,10 +91,11 @@ def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag): # Run the test if expected_flag is not None: - self.assertEqual(self.cc.rpath_foo(), expected_flag, msg=msg) + assert self.cc.rpath_foo() == expected_flag, msg else: - with self.assertRaisesRegex(DistutilsPlatformError, - darwin_ver_var + r' mismatch', msg=msg): + with pytest.raises( + DistutilsPlatformError, match=darwin_ver_var + r' mismatch' + ): self.cc.rpath_foo() # Restore @@ -117,83 +121,98 @@ def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag): # hp-ux sys.platform = 'hp-ux' + def gcv(v): return 'xxx' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo']) + assert self.cc.rpath_foo() == ['+s', '-L/foo'] def gcv(v): return 'gcc' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) + assert self.cc.rpath_foo() == ['-Wl,+s', '-L/foo'] def gcv(v): return 'g++' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) + assert self.cc.rpath_foo() == ['-Wl,+s', '-L/foo'] sysconfig.get_config_var = old_gcv # GCC GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'gcc' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') + assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' def gcv(v): if v == 'CC': return 'gcc -pthread -B /bar' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') + assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' # GCC non-GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'gcc' elif v == 'GNULD': return 'no' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') + assert self.cc.rpath_foo() == '-Wl,-R/foo' # GCC GNULD with fully qualified configuration prefix # see #7617 sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'x86_64-pc-linux-gnu-gcc-4.4.2' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') + assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' # non-GCC GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'cc' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') + assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' # non-GCC non-GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'cc' elif v == 'GNULD': return 'no' + sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') + assert self.cc.rpath_foo() == '-Wl,-R/foo' - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + @pytest.mark.skipif('platform.system == "Windows"') def test_cc_overrides_ldshared(self): # Issue #18080: # ensure that setting CC env variable 
also changes default linker @@ -206,15 +225,56 @@ def gcvs(*args, _orig=sysconfig.get_config_vars): if args: return list(map(sysconfig.get_config_var, args)) return _orig() + sysconfig.get_config_var = gcv sysconfig.get_config_vars = gcvs with EnvironmentVarGuard() as env: env['CC'] = 'my_cc' del env['LDSHARED'] sysconfig.customize_compiler(self.cc) - self.assertEqual(self.cc.linker_so[0], 'my_cc') + assert self.cc.linker_so[0] == 'my_cc' + + @pytest.mark.skipif('platform.system == "Windows"') + def test_cc_overrides_ldshared_for_cxx_correctly(self): + """ + Ensure that setting CC env variable also changes default linker + correctly when building C++ extensions. + + pypa/distutils#126 + """ + + def gcv(v): + if v == 'LDSHARED': + return 'gcc-4.2 -bundle -undefined dynamic_lookup ' + elif v == 'CXX': + return 'g++-4.2' + return 'gcc-4.2' + + def gcvs(*args, _orig=sysconfig.get_config_vars): + if args: + return list(map(sysconfig.get_config_var, args)) + return _orig() + + sysconfig.get_config_var = gcv + sysconfig.get_config_vars = gcvs + with mock.patch.object( + self.cc, 'spawn', return_value=None + ) as mock_spawn, mock.patch.object( + self.cc, '_need_link', return_value=True + ), mock.patch.object( + self.cc, 'mkpath', return_value=None + ), EnvironmentVarGuard() as env: + env['CC'] = 'ccache my_cc' + env['CXX'] = 'my_cxx' + del env['LDSHARED'] + sysconfig.customize_compiler(self.cc) + assert self.cc.linker_so[0:2] == ['ccache', 'my_cc'] + self.cc.link(None, [], 'a.out', target_lang='c++') + call_args = mock_spawn.call_args[0][0] + expected = ['my_cxx', '-bundle', '-undefined', 'dynamic_lookup'] + assert call_args[:4] == expected - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + @pytest.mark.skipif('platform.system == "Windows"') def test_explicit_ldshared(self): # Issue #18080: # ensure that setting CC env variable does not change @@ -228,13 +288,14 @@ def gcvs(*args, _orig=sysconfig.get_config_vars): if args: return list(map(sysconfig.get_config_var, args)) return _orig() + sysconfig.get_config_var = gcv sysconfig.get_config_vars = gcvs with EnvironmentVarGuard() as env: env['CC'] = 'my_cc' env['LDSHARED'] = 'my_ld -bundle -dynamic' sysconfig.customize_compiler(self.cc) - self.assertEqual(self.cc.linker_so[0], 'my_ld') + assert self.cc.linker_so[0] == 'my_ld' def test_has_function(self): # Issue https://github.com/pypa/distutils/issues/64: @@ -242,11 +303,4 @@ def test_has_function(self): # FileNotFoundError: [Errno 2] No such file or directory: 'a.out' self.cc.output_dir = 'scratch' os.chdir(self.mkdtemp()) - self.cc.has_function('abort', includes=['stdlib.h']) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + self.cc.has_function('abort') diff --git a/setuptools/_distutils/tests/test_upload.py b/setuptools/_distutils/tests/test_upload.py index ce3e84a248..af113b8b6e 100644 --- a/setuptools/_distutils/tests/test_upload.py +++ b/setuptools/_distutils/tests/test_upload.py @@ -1,18 +1,16 @@ """Tests for distutils.command.upload.""" import os -import unittest import unittest.mock as mock from urllib.request import HTTPError -from test.support import run_unittest from distutils.command import upload as upload_mod from distutils.command.upload import upload from distutils.core import Distribution from distutils.errors import DistutilsError -from distutils.log import ERROR, INFO from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase +import 
pytest PYPIRC_LONG_PASSWORD = """\ [distutils] @@ -43,8 +41,8 @@ username:me """ -class FakeOpen(object): +class FakeOpen: def __init__(self, url, msg=None, code=None): self.url = url if not isinstance(url, str): @@ -57,7 +55,7 @@ def __init__(self, url, msg=None, code=None): def getheader(self, name, default=None): return { 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) + }.get(name.lower(), default) def read(self): return b'xyzzy' @@ -66,35 +64,31 @@ def getcode(self): return self.code -class uploadTestCase(BasePyPIRCCommandTestCase): +@pytest.fixture(autouse=True) +def urlopen(request, monkeypatch): + self = request.instance + monkeypatch.setattr(upload_mod, 'urlopen', self._urlopen) + self.next_msg = self.next_code = None - def setUp(self): - super(uploadTestCase, self).setUp() - self.old_open = upload_mod.urlopen - upload_mod.urlopen = self._urlopen - self.last_open = None - self.next_msg = None - self.next_code = None - - def tearDown(self): - upload_mod.urlopen = self.old_open - super(uploadTestCase, self).tearDown() +class TestUpload(BasePyPIRCCommandTestCase): def _urlopen(self, url): self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code) return self.last_open def test_finalize_options(self): - # new format self.write_file(self.rc, PYPIRC) dist = Distribution() cmd = upload(dist) cmd.finalize_options() - for attr, waited in (('username', 'me'), ('password', 'secret'), - ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/')): - self.assertEqual(getattr(cmd, attr), waited) + for attr, waited in ( + ('username', 'me'), + ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ): + assert getattr(cmd, attr) == waited def test_saved_password(self): # file with no password @@ -104,16 +98,16 @@ def test_saved_password(self): dist = Distribution() cmd = upload(dist) cmd.finalize_options() - self.assertEqual(cmd.password, None) + assert cmd.password is None # make sure we get it as well, if another command # initialized it at the dist level dist.password = 'xxx' cmd = upload(dist) cmd.finalize_options() - self.assertEqual(cmd.password, 'xxx') + assert cmd.password == 'xxx' - def test_upload(self): + def test_upload(self, caplog): tmp = self.mkdtemp() path = os.path.join(tmp, 'xxx') self.write_file(path) @@ -130,34 +124,32 @@ def test_upload(self): # what did we send ? 
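
The autouse `urlopen` fixture defined above is the pytest replacement for the old setUp/tearDown pair: `monkeypatch.setattr` registers its own undo, so nothing needs restoring by hand, and `request.instance` reaches the test-class instance when the fixture serves class-based tests. The pattern in isolation, assuming `upload_mod` is the imported `distutils.command.upload` module as in this file:

    import pytest

    @pytest.fixture(autouse=True)
    def urlopen(request, monkeypatch):
        self = request.instance
        # patch the module-level urlopen with the instance's stub;
        # monkeypatch restores the real one after each test
        monkeypatch.setattr(upload_mod, 'urlopen', self._urlopen)
        self.next_msg = self.next_code = None
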
headers = dict(self.last_open.req.headers) - self.assertGreaterEqual(int(headers['Content-length']), 2162) + assert int(headers['Content-length']) >= 2162 content_type = headers['Content-type'] - self.assertTrue(content_type.startswith('multipart/form-data')) - self.assertEqual(self.last_open.req.get_method(), 'POST') + assert content_type.startswith('multipart/form-data') + assert self.last_open.req.get_method() == 'POST' expected_url = 'https://upload.pypi.org/legacy/' - self.assertEqual(self.last_open.req.get_full_url(), expected_url) + assert self.last_open.req.get_full_url() == expected_url data = self.last_open.req.data - self.assertIn(b'xxx',data) - self.assertIn(b'protocol_version', data) - self.assertIn(b'sha256_digest', data) - self.assertIn( + assert b'xxx' in data + assert b'protocol_version' in data + assert b'sha256_digest' in data + assert ( b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' - b'6860', - data + b'6860' in data ) if b'md5_digest' in data: - self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data) + assert b'f561aaf6ef0bf14d4208bb46a4ccb3ad' in data if b'blake2_256_digest' in data: - self.assertIn( + assert ( b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be' b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc' - b'ce443f1534330a', - data + b'ce443f1534330a' in data ) # The PyPI response body was echoed - results = self.get_logs(INFO) - self.assertEqual(results[-1], 75 * '-' + '\nxyzzy\n' + 75 * '-') + results = caplog.messages + assert results[-1] == 75 * '-' + '\nxyzzy\n' + 75 * '-' # bpo-32304: archives whose last byte was b'\r' were corrupted due to # normalization intended for Mac OS 9. @@ -173,8 +165,7 @@ def test_upload_correct_cr(self): # other fields that ended with \r used to be modified, now are # preserved. 
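
These tests also trade the custom get_logs()/clear_logs() helpers for pytest's built-in `caplog` fixture. A self-contained sketch of the idiom, not tied to this suite:

    import logging

    def test_echoes_response(caplog):
        caplog.set_level(logging.INFO)
        logging.getLogger('distutils').info('xyzzy')
        # caplog.messages holds each captured record's formatted message
        assert caplog.messages[-1] == 'xyzzy'
        caplog.clear()  # drop captured records if the test continues
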
pkg_dir, dist = self.create_dist( - dist_files=dist_files, - description='long description\r' + dist_files=dist_files, description='long description\r' ) cmd = upload(dist) cmd.show_response = 1 @@ -182,15 +173,28 @@ def test_upload_correct_cr(self): cmd.run() headers = dict(self.last_open.req.headers) - self.assertGreaterEqual(int(headers['Content-length']), 2172) - self.assertIn(b'long description\r', self.last_open.req.data) + assert int(headers['Content-length']) >= 2172 + assert b'long description\r' in self.last_open.req.data - def test_upload_fails(self): + def test_upload_fails(self, caplog): self.next_msg = "Not Found" self.next_code = 404 - self.assertRaises(DistutilsError, self.test_upload) + with pytest.raises(DistutilsError): + self.test_upload(caplog) - def test_wrong_exception_order(self): + @pytest.mark.parametrize( + 'exception,expected,raised_exception', + [ + (OSError('oserror'), 'oserror', OSError), + pytest.param( + HTTPError('url', 400, 'httperror', {}, None), + 'Upload failed (400): httperror', + DistutilsError, + id="HTTP 400", + ), + ], + ) + def test_wrong_exception_order(self, exception, expected, raised_exception, caplog): tmp = self.mkdtemp() path = os.path.join(tmp, 'xxx') self.write_file(path) @@ -198,26 +202,15 @@ def test_wrong_exception_order(self): self.write_file(self.rc, PYPIRC_LONG_PASSWORD) pkg_dir, dist = self.create_dist(dist_files=dist_files) - tests = [ - (OSError('oserror'), 'oserror', OSError), - (HTTPError('url', 400, 'httperror', {}, None), - 'Upload failed (400): httperror', DistutilsError), - ] - for exception, expected, raised_exception in tests: - with self.subTest(exception=type(exception).__name__): - with mock.patch('distutils.command.upload.urlopen', - new=mock.Mock(side_effect=exception)): - with self.assertRaises(raised_exception): - cmd = upload(dist) - cmd.ensure_finalized() - cmd.run() - results = self.get_logs(ERROR) - self.assertIn(expected, results[-1]) - self.clear_logs() - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + + with mock.patch( + 'distutils.command.upload.urlopen', + new=mock.Mock(side_effect=exception), + ): + with pytest.raises(raised_exception): + cmd = upload(dist) + cmd.ensure_finalized() + cmd.run() + results = caplog.messages + assert expected in results[-1] + caplog.clear() diff --git a/setuptools/_distutils/tests/test_util.py b/setuptools/_distutils/tests/test_util.py index 2738388ea0..070a277069 100644 --- a/setuptools/_distutils/tests/test_util.py +++ b/setuptools/_distutils/tests/test_util.py @@ -1,204 +1,196 @@ """Tests for distutils.util.""" import os import sys -import unittest import sysconfig as stdlib_sysconfig +import unittest.mock as mock from copy import copy -from test.support import run_unittest -from unittest import mock -from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError -from distutils.util import (get_platform, convert_path, change_root, - check_environ, split_quoted, strtobool, - rfc822_escape, byte_compile, - grok_environment_error, get_host_platform) -from distutils import util # used to patch _environ_checked +import pytest + +from distutils.util import ( + get_platform, + convert_path, + change_root, + check_environ, + split_quoted, + strtobool, + rfc822_escape, + byte_compile, + grok_environment_error, + get_host_platform, +) +from distutils import util from distutils import sysconfig -from distutils.tests import support - -class 
UtilTestCase(support.EnvironGuard, unittest.TestCase): - - def setUp(self): - super(UtilTestCase, self).setUp() - # saving the environment - self.name = os.name - self.platform = sys.platform - self.version = sys.version - self.sep = os.sep - self.join = os.path.join - self.isabs = os.path.isabs - self.splitdrive = os.path.splitdrive - self._config_vars = copy(sysconfig._config_vars) - - # patching os.uname - if hasattr(os, 'uname'): - self.uname = os.uname - self._uname = os.uname() - else: - self.uname = None - self._uname = None - - os.uname = self._get_uname - - def tearDown(self): - # getting back the environment - os.name = self.name - sys.platform = self.platform - sys.version = self.version - os.sep = self.sep - os.path.join = self.join - os.path.isabs = self.isabs - os.path.splitdrive = self.splitdrive - if self.uname is not None: - os.uname = self.uname - else: - del os.uname - sysconfig._config_vars = copy(self._config_vars) - super(UtilTestCase, self).tearDown() - - def _set_uname(self, uname): - self._uname = uname - - def _get_uname(self): - return self._uname +from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError + +@pytest.fixture(autouse=True) +def environment(monkeypatch): + monkeypatch.setattr(os, 'name', os.name) + monkeypatch.setattr(sys, 'platform', sys.platform) + monkeypatch.setattr(sys, 'version', sys.version) + monkeypatch.setattr(os, 'sep', os.sep) + monkeypatch.setattr(os.path, 'join', os.path.join) + monkeypatch.setattr(os.path, 'isabs', os.path.isabs) + monkeypatch.setattr(os.path, 'splitdrive', os.path.splitdrive) + monkeypatch.setattr(sysconfig, '_config_vars', copy(sysconfig._config_vars)) + + +@pytest.mark.usefixtures('save_env') +class TestUtil: def test_get_host_platform(self): - with unittest.mock.patch('os.name', 'nt'): - with unittest.mock.patch('sys.version', '... [... (ARM64)]'): - self.assertEqual(get_host_platform(), 'win-arm64') - with unittest.mock.patch('sys.version', '... [... (ARM)]'): - self.assertEqual(get_host_platform(), 'win-arm32') + with mock.patch('os.name', 'nt'): + with mock.patch('sys.version', '... [... (ARM64)]'): + assert get_host_platform() == 'win-arm64' + with mock.patch('sys.version', '... [... 
(ARM)]'): + assert get_host_platform() == 'win-arm32' - with unittest.mock.patch('sys.version_info', (3, 9, 0, 'final', 0)): - self.assertEqual(get_host_platform(), stdlib_sysconfig.get_platform()) + with mock.patch('sys.version_info', (3, 9, 0, 'final', 0)): + assert get_host_platform() == stdlib_sysconfig.get_platform() def test_get_platform(self): - with unittest.mock.patch('os.name', 'nt'): - with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}): - self.assertEqual(get_platform(), 'win32') - with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}): - self.assertEqual(get_platform(), 'win-amd64') - with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}): - self.assertEqual(get_platform(), 'win-arm32') - with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}): - self.assertEqual(get_platform(), 'win-arm64') + with mock.patch('os.name', 'nt'): + with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}): + assert get_platform() == 'win32' + with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}): + assert get_platform() == 'win-amd64' + with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}): + assert get_platform() == 'win-arm32' + with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}): + assert get_platform() == 'win-arm64' def test_convert_path(self): # linux/mac os.sep = '/' + def _join(path): return '/'.join(path) + os.path.join = _join - self.assertEqual(convert_path('/home/to/my/stuff'), - '/home/to/my/stuff') + assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff' # win os.sep = '\\' + def _join(*path): return '\\'.join(path) + os.path.join = _join - self.assertRaises(ValueError, convert_path, '/home/to/my/stuff') - self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/') + with pytest.raises(ValueError): + convert_path('/home/to/my/stuff') + with pytest.raises(ValueError): + convert_path('home/to/my/stuff/') - self.assertEqual(convert_path('home/to/my/stuff'), - 'home\\to\\my\\stuff') - self.assertEqual(convert_path('.'), - os.curdir) + assert convert_path('home/to/my/stuff') == 'home\\to\\my\\stuff' + assert convert_path('.') == os.curdir def test_change_root(self): # linux/mac os.name = 'posix' + def _isabs(path): return path[0] == '/' + os.path.isabs = _isabs + def _join(*path): return '/'.join(path) + os.path.join = _join - self.assertEqual(change_root('/root', '/old/its/here'), - '/root/old/its/here') - self.assertEqual(change_root('/root', 'its/here'), - '/root/its/here') + assert change_root('/root', '/old/its/here') == '/root/old/its/here' + assert change_root('/root', 'its/here') == '/root/its/here' # windows os.name = 'nt' + def _isabs(path): return path.startswith('c:\\') + os.path.isabs = _isabs + def _splitdrive(path): if path.startswith('c:'): return ('', path.replace('c:', '')) return ('', path) + os.path.splitdrive = _splitdrive + def _join(*path): return '\\'.join(path) + os.path.join = _join - self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'), - 'c:\\root\\old\\its\\here') - self.assertEqual(change_root('c:\\root', 'its\\here'), - 'c:\\root\\its\\here') + assert ( + change_root('c:\\root', 'c:\\old\\its\\here') == 'c:\\root\\old\\its\\here' + ) + assert change_root('c:\\root', 'its\\here') == 'c:\\root\\its\\here' # BugsBunny os (it's a great os) os.name = 'BugsBunny' - self.assertRaises(DistutilsPlatformError, - change_root, 'c:\\root', 'its\\here') + with pytest.raises(DistutilsPlatformError): + 
change_root('c:\\root', 'its\\here') # XXX platforms to be covered: mac def test_check_environ(self): - util._environ_checked = 0 + util.check_environ.cache_clear() os.environ.pop('HOME', None) check_environ() - self.assertEqual(os.environ['PLAT'], get_platform()) - self.assertEqual(util._environ_checked, 1) + assert os.environ['PLAT'] == get_platform() - @unittest.skipUnless(os.name == 'posix', 'specific to posix') + @pytest.mark.skipif("os.name != 'posix'") def test_check_environ_getpwuid(self): - util._environ_checked = 0 + util.check_environ.cache_clear() os.environ.pop('HOME', None) import pwd # only set pw_dir field, other fields are not used - result = pwd.struct_passwd((None, None, None, None, None, - '/home/distutils', None)) + result = pwd.struct_passwd( + (None, None, None, None, None, '/home/distutils', None) + ) with mock.patch.object(pwd, 'getpwuid', return_value=result): check_environ() - self.assertEqual(os.environ['HOME'], '/home/distutils') + assert os.environ['HOME'] == '/home/distutils' - util._environ_checked = 0 + util.check_environ.cache_clear() os.environ.pop('HOME', None) # bpo-10496: Catch pwd.getpwuid() error with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError): check_environ() - self.assertNotIn('HOME', os.environ) + assert 'HOME' not in os.environ def test_split_quoted(self): - self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'), - ['one', 'two', 'three', 'four']) + assert split_quoted('""one"" "two" \'three\' \\four') == [ + 'one', + 'two', + 'three', + 'four', + ] def test_strtobool(self): yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1') no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N') for y in yes: - self.assertTrue(strtobool(y)) + assert strtobool(y) for n in no: - self.assertFalse(strtobool(n)) + assert not strtobool(n) def test_rfc822_escape(self): header = 'I am a\npoor\nlonesome\nheader\n' res = rfc822_escape(header) - wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' - 'header%(8s)s') % {'8s': '\n'+8*' '} - self.assertEqual(res, wanted) + wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' 'header%(8s)s') % { + '8s': '\n' + 8 * ' ' + } + assert res == wanted def test_dont_write_bytecode(self): # makes sure byte_compile raise a DistutilsError @@ -206,7 +198,8 @@ def test_dont_write_bytecode(self): old_dont_write_bytecode = sys.dont_write_bytecode sys.dont_write_bytecode = True try: - self.assertRaises(DistutilsByteCompileError, byte_compile, []) + with pytest.raises(DistutilsByteCompileError): + byte_compile([]) finally: sys.dont_write_bytecode = old_dont_write_bytecode @@ -214,11 +207,4 @@ def test_grok_environment_error(self): # test obsolete function to ensure backward compat (#4931) exc = IOError("Unable to find batch file") msg = grok_environment_error(exc) - self.assertEqual(msg, "error: Unable to find batch file") - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert msg == "error: Unable to find batch file" diff --git a/setuptools/_distutils/tests/test_version.py b/setuptools/_distutils/tests/test_version.py index 8405aa3a62..ff52ea4683 100644 --- a/setuptools/_distutils/tests/test_version.py +++ b/setuptools/_distutils/tests/test_version.py @@ -1,44 +1,45 @@ """Tests for distutils.version.""" -import unittest +import pytest + import distutils from distutils.version import LooseVersion from distutils.version import StrictVersion -from test.support import run_unittest -class 
VersionTestCase(unittest.TestCase): - def setUp(self): - self.ctx = distutils.version.suppress_known_deprecation() - self.ctx.__enter__() +@pytest.fixture(autouse=True) +def suppress_deprecation(): + with distutils.version.suppress_known_deprecation(): + yield - def tearDown(self): - self.ctx.__exit__(None, None, None) +class TestVersion: def test_prerelease(self): version = StrictVersion('1.2.3a1') - self.assertEqual(version.version, (1, 2, 3)) - self.assertEqual(version.prerelease, ('a', 1)) - self.assertEqual(str(version), '1.2.3a1') + assert version.version == (1, 2, 3) + assert version.prerelease == ('a', 1) + assert str(version) == '1.2.3a1' version = StrictVersion('1.2.0') - self.assertEqual(str(version), '1.2') + assert str(version) == '1.2' def test_cmp_strict(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', ValueError), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', ValueError), - ('3.2.pl0', '3.1.1.6', ValueError), - ('2g6', '11g', ValueError), - ('0.9', '2.2', -1), - ('1.2.1', '1.2', 1), - ('1.1', '1.2.2', -1), - ('1.2', '1.1', 1), - ('1.2.1', '1.2.2', -1), - ('1.2.2', '1.2', 1), - ('1.2', '1.2.2', -1), - ('0.4.0', '0.4', 0), - ('1.13++', '5.5.kw', ValueError)) + versions = ( + ('1.5.1', '1.5.2b2', -1), + ('161', '3.10a', ValueError), + ('8.02', '8.02', 0), + ('3.4j', '1996.07.12', ValueError), + ('3.2.pl0', '3.1.1.6', ValueError), + ('2g6', '11g', ValueError), + ('0.9', '2.2', -1), + ('1.2.1', '1.2', 1), + ('1.1', '1.2.2', -1), + ('1.2', '1.1', 1), + ('1.2.1', '1.2.2', -1), + ('1.2.2', '1.2', 1), + ('1.2', '1.2.2', -1), + ('0.4.0', '0.4', 0), + ('1.13++', '5.5.kw', ValueError), + ) for v1, v2, wanted in versions: try: @@ -47,49 +48,43 @@ def test_cmp_strict(self): if wanted is ValueError: continue else: - raise AssertionError(("cmp(%s, %s) " - "shouldn't raise ValueError") - % (v1, v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + raise AssertionError( + ("cmp(%s, %s) " "shouldn't raise ValueError") % (v1, v2) + ) + assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format( + v1, v2, wanted, res + ) res = StrictVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format( + v1, v2, wanted, res + ) res = StrictVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - + assert ( + res is NotImplemented + ), 'cmp({}, {}) should be NotImplemented, got {}'.format(v1, v2, res) def test_cmp(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', 1), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', -1), - ('3.2.pl0', '3.1.1.6', 1), - ('2g6', '11g', -1), - ('0.960923', '2.2beta29', -1), - ('1.13++', '5.5.kw', -1)) - + versions = ( + ('1.5.1', '1.5.2b2', -1), + ('161', '3.10a', 1), + ('8.02', '8.02', 0), + ('3.4j', '1996.07.12', -1), + ('3.2.pl0', '3.1.1.6', 1), + ('2g6', '11g', -1), + ('0.960923', '2.2beta29', -1), + ('1.13++', '5.5.kw', -1), + ) for v1, v2, wanted in versions: res = LooseVersion(v1)._cmp(LooseVersion(v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format( + v1, v2, wanted, res + ) res = LooseVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format( + v1, 
v2, wanted, res + ) res = LooseVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) + assert ( + res is NotImplemented + ), 'cmp({}, {}) should be NotImplemented, got {}'.format(v1, v2, res) diff --git a/setuptools/_distutils/tests/test_versionpredicate.py b/setuptools/_distutils/tests/test_versionpredicate.py index 28ae09dc20..e69de29bb2 100644 --- a/setuptools/_distutils/tests/test_versionpredicate.py +++ b/setuptools/_distutils/tests/test_versionpredicate.py @@ -1,13 +0,0 @@ -"""Tests harness for distutils.versionpredicate. - -""" - -import distutils.versionpredicate -import doctest -from test.support import run_unittest - -def test_suite(): - return doctest.DocTestSuite(distutils.versionpredicate) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/unix_compat.py b/setuptools/_distutils/tests/unix_compat.py index b7718c2641..95fc8eebe2 100644 --- a/setuptools/_distutils/tests/unix_compat.py +++ b/setuptools/_distutils/tests/unix_compat.py @@ -1,5 +1,4 @@ import sys -import unittest try: import grp @@ -7,10 +6,13 @@ except ImportError: grp = pwd = None +import pytest + UNIX_ID_SUPPORT = grp and pwd UID_0_SUPPORT = UNIX_ID_SUPPORT and sys.platform != "cygwin" -require_unix_id = unittest.skipUnless( - UNIX_ID_SUPPORT, "Requires grp and pwd support") -require_uid_0 = unittest.skipUnless(UID_0_SUPPORT, "Requires UID 0 support") +require_unix_id = pytest.mark.skipif( + not UNIX_ID_SUPPORT, reason="Requires grp and pwd support" +) +require_uid_0 = pytest.mark.skipif(not UID_0_SUPPORT, reason="Requires UID 0 support") diff --git a/setuptools/_distutils/tests/xxmodule-3.8.c b/setuptools/_distutils/tests/xxmodule-3.8.c new file mode 100644 index 0000000000..0250031d72 --- /dev/null +++ b/setuptools/_distutils/tests/xxmodule-3.8.c @@ -0,0 +1,411 @@ + +/* Use this file as a template to start implementing a module that + also declares object types. All occurrences of 'Xxo' should be changed + to something reasonable for your objects. After that, all other + occurrences of 'xx' should be changed to something reasonable for your + module. If your module is named foo your sourcefile should be named + foomodule.c. + + You will probably want to delete all references to 'x_attr' and add + your own types of attributes instead. Maybe you want to name your + local variables other than 'self'. If your object type is needed in + other files, you'll have to create a file "foobarobject.h"; see + floatobject.h for an example. 
*/ + +/* Xxo objects */ + +#include "Python.h" + +static PyObject *ErrorObject; + +typedef struct { + PyObject_HEAD + PyObject *x_attr; /* Attributes dictionary */ +} XxoObject; + +static PyTypeObject Xxo_Type; + +#define XxoObject_Check(v) (Py_TYPE(v) == &Xxo_Type) + +static XxoObject * +newXxoObject(PyObject *arg) +{ + XxoObject *self; + self = PyObject_New(XxoObject, &Xxo_Type); + if (self == NULL) + return NULL; + self->x_attr = NULL; + return self; +} + +/* Xxo methods */ + +static void +Xxo_dealloc(XxoObject *self) +{ + Py_XDECREF(self->x_attr); + PyObject_Del(self); +} + +static PyObject * +Xxo_demo(XxoObject *self, PyObject *args) +{ + if (!PyArg_ParseTuple(args, ":demo")) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +static PyMethodDef Xxo_methods[] = { + {"demo", (PyCFunction)Xxo_demo, METH_VARARGS, + PyDoc_STR("demo() -> None")}, + {NULL, NULL} /* sentinel */ +}; + +static PyObject * +Xxo_getattro(XxoObject *self, PyObject *name) +{ + if (self->x_attr != NULL) { + PyObject *v = PyDict_GetItemWithError(self->x_attr, name); + if (v != NULL) { + Py_INCREF(v); + return v; + } + else if (PyErr_Occurred()) { + return NULL; + } + } + return PyObject_GenericGetAttr((PyObject *)self, name); +} + +static int +Xxo_setattr(XxoObject *self, const char *name, PyObject *v) +{ + if (self->x_attr == NULL) { + self->x_attr = PyDict_New(); + if (self->x_attr == NULL) + return -1; + } + if (v == NULL) { + int rv = PyDict_DelItemString(self->x_attr, name); + if (rv < 0 && PyErr_ExceptionMatches(PyExc_KeyError)) + PyErr_SetString(PyExc_AttributeError, + "delete non-existing Xxo attribute"); + return rv; + } + else + return PyDict_SetItemString(self->x_attr, name, v); +} + +static PyTypeObject Xxo_Type = { + /* The ob_type field must be initialized in the module init function + * to be portable to Windows without using C++. 
*/ + PyVarObject_HEAD_INIT(NULL, 0) + "xxmodule.Xxo", /*tp_name*/ + sizeof(XxoObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + (destructor)Xxo_dealloc, /*tp_dealloc*/ + 0, /*tp_vectorcall_offset*/ + (getattrfunc)0, /*tp_getattr*/ + (setattrfunc)Xxo_setattr, /*tp_setattr*/ + 0, /*tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + (getattrofunc)Xxo_getattro, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT, /*tp_flags*/ + 0, /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + Xxo_methods, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ +}; +/* --------------------------------------------------------------------- */ + +/* Function of two integers returning integer */ + +PyDoc_STRVAR(xx_foo_doc, +"foo(i,j)\n\ +\n\ +Return the sum of i and j."); + +static PyObject * +xx_foo(PyObject *self, PyObject *args) +{ + long i, j; + long res; + if (!PyArg_ParseTuple(args, "ll:foo", &i, &j)) + return NULL; + res = i+j; /* XXX Do something here */ + return PyLong_FromLong(res); +} + + +/* Function of no arguments returning new Xxo object */ + +static PyObject * +xx_new(PyObject *self, PyObject *args) +{ + XxoObject *rv; + + if (!PyArg_ParseTuple(args, ":new")) + return NULL; + rv = newXxoObject(args); + if (rv == NULL) + return NULL; + return (PyObject *)rv; +} + +/* Example with subtle bug from extensions manual ("Thin Ice"). */ + +static PyObject * +xx_bug(PyObject *self, PyObject *args) +{ + PyObject *list, *item; + + if (!PyArg_ParseTuple(args, "O:bug", &list)) + return NULL; + + item = PyList_GetItem(list, 0); + /* Py_INCREF(item); */ + PyList_SetItem(list, 1, PyLong_FromLong(0L)); + PyObject_Print(item, stdout, 0); + printf("\n"); + /* Py_DECREF(item); */ + + Py_INCREF(Py_None); + return Py_None; +} + +/* Test bad format character */ + +static PyObject * +xx_roj(PyObject *self, PyObject *args) +{ + PyObject *a; + long b; + if (!PyArg_ParseTuple(args, "O#:roj", &a, &b)) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + + +/* ---------- */ + +static PyTypeObject Str_Type = { + /* The ob_type field must be initialized in the module init function + * to be portable to Windows without using C++. 
*/ + PyVarObject_HEAD_INIT(NULL, 0) + "xxmodule.Str", /*tp_name*/ + 0, /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + 0, /*tp_dealloc*/ + 0, /*tp_vectorcall_offset*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ + 0, /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /* see PyInit_xx */ /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ +}; + +/* ---------- */ + +static PyObject * +null_richcompare(PyObject *self, PyObject *other, int op) +{ + Py_INCREF(Py_NotImplemented); + return Py_NotImplemented; +} + +static PyTypeObject Null_Type = { + /* The ob_type field must be initialized in the module init function + * to be portable to Windows without using C++. */ + PyVarObject_HEAD_INIT(NULL, 0) + "xxmodule.Null", /*tp_name*/ + 0, /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + 0, /*tp_dealloc*/ + 0, /*tp_vectorcall_offset*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ + 0, /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + null_richcompare, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /* see PyInit_xx */ /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + PyType_GenericNew, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ +}; + + +/* ---------- */ + + +/* List of functions defined in the module */ + +static PyMethodDef xx_methods[] = { + {"roj", xx_roj, METH_VARARGS, + PyDoc_STR("roj(a,b) -> None")}, + {"foo", xx_foo, METH_VARARGS, + xx_foo_doc}, + {"new", xx_new, METH_VARARGS, + PyDoc_STR("new() -> new Xx object")}, + {"bug", xx_bug, METH_VARARGS, + PyDoc_STR("bug(o) -> None")}, + {NULL, NULL} /* sentinel */ +}; + +PyDoc_STRVAR(module_doc, +"This is a template module just for instruction."); + + +static int +xx_exec(PyObject *m) +{ + /* Slot initialization is subject to the rules of initializing globals. + C99 requires the initializers to be "address constants". Function + designators like 'PyType_GenericNew', with implicit conversion to + a pointer, are valid C99 address constants. + + However, the unary '&' operator applied to a non-static variable + like 'PyBaseObject_Type' is not required to produce an address + constant. Compilers may support this (gcc does), MSVC does not. + + Both compilers are strictly standard conforming in this particular + behavior. + */ + Null_Type.tp_base = &PyBaseObject_Type; + Str_Type.tp_base = &PyUnicode_Type; + + /* Finalize the type object including setting type of the new type + * object; doing it here is required for portability, too. 
*/ + if (PyType_Ready(&Xxo_Type) < 0) + goto fail; + + /* Add some symbolic constants to the module */ + if (ErrorObject == NULL) { + ErrorObject = PyErr_NewException("xx.error", NULL, NULL); + if (ErrorObject == NULL) + goto fail; + } + Py_INCREF(ErrorObject); + PyModule_AddObject(m, "error", ErrorObject); + + /* Add Str */ + if (PyType_Ready(&Str_Type) < 0) + goto fail; + PyModule_AddObject(m, "Str", (PyObject *)&Str_Type); + + /* Add Null */ + if (PyType_Ready(&Null_Type) < 0) + goto fail; + PyModule_AddObject(m, "Null", (PyObject *)&Null_Type); + return 0; + fail: + Py_XDECREF(m); + return -1; +} + +static struct PyModuleDef_Slot xx_slots[] = { + {Py_mod_exec, xx_exec}, + {0, NULL}, +}; + +static struct PyModuleDef xxmodule = { + PyModuleDef_HEAD_INIT, + "xx", + module_doc, + 0, + xx_methods, + xx_slots, + NULL, + NULL, + NULL +}; + +/* Export function for the module (*must* be called PyInit_xx) */ + +PyMODINIT_FUNC +PyInit_xx(void) +{ + return PyModuleDef_Init(&xxmodule); +} diff --git a/setuptools/_distutils/tests/xxmodule.c b/setuptools/_distutils/tests/xxmodule.c new file mode 100644 index 0000000000..a6e5071d1d --- /dev/null +++ b/setuptools/_distutils/tests/xxmodule.c @@ -0,0 +1,412 @@ + +/* Use this file as a template to start implementing a module that + also declares object types. All occurrences of 'Xxo' should be changed + to something reasonable for your objects. After that, all other + occurrences of 'xx' should be changed to something reasonable for your + module. If your module is named foo your sourcefile should be named + foomodule.c. + + You will probably want to delete all references to 'x_attr' and add + your own types of attributes instead. Maybe you want to name your + local variables other than 'self'. If your object type is needed in + other files, you'll have to create a file "foobarobject.h"; see + floatobject.h for an example. 
*/ + +/* Xxo objects */ + +#include "Python.h" + +static PyObject *ErrorObject; + +typedef struct { + PyObject_HEAD + PyObject *x_attr; /* Attributes dictionary */ +} XxoObject; + +static PyTypeObject Xxo_Type; + +#define XxoObject_Check(v) Py_IS_TYPE(v, &Xxo_Type) + +static XxoObject * +newXxoObject(PyObject *arg) +{ + XxoObject *self; + self = PyObject_New(XxoObject, &Xxo_Type); + if (self == NULL) + return NULL; + self->x_attr = NULL; + return self; +} + +/* Xxo methods */ + +static void +Xxo_dealloc(XxoObject *self) +{ + Py_XDECREF(self->x_attr); + PyObject_Free(self); +} + +static PyObject * +Xxo_demo(XxoObject *self, PyObject *args) +{ + if (!PyArg_ParseTuple(args, ":demo")) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +static PyMethodDef Xxo_methods[] = { + {"demo", (PyCFunction)Xxo_demo, METH_VARARGS, + PyDoc_STR("demo() -> None")}, + {NULL, NULL} /* sentinel */ +}; + +static PyObject * +Xxo_getattro(XxoObject *self, PyObject *name) +{ + if (self->x_attr != NULL) { + PyObject *v = PyDict_GetItemWithError(self->x_attr, name); + if (v != NULL) { + Py_INCREF(v); + return v; + } + else if (PyErr_Occurred()) { + return NULL; + } + } + return PyObject_GenericGetAttr((PyObject *)self, name); +} + +static int +Xxo_setattr(XxoObject *self, const char *name, PyObject *v) +{ + if (self->x_attr == NULL) { + self->x_attr = PyDict_New(); + if (self->x_attr == NULL) + return -1; + } + if (v == NULL) { + int rv = PyDict_DelItemString(self->x_attr, name); + if (rv < 0 && PyErr_ExceptionMatches(PyExc_KeyError)) + PyErr_SetString(PyExc_AttributeError, + "delete non-existing Xxo attribute"); + return rv; + } + else + return PyDict_SetItemString(self->x_attr, name, v); +} + +static PyTypeObject Xxo_Type = { + /* The ob_type field must be initialized in the module init function + * to be portable to Windows without using C++. 
*/ + PyVarObject_HEAD_INIT(NULL, 0) + "xxmodule.Xxo", /*tp_name*/ + sizeof(XxoObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + (destructor)Xxo_dealloc, /*tp_dealloc*/ + 0, /*tp_vectorcall_offset*/ + (getattrfunc)0, /*tp_getattr*/ + (setattrfunc)Xxo_setattr, /*tp_setattr*/ + 0, /*tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + (getattrofunc)Xxo_getattro, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT, /*tp_flags*/ + 0, /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + Xxo_methods, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ +}; +/* --------------------------------------------------------------------- */ + +/* Function of two integers returning integer */ + +PyDoc_STRVAR(xx_foo_doc, +"foo(i,j)\n\ +\n\ +Return the sum of i and j."); + +static PyObject * +xx_foo(PyObject *self, PyObject *args) +{ + long i, j; + long res; + if (!PyArg_ParseTuple(args, "ll:foo", &i, &j)) + return NULL; + res = i+j; /* XXX Do something here */ + return PyLong_FromLong(res); +} + + +/* Function of no arguments returning new Xxo object */ + +static PyObject * +xx_new(PyObject *self, PyObject *args) +{ + XxoObject *rv; + + if (!PyArg_ParseTuple(args, ":new")) + return NULL; + rv = newXxoObject(args); + if (rv == NULL) + return NULL; + return (PyObject *)rv; +} + +/* Example with subtle bug from extensions manual ("Thin Ice"). */ + +static PyObject * +xx_bug(PyObject *self, PyObject *args) +{ + PyObject *list, *item; + + if (!PyArg_ParseTuple(args, "O:bug", &list)) + return NULL; + + item = PyList_GetItem(list, 0); + /* Py_INCREF(item); */ + PyList_SetItem(list, 1, PyLong_FromLong(0L)); + PyObject_Print(item, stdout, 0); + printf("\n"); + /* Py_DECREF(item); */ + + Py_INCREF(Py_None); + return Py_None; +} + +/* Test bad format character */ + +static PyObject * +xx_roj(PyObject *self, PyObject *args) +{ + PyObject *a; + long b; + if (!PyArg_ParseTuple(args, "O#:roj", &a, &b)) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + + +/* ---------- */ + +static PyTypeObject Str_Type = { + /* The ob_type field must be initialized in the module init function + * to be portable to Windows without using C++. 
*/ + PyVarObject_HEAD_INIT(NULL, 0) + "xxmodule.Str", /*tp_name*/ + 0, /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + 0, /*tp_dealloc*/ + 0, /*tp_vectorcall_offset*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ + 0, /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /* see PyInit_xx */ /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ +}; + +/* ---------- */ + +static PyObject * +null_richcompare(PyObject *self, PyObject *other, int op) +{ + Py_INCREF(Py_NotImplemented); + return Py_NotImplemented; +} + +static PyTypeObject Null_Type = { + /* The ob_type field must be initialized in the module init function + * to be portable to Windows without using C++. */ + PyVarObject_HEAD_INIT(NULL, 0) + "xxmodule.Null", /*tp_name*/ + 0, /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + 0, /*tp_dealloc*/ + 0, /*tp_vectorcall_offset*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ + 0, /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + null_richcompare, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /* see PyInit_xx */ /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + PyType_GenericNew, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ +}; + + +/* ---------- */ + + +/* List of functions defined in the module */ + +static PyMethodDef xx_methods[] = { + {"roj", xx_roj, METH_VARARGS, + PyDoc_STR("roj(a,b) -> None")}, + {"foo", xx_foo, METH_VARARGS, + xx_foo_doc}, + {"new", xx_new, METH_VARARGS, + PyDoc_STR("new() -> new Xx object")}, + {"bug", xx_bug, METH_VARARGS, + PyDoc_STR("bug(o) -> None")}, + {NULL, NULL} /* sentinel */ +}; + +PyDoc_STRVAR(module_doc, +"This is a template module just for instruction."); + + +static int +xx_exec(PyObject *m) +{ + /* Slot initialization is subject to the rules of initializing globals. + C99 requires the initializers to be "address constants". Function + designators like 'PyType_GenericNew', with implicit conversion to + a pointer, are valid C99 address constants. + + However, the unary '&' operator applied to a non-static variable + like 'PyBaseObject_Type' is not required to produce an address + constant. Compilers may support this (gcc does), MSVC does not. + + Both compilers are strictly standard conforming in this particular + behavior. + */ + Null_Type.tp_base = &PyBaseObject_Type; + Str_Type.tp_base = &PyUnicode_Type; + + /* Finalize the type object including setting type of the new type + * object; doing it here is required for portability, too. 
*/ + if (PyType_Ready(&Xxo_Type) < 0) { + return -1; + } + + /* Add some symbolic constants to the module */ + if (ErrorObject == NULL) { + ErrorObject = PyErr_NewException("xx.error", NULL, NULL); + if (ErrorObject == NULL) { + return -1; + } + } + int rc = PyModule_AddType(m, (PyTypeObject *)ErrorObject); + Py_DECREF(ErrorObject); + if (rc < 0) { + return -1; + } + + /* Add Str and Null types */ + if (PyModule_AddType(m, &Str_Type) < 0) { + return -1; + } + if (PyModule_AddType(m, &Null_Type) < 0) { + return -1; + } + + return 0; +} + +static struct PyModuleDef_Slot xx_slots[] = { + {Py_mod_exec, xx_exec}, + {0, NULL}, +}; + +static struct PyModuleDef xxmodule = { + PyModuleDef_HEAD_INIT, + "xx", + module_doc, + 0, + xx_methods, + xx_slots, + NULL, + NULL, + NULL +}; + +/* Export function for the module (*must* be called PyInit_xx) */ + +PyMODINIT_FUNC +PyInit_xx(void) +{ + return PyModuleDef_Init(&xxmodule); +} diff --git a/setuptools/_distutils/text_file.py b/setuptools/_distutils/text_file.py index 93abad38f4..36f947e51c 100644 --- a/setuptools/_distutils/text_file.py +++ b/setuptools/_distutils/text_file.py @@ -4,84 +4,87 @@ that (optionally) takes care of stripping comments, ignoring blank lines, and joining lines with backslashes.""" -import sys, io +import sys class TextFile: """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. - - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. 
- collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not.""" - - default_options = { 'strip_comments': 1, - 'skip_blanks': 1, - 'lstrip_ws': 0, - 'rstrip_ws': 1, - 'join_lines': 0, - 'collapse_join': 0, - 'errors': 'strict', - } + commonly want to do when processing a text file that has some + line-by-line syntax: strip comments (as long as "#" is your + comment character), skip blank lines, join adjacent lines by + escaping the newline (ie. backslash at end of line), strip + leading and/or trailing whitespace. All of these are optional + and independently controllable. + + Provides a 'warn()' method so you can generate warning messages that + report physical line number, even if the logical line in question + spans multiple physical lines. Also provides 'unreadline()' for + implementing line-at-a-time lookahead. + + Constructor is called as: + + TextFile (filename=None, file=None, **options) + + It bombs (RuntimeError) if both 'filename' and 'file' are None; + 'filename' should be a string, and 'file' a file object (or + something that provides 'readline()' and 'close()' methods). It is + recommended that you supply at least 'filename', so that TextFile + can include it in warning messages. If 'file' is not supplied, + TextFile creates its own using 'io.open()'. + + The options are all boolean, and affect the value returned by + 'readline()': + strip_comments [default: true] + strip from "#" to end-of-line, as well as any whitespace + leading up to the "#" -- unless it is escaped by a backslash + lstrip_ws [default: false] + strip leading whitespace from each line before returning it + rstrip_ws [default: true] + strip trailing whitespace (including line terminator!) from + each line before returning it + skip_blanks [default: true] + skip lines that are empty *after* stripping comments and + whitespace. (If both lstrip_ws and rstrip_ws are false, + then some lines may consist of solely whitespace: these will + *not* be skipped, even if 'skip_blanks' is true.) + join_lines [default: false] + if a backslash is the last non-newline character on a line + after stripping comments and whitespace, join the following line + to it to form one "logical line"; if N consecutive lines end + with a backslash, then N+1 physical lines will be joined to + form one logical line. + collapse_join [default: false] + strip leading whitespace from lines that are joined to their + predecessor; only matters if (join_lines and not lstrip_ws) + errors [default: 'strict'] + error handler used to decode the file content + + Note that since 'rstrip_ws' can strip the trailing newline, the + semantics of 'readline()' must differ from those of the builtin file + object's 'readline()' method!
In particular, 'readline()' returns + None for end-of-file: an empty string might just be a blank line (or + an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is + not.""" + + default_options = { + 'strip_comments': 1, + 'skip_blanks': 1, + 'lstrip_ws': 0, + 'rstrip_ws': 1, + 'join_lines': 0, + 'collapse_join': 0, + 'errors': 'strict', + } def __init__(self, filename=None, file=None, **options): """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'.""" + (a string) and 'file' (a file-like object) must be supplied. + The keyword argument options are described above and affect + the values returned by 'readline()'.""" if filename is None and file is None: - raise RuntimeError("you must supply either or both of 'filename' and 'file'") + raise RuntimeError( + "you must supply either or both of 'filename' and 'file'" + ) # set values for all options -- either from client option hash # or fallback to default_options @@ -101,7 +104,7 @@ def __init__(self, filename=None, file=None, **options): else: self.filename = filename self.file = file - self.current_line = 0 # assuming that file is at BOF! + self.current_line = 0 # assuming that file is at BOF! # 'linebuf' is a stack of lines that will be emptied before we # actually read from the file; it's only populated by an @@ -110,14 +113,14 @@ def __init__(self, filename=None, file=None, **options): def open(self, filename): """Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor.""" + 'filename' and 'file' arguments to the constructor.""" self.filename = filename - self.file = io.open(self.filename, 'r', errors=self.errors) + self.file = open(self.filename, errors=self.errors) self.current_line = 0 def close(self): """Close the current file and forget everything we know about it - (filename, current line number).""" + (filename, current line number).""" file = self.file self.file = None self.filename = None @@ -141,24 +144,24 @@ def error(self, msg, line=None): def warn(self, msg, line=None): """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line.""" + line in the current file. If the current logical line in the + file spans multiple physical lines, the warning refers to the + whole range, eg. "lines 3-5". If 'line' is supplied, it overrides + the current line number; it may be a list or tuple to indicate a + range of physical lines, or an integer for a single physical + line.""" sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") - def readline(self): + def readline(self): # noqa: C901 """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read.
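Taken together, the options above give readline() fairly involved behavior, so a usage sketch may help; the sample text and option values are assumptions for illustration (the import path is the stdlib distutils one):

import io
from distutils.text_file import TextFile

content = (
    "line 1 \\\n"           # trailing backslash: joined to the next line
    "  continued # note\n"  # comment stripped, leading space collapsed
    "\n"                    # blank: skipped
    "line 2\n"
)
tf = TextFile(file=io.StringIO(content), join_lines=1, collapse_join=1)
print(tf.readline())  # 'line 1 continued'
print(tf.readline())  # 'line 2'
print(tf.readline())  # None, not '': see the note above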
Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not.""" + from an internal buffer if lines have previously been "unread" + with 'unreadline()'). If the 'join_lines' option is true, this + may involve reading multiple physical lines concatenated into a + single string. Updates the current line number, so calling + 'warn()' after 'readline()' emits a warning about the physical + line(s) just read. Returns None on end-of-file, since the empty + string can occur if 'rstrip_ws' is true but 'skip_blanks' is + not.""" # If any "unread" lines waiting in 'linebuf', return the top # one. (We don't actually buffer read-ahead data -- lines only # get put in 'linebuf' if the client explicitly does an @@ -177,7 +180,6 @@ def readline(self): line = None if self.strip_comments and line: - # Look for the first "#" in the line. If none, never # mind. If we find one and it's the first character, or # is not preceded by "\", then it starts a comment -- @@ -187,12 +189,12 @@ # lurking in there) and otherwise leave the line alone. pos = line.find("#") - if pos == -1: # no "#" -- no comments + if pos == -1: # no "#" -- no comments pass # It's definitely a comment -- either "#" is the first # character, or it's elsewhere and unescaped. - elif pos == 0 or line[pos-1] != "\\": + elif pos == 0 or line[pos - 1] != "\\": # Have to preserve the trailing newline, because it's # the job of a later step (rstrip_ws) to remove it -- # and if rstrip_ws is false, we'd better preserve it! @@ -211,15 +213,14 @@ def readline(self): # result in "hello there". if line.strip() == "": continue - else: # it's an escaped "#" + else: # it's an escaped "#" line = line.replace("\\#", "#") # did previous line end with a backslash? then accumulate if self.join_lines and buildup_line: # oops: end of file if line is None: - self.warn("continuation line immediately precedes " - "end-of-file") + self.warn("continuation line immediately precedes " "end-of-file") return buildup_line if self.collapse_join: @@ -230,11 +231,10 @@ def readline(self): if isinstance(self.current_line, list): self.current_line[1] = self.current_line[1] + 1 else: - self.current_line = [self.current_line, - self.current_line + 1] + self.current_line = [self.current_line, self.current_line + 1] # just an ordinary line, read it as usual else: - if line is None: # eof + if line is None: # eof return None # still have to be careful about incrementing the line number! @@ -254,7 +254,7 @@ def readline(self): # blank line (whether we rstrip'ed or not)? skip to next line # if appropriate - if (line == '' or line == '\n') and self.skip_blanks: + if line in ('', '\n') and self.skip_blanks: continue if self.join_lines: @@ -271,7 +271,7 @@ def readline(self): def readlines(self): """Read and return the list of all logical lines remaining in the - current file.""" + current file.""" lines = [] while True: line = self.readline() @@ -281,6 +281,6 @@ def readlines(self): def unreadline(self, line): """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls.
Handy for implementing + a parser with line-at-a-time lookahead.""" self.linebuf.append(line) diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py index a07e598890..6ca2332ae1 100644 --- a/setuptools/_distutils/unixccompiler.py +++ b/setuptools/_distutils/unixccompiler.py @@ -13,18 +13,18 @@ * link shared library handled by 'cc -shared' """ -import os, sys, re, shlex - -from distutils import sysconfig -from distutils.dep_util import newer -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options, gen_lib_options -from distutils.errors import \ - DistutilsExecError, CompileError, LibError, LinkError -from distutils import log - -if sys.platform == 'darwin': - import _osx_support +import os +import sys +import re +import shlex +import itertools + +from . import sysconfig +from .dep_util import newer +from .ccompiler import CCompiler, gen_preprocess_options, gen_lib_options +from .errors import DistutilsExecError, CompileError, LibError, LinkError +from ._log import log +from ._macos_compat import compiler_fixup # XXX Things not currently handled: # * optimization/debug/warning flags; we just use whatever's in Python's @@ -42,8 +42,67 @@ # options and carry on. -class UnixCCompiler(CCompiler): +def _split_env(cmd): + """ + For macOS, split command into 'env' portion (if any) + and the rest of the linker command. + + >>> _split_env(['a', 'b', 'c']) + ([], ['a', 'b', 'c']) + >>> _split_env(['/usr/bin/env', 'A=3', 'gcc']) + (['/usr/bin/env', 'A=3'], ['gcc']) + """ + pivot = 0 + if os.path.basename(cmd[0]) == "env": + pivot = 1 + while '=' in cmd[pivot]: + pivot += 1 + return cmd[:pivot], cmd[pivot:] + + +def _split_aix(cmd): + """ + AIX platforms prefix the compiler with the ld_so_aix + script, so split that from the linker command. + + >>> _split_aix(['a', 'b', 'c']) + ([], ['a', 'b', 'c']) + >>> _split_aix(['/bin/foo/ld_so_aix', 'gcc']) + (['/bin/foo/ld_so_aix'], ['gcc']) + """ + pivot = os.path.basename(cmd[0]) == 'ld_so_aix' + return cmd[:pivot], cmd[pivot:] + + +def _linker_params(linker_cmd, compiler_cmd): + """ + The linker command usually begins with the compiler + command (possibly multiple elements), followed by zero or more + params for shared library building. + + If the LDSHARED env variable overrides the linker command, + however, the commands may not match. + + Return the best guess of the linker parameters by stripping + the linker command. If the compiler command does not + match the linker command, assume the linker command is + just the first element. + + >>> _linker_params('gcc foo bar'.split(), ['gcc']) + ['foo', 'bar'] + >>> _linker_params('gcc foo bar'.split(), ['other']) + ['foo', 'bar'] + >>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split()) + ['foo', 'bar'] + >>> _linker_params(['gcc'], ['gcc']) + [] + """ + c_len = len(compiler_cmd) + pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1 + return linker_cmd[pivot:] + +class UnixCCompiler(CCompiler): compiler_type = 'unix' # These are used by CCompiler in two places: the constructor sets @@ -52,15 +111,16 @@ class UnixCCompiler(CCompiler): # are pretty generic; they will probably have to be set by an outsider # (eg. using information discovered by the sysconfig about building # Python extensions). 
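With these helpers in place, the C++-link special case later in link() becomes a pipeline of small, doctested functions instead of index arithmetic. A hedged sketch of that composition (the wrapper name cxx_linker is illustrative, not part of the patch):

from setuptools._distutils.unixccompiler import (  # assumed import path
    _split_env, _split_aix, _linker_params,
)

def cxx_linker(linker_so, linker_exe, compiler_cxx):
    # Peel off any 'env VAR=...' prefix and any ld_so_aix wrapper,
    # then graft the C++ driver onto the shared-library params.
    env, linker_ne = _split_env(linker_so)
    aix, linker_na = _split_aix(linker_ne)
    _, compiler_cxx_ne = _split_env(compiler_cxx)
    _, linker_exe_ne = _split_env(linker_exe)
    params = _linker_params(linker_na, linker_exe_ne)
    return env + aix + compiler_cxx_ne + params

print(cxx_linker(['cc', '-shared'], ['cc'], ['c++']))
# ['c++', '-shared']
print(cxx_linker(['/usr/bin/env', 'A=3', 'cc', '-shared'], ['cc'], ['c++']))
# ['/usr/bin/env', 'A=3', 'c++', '-shared']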
- executables = {'preprocessor' : None, - 'compiler' : ["cc"], - 'compiler_so' : ["cc"], - 'compiler_cxx' : ["cc"], - 'linker_so' : ["cc", "-shared"], - 'linker_exe' : ["cc"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : None, - } + executables = { + 'preprocessor': None, + 'compiler': ["cc"], + 'compiler_so': ["cc"], + 'compiler_cxx': ["cc"], + 'linker_so': ["cc", "-shared"], + 'linker_exe': ["cc"], + 'archiver': ["ar", "-cr"], + 'ranlib': None, + } if sys.platform[:6] == "darwin": executables['ranlib'] = ["ranlib"] @@ -71,7 +131,7 @@ class UnixCCompiler(CCompiler): # reasonable common default here, but it's not necessarily used on all # Unices! - src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] + src_extensions = [".c", ".C", ".cc", ".cxx", ".cpp", ".m"] obj_extension = ".o" static_lib_extension = ".a" shared_lib_extension = ".so" @@ -82,8 +142,15 @@ class UnixCCompiler(CCompiler): if sys.platform == "cygwin": exe_extension = ".exe" - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): + def preprocess( + self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None, + ): fixed_args = self._fix_compile_args(None, macros, include_dirs) ignore, macros, include_dirs = fixed_args pp_opts = gen_preprocess_options(macros, include_dirs) @@ -96,41 +163,39 @@ def preprocess(self, source, output_file=None, macros=None, pp_args.extend(extra_postargs) pp_args.append(source) - # We need to preprocess: either we're being forced to, or we're - # generating output to stdout, or there's a target output file and - # the source file is newer than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - raise CompileError(msg) + # reasons to preprocess: + # - force is indicated + # - output is directed to stdout + # - source file is newer than the target + preprocess = self.force or output_file is None or newer(source, output_file) + if not preprocess: + return + + if output_file: + self.mkpath(os.path.dirname(output_file)) + + try: + self.spawn(pp_args) + except DistutilsExecError as msg: + raise CompileError(msg) def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _osx_support.compiler_fixup(compiler_so, - cc_args + extra_postargs) + compiler_so = compiler_fixup(self.compiler_so, cc_args + extra_postargs) try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) + self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) except DistutilsExecError as msg: raise CompileError(msg) - def create_static_lib(self, objects, output_libname, - output_dir=None, debug=0, target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): self.mkpath(os.path.dirname(output_filename)) - self.spawn(self.archiver + - [output_filename] + - objects + self.objects) + self.spawn(self.archiver + [output_filename] + objects + self.objects) # Not many Unices 
required ranlib anymore -- SunOS 4.x is, I # think the only major Unix that does. Maybe we need some @@ -145,26 +210,34 @@ def create_static_lib(self, objects, output_libname, else: log.debug("skipping %s (up-to-date)", output_filename) - def link(self, target_desc, objects, - output_filename, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) libraries, library_dirs, runtime_library_dirs = fixed_args - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if not isinstance(output_dir, (str, type(None))): raise TypeError("'output_dir' must be a string or None") if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) if self._need_link(objects, output_filename): - ld_args = (objects + self.objects + - lib_opts + ['-o', output_filename]) + ld_args = objects + self.objects + lib_opts + ['-o', output_filename] if debug: ld_args[:0] = ['-g'] if extra_preargs: @@ -173,33 +246,22 @@ def link(self, target_desc, objects, ld_args.extend(extra_postargs) self.mkpath(os.path.dirname(output_filename)) try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] + # Select a linker based on context: linker_exe when + # building an executable or linker_so (with shared options) + # when building a shared library. + building_exe = target_desc == CCompiler.EXECUTABLE + linker = (self.linker_exe if building_exe else self.linker_so)[:] + if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. 
- i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i += 1 - - if os.path.basename(linker[i]) == 'ld_so_aix': - # AIX platforms prefix the compiler with the ld_so_aix - # script, so we need to adjust our linker index - offset = 1 - else: - offset = 0 - - linker[i+offset] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _osx_support.compiler_fixup(linker, ld_args) + env, linker_ne = _split_env(linker) + aix, linker_na = _split_aix(linker_ne) + _, compiler_cxx_ne = _split_env(self.compiler_cxx) + _, linker_exe_ne = _split_env(self.linker_exe) + + params = _linker_params(linker_na, linker_exe_ne) + linker = env + aix + compiler_cxx_ne + params + + linker = compiler_fixup(linker, ld_args) self.spawn(linker + ld_args) except DistutilsExecError as msg: @@ -214,8 +276,10 @@ def link(self, target_desc, objects, def library_dir_option(self, dir): return "-L" + dir - def _is_gcc(self, compiler_name): - return "gcc" in compiler_name or "g++" in compiler_name + def _is_gcc(self): + cc_var = sysconfig.get_config_var("CC") + compiler = os.path.basename(shlex.split(cc_var)[0]) + return "gcc" in compiler or "g++" in compiler def runtime_library_dir_option(self, dir): # XXX Hackish, at the very least. See Python bug #445902: @@ -231,20 +295,21 @@ def runtime_library_dir_option(self, dir): # this time, there's no way to determine this information from # the configuration data stored in the Python installation, so # we use this hack. - compiler = os.path.basename(shlex.split(sysconfig.get_config_var("CC"))[0]) if sys.platform[:6] == "darwin": from distutils.util import get_macosx_target_ver, split_version + macosx_target_ver = get_macosx_target_ver() if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]: return "-Wl,-rpath," + dir - else: # no support for -rpath on earlier macOS versions + else: # no support for -rpath on earlier macOS versions return "-L" + dir elif sys.platform[:7] == "freebsd": return "-Wl,-rpath=" + dir elif sys.platform[:5] == "hp-ux": - if self._is_gcc(compiler): - return ["-Wl,+s", "-L" + dir] - return ["+s", "-L" + dir] + return [ + "-Wl,+s" if self._is_gcc() else "+s", + "-L" + dir, + ] # For all compilers, `-Wl` is the presumed way to # pass a compiler option to the linker and `-R` is @@ -259,67 +324,77 @@ def runtime_library_dir_option(self, dir): def library_option(self, lib): return "-l" + lib + @staticmethod + def _library_root(dir): + """ + macOS users can specify an alternate SDK using '-isysroot'. + Calculate the SDK root if it is specified. + + Note that, as of Xcode 7, Apple SDKs may contain textual stub + libraries with .tbd extensions rather than the normal .dylib + shared libraries installed in /. The Apple compiler tool + chain handles this transparently but it can cause problems + for programs that are being built with an SDK and searching + for specific libraries.
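A hedged sketch of the rewrite rule _library_root applies on macOS; the SDK path in CFLAGS is assumed for illustration:

import os
import re

cflags = '-g -isysroot /Library/SDKs/MacOSX.sdk -O2'  # assumed value
match = re.search(r'-isysroot\s*(\S+)', cflags)
dir = '/usr/lib'  # a system dir: re-rooted under the SDK
print(os.path.join(match.group(1), dir[1:]))
# /Library/SDKs/MacOSX.sdk/usr/lib -- '/usr/local/...' and other
# non-system dirs pass through unchanged.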
Callers of find_library_file need to + keep in mind that the base filename of the returned SDK library + file might have a different extension from that of the library + file installed on the running system, for example: + /Applications/Xcode.app/Contents/Developer/Platforms/ + MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ + usr/lib/libedit.tbd + vs + /usr/lib/libedit.dylib + """ + cflags = sysconfig.get_config_var('CFLAGS') + match = re.search(r'-isysroot\s*(\S+)', cflags) + + apply_root = ( + sys.platform == 'darwin' + and match + and ( + dir.startswith('/System/') + or (dir.startswith('/usr/') and not dir.startswith('/usr/local/')) + ) + ) + + return os.path.join(match.group(1), dir[1:]) if apply_root else dir + def find_library_file(self, dirs, lib, debug=0): - shared_f = self.library_filename(lib, lib_type='shared') - dylib_f = self.library_filename(lib, lib_type='dylib') - xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') - static_f = self.library_filename(lib, lib_type='static') - - if sys.platform == 'darwin': - # On OSX users can specify an alternate SDK using - # '-isysroot', calculate the SDK root if it is specified - # (and use it further on) - # - # Note that, as of Xcode 7, Apple SDKs may contain textual stub - # libraries with .tbd extensions rather than the normal .dylib - # shared libraries installed in /. The Apple compiler tool - # chain handles this transparently but it can cause problems - # for programs that are being built with an SDK and searching - # for specific libraries. Callers of find_library_file need to - # keep in mind that the base filename of the returned SDK library - # file might have a different extension from that of the library - # file installed on the running system, for example: - # /Applications/Xcode.app/Contents/Developer/Platforms/ - # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ - # usr/lib/libedit.tbd - # vs - # /usr/lib/libedit.dylib - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s*(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - - - - for dir in dirs: - shared = os.path.join(dir, shared_f) - dylib = os.path.join(dir, dylib_f) - static = os.path.join(dir, static_f) - xcode_stub = os.path.join(dir, xcode_stub_f) - - if sys.platform == 'darwin' and ( - dir.startswith('/System/') or ( - dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): - - shared = os.path.join(sysroot, dir[1:], shared_f) - dylib = os.path.join(sysroot, dir[1:], dylib_f) - static = os.path.join(sysroot, dir[1:], static_f) - xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) - - # We're second-guessing the linker here, with not much hard - # data to go on: GCC seems to prefer the shared library, so I'm - # assuming that *all* Unix C compilers do. And of course I'm - # ignoring even GCC's "-static" option. So sue me. - if os.path.exists(dylib): - return dylib - elif os.path.exists(xcode_stub): - return xcode_stub - elif os.path.exists(shared): - return shared - elif os.path.exists(static): - return static - - # Oops, didn't find it in *any* of 'dirs' - return None + r""" + Second-guess the linker with not much hard + data to go on: GCC seems to prefer the shared library, so + assume that *all* Unix C compilers do, + ignoring even GCC's "-static" option. 
+ + >>> compiler = UnixCCompiler() + >>> compiler._library_root = lambda dir: dir + >>> monkeypatch = getfixture('monkeypatch') + >>> monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d) + >>> dirs = ('/foo/bar/missing', '/foo/bar/existing') + >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/') + '/foo/bar/existing/libabc.dylib' + >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/') + '/foo/bar/existing/libabc.dylib' + >>> monkeypatch.setattr(os.path, 'exists', + ... lambda d: 'existing' in d and '.a' in d) + >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/') + '/foo/bar/existing/libabc.a' + >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/') + '/foo/bar/existing/libabc.a' + """ + lib_names = ( + self.library_filename(lib, lib_type=type) + for type in 'dylib xcode_stub shared static'.split() + ) + + roots = map(self._library_root, dirs) + + searched = ( + os.path.join(root, lib_name) + for root, lib_name in itertools.product(roots, lib_names) + ) + + found = filter(os.path.exists, searched) + + # Return None if it could not be found in any dir. + return next(found, None) diff --git a/setuptools/_distutils/util.py b/setuptools/_distutils/util.py index 6d506d7e31..7ef47176e2 100644 --- a/setuptools/_distutils/util.py +++ b/setuptools/_distutils/util.py @@ -4,30 +4,31 @@ one of the other *util.py modules. """ +import importlib.util import os import re -import importlib.util import string +import subprocess import sys import sysconfig -from distutils.errors import DistutilsPlatformError -from distutils.dep_util import newer -from distutils.spawn import spawn -from distutils import log -from distutils.errors import DistutilsByteCompileError -from .py35compat import _optim_args_from_interpreter_flags +import functools + +from .errors import DistutilsPlatformError, DistutilsByteCompileError +from .dep_util import newer +from .spawn import spawn +from ._log import log def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. + """ + Return a string that identifies the current platform. Use this + function to distinguish platform-specific build directories and + platform-specific built distributions. """ - # We initially exposed platforms as defined in Python 3.9 + # This function initially exposed platforms as defined in Python 3.9 # even with older Python versions when distutils was split out. - # Now that we delegate to stdlib sysconfig we need to restore this - # in case anyone has started to depend on it. + # Now it delegates to stdlib sysconfig, but maintains compatibility. 
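get_host_platform() normalizes what the running interpreter reports, while get_platform() (defined just below) additionally honors cross-compilation on Windows via the Visual Studio developer-prompt target. A hedged sketch of the observable values; the strings are examples, not an exhaustive list:

import os
from setuptools._distutils.util import get_platform  # assumed import path

print(get_platform())  # e.g. 'linux-x86_64' or 'win-amd64', host-dependent

# On Windows, a cross-build target set by vcvarsall wins over the host:
os.environ['VSCMD_ARG_TGT_ARCH'] = 'arm64'
print(get_platform())  # 'win-arm64' when os.name == 'nt'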
if sys.version_info < (3, 8): if os.name == 'nt': @@ -41,43 +42,49 @@ def get_host_platform(): osname, host, release, version, machine = os.uname() if osname[:3] == "aix": from .py38compat import aix_platform + return aix_platform(osname, version, release) return sysconfig.get_platform() + def get_platform(): if os.name == 'nt': TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', 'arm64': 'win-arm64', } - return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() - else: - return get_host_platform() + target = os.environ.get('VSCMD_ARG_TGT_ARCH') + return TARGET_TO_PLAT.get(target) or get_host_platform() + return get_host_platform() if sys.platform == 'darwin': - _syscfg_macosx_ver = None # cache the version pulled from sysconfig + _syscfg_macosx_ver = None # cache the version pulled from sysconfig MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET' + def _clear_cached_macosx_ver(): """For testing only. Do not call.""" global _syscfg_macosx_ver _syscfg_macosx_ver = None + def get_macosx_target_ver_from_syscfg(): """Get the version of macOS latched in the Python interpreter configuration. Returns the version as a string or None if can't obtain one. Cached.""" global _syscfg_macosx_ver if _syscfg_macosx_ver is None: from distutils import sysconfig + ver = sysconfig.get_config_var(MACOSX_VERSION_VAR) or '' if ver: _syscfg_macosx_ver = ver return _syscfg_macosx_ver + def get_macosx_target_ver(): """Return the version of macOS for which we are building. @@ -95,12 +102,16 @@ def get_macosx_target_ver(): # ensures extension modules are built with correct compatibility # values, specifically LDSHARED which can use # '-undefined dynamic_lookup' which only works on >= 10.3. - if syscfg_ver and split_version(syscfg_ver) >= [10, 3] and \ - split_version(env_ver) < [10, 3]: - my_msg = ('$' + MACOSX_VERSION_VAR + ' mismatch: ' - 'now "%s" but "%s" during configure; ' - 'must use 10.3 or later' - % (env_ver, syscfg_ver)) + if ( + syscfg_ver + and split_version(syscfg_ver) >= [10, 3] + and split_version(env_ver) < [10, 3] + ): + my_msg = ( + '$' + MACOSX_VERSION_VAR + ' mismatch: ' + 'now "%s" but "%s" during configure; ' + 'must use 10.3 or later' % (env_ver, syscfg_ver) + ) raise DistutilsPlatformError(my_msg) return env_ver return syscfg_ver @@ -111,7 +122,7 @@ def split_version(s): return [int(n) for n in s.split('.')] -def convert_path (pathname): +def convert_path(pathname): """Return 'pathname' as a name that will work on the native filesystem, i.e. split it on '/' and put it back together again using the current directory separator. Needed because filenames in the setup script are @@ -136,10 +147,11 @@ def convert_path (pathname): return os.curdir return os.path.join(*paths) + # convert_path () -def change_root (new_root, pathname): +def change_root(new_root, pathname): """Return 'pathname' with 'new_root' prepended. If 'pathname' is relative, this is equivalent to "os.path.join(new_root,pathname)". 
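A hedged sketch of change_root() on a posix system; both calls end up at the same place because an absolute 'pathname' is first made relative:

from setuptools._distutils.util import change_root  # assumed import path

print(change_root('/staging', 'usr/lib/python3'))   # '/staging/usr/lib/python3'
print(change_root('/staging', '/usr/lib/python3'))  # '/staging/usr/lib/python3'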
Otherwise, it requires making 'pathname' relative and then joining the @@ -157,12 +169,11 @@ def change_root (new_root, pathname): path = path[1:] return os.path.join(new_root, path) - else: - raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) + raise DistutilsPlatformError(f"nothing known about platform '{os.name}'") -_environ_checked = 0 -def check_environ (): +@functools.lru_cache() +def check_environ(): """Ensure that 'os.environ' has all the environment variables we guarantee that users can use in config files, command-line options, etc. Currently this includes: @@ -170,13 +181,10 @@ def check_environ (): PLAT - description of the current platform, including hardware and OS (see 'get_platform()') """ - global _environ_checked - if _environ_checked: - return - if os.name == 'posix' and 'HOME' not in os.environ: try: import pwd + os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] except (ImportError, KeyError): # bpo-10496: if the current user identifier doesn't exist in the @@ -186,10 +194,8 @@ def check_environ (): if 'PLAT' not in os.environ: os.environ['PLAT'] = get_platform() - _environ_checked = 1 - -def subst_vars (s, local_vars): +def subst_vars(s, local_vars): """ Perform variable substitution on 'string'. Variables are indicated by format-style braces ("{var}"). @@ -207,27 +213,28 @@ def subst_vars (s, local_vars): except KeyError as var: raise ValueError(f"invalid variable {var}") -# subst_vars () - def _subst_compat(s): """ Replace shell/Perl-style variable substitution with format-style. For compatibility. """ + def _subst(match): return f'{{{match.group(1)}}}' + repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) if repl != s: import warnings + warnings.warn( - "shell/Perl-style substitions are deprecated", + "shell/Perl-style substitutions are deprecated", DeprecationWarning, ) return repl -def grok_environment_error (exc, prefix="error: "): +def grok_environment_error(exc, prefix="error: "): # Function kept for backward compatibility. # Used to try clever things with EnvironmentErrors, # but nowadays str(exception) produces good messages. @@ -236,13 +243,16 @@ def grok_environment_error (exc, prefix="error: "): # Needed by 'split_quoted()' _wordchars_re = _squote_re = _dquote_re = None + + def _init_regex(): global _wordchars_re, _squote_re, _dquote_re _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') -def split_quoted (s): + +def split_quoted(s): """Split a string up according to Unix shell-like rules for quotes and backslashes. In short: words are delimited by spaces, as long as those spaces are not escaped by a backslash, or inside a quoted string. @@ -256,7 +266,8 @@ def split_quoted (s): # This is a nice algorithm for splitting up a single string, since it # doesn't require character-by-character examination. It was a little # bit of a brain-bender to get it working right, though... 
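A hedged sketch of the quoting rules split_quoted() implements, plus the format-style substitution described for subst_vars() above:

from setuptools._distutils.util import split_quoted, subst_vars  # assumed path

print(split_quoted('gcc -DFOO="a b" -I. file\\ name.c'))
# ['gcc', '-DFOO=a b', '-I.', 'file name.c']

print(subst_vars('{prefix}/bin', {'prefix': '/opt/python'}))
# '/opt/python/bin'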
- if _wordchars_re is None: _init_regex() + if _wordchars_re is None: + _init_regex() s = s.strip() words = [] @@ -269,20 +280,23 @@ def split_quoted (s): words.append(s[:end]) break - if s[end] in string.whitespace: # unescaped, unquoted whitespace: now - words.append(s[:end]) # we definitely have a word delimiter + if s[end] in string.whitespace: + # unescaped, unquoted whitespace: now + # we definitely have a word delimiter + words.append(s[:end]) s = s[end:].lstrip() pos = 0 - elif s[end] == '\\': # preserve whatever is being escaped; - # will become part of the current word - s = s[:end] + s[end+1:] - pos = end+1 + elif s[end] == '\\': + # preserve whatever is being escaped; + # will become part of the current word + s = s[:end] + s[end + 1 :] + pos = end + 1 else: - if s[end] == "'": # slurp singly-quoted string + if s[end] == "'": # slurp singly-quoted string m = _squote_re.match(s, end) - elif s[end] == '"': # slurp doubly-quoted string + elif s[end] == '"': # slurp doubly-quoted string m = _dquote_re.match(s, end) else: raise RuntimeError("this can't happen (bad char '%c')" % s[end]) @@ -291,7 +305,7 @@ def split_quoted (s): raise ValueError("bad string (mismatched %s quotes?)" % s[end]) (beg, end) = m.span() - s = s[:beg] + s[beg+1:end-1] + s[end:] + s = s[:beg] + s[beg + 1 : end - 1] + s[end:] pos = m.end() - 2 if pos >= len(s): @@ -300,10 +314,11 @@ def split_quoted (s): return words + # split_quoted () -def execute (func, args, msg=None, verbose=0, dry_run=0): +def execute(func, args, msg=None, verbose=0, dry_run=0): """Perform some action that affects the outside world (eg. by writing to the filesystem). Such actions are special because they are disabled by the 'dry_run' flag. This method takes care of all @@ -313,8 +328,8 @@ def execute (func, args, msg=None, verbose=0, dry_run=0): print. """ if msg is None: - msg = "%s%r" % (func.__name__, args) - if msg[-2:] == ',)': # correct for singleton tuple + msg = "{}{!r}".format(func.__name__, args) + if msg[-2:] == ',)': # correct for singleton tuple msg = msg[0:-2] + ')' log.info(msg) @@ -322,7 +337,7 @@ def execute (func, args, msg=None, verbose=0, dry_run=0): func(*args) -def strtobool (val): +def strtobool(val): """Convert a string representation of truth to true (1) or false (0). True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values @@ -335,14 +350,19 @@ def strtobool (val): elif val in ('n', 'no', 'f', 'false', 'off', '0'): return 0 else: - raise ValueError("invalid truth value %r" % (val,)) - - -def byte_compile (py_files, - optimize=0, force=0, - prefix=None, base_dir=None, - verbose=1, dry_run=0, - direct=None): + raise ValueError("invalid truth value {!r}".format(val)) + + +def byte_compile( # noqa: C901 + py_files, + optimize=0, + force=0, + prefix=None, + base_dir=None, + verbose=1, + dry_run=0, + direct=None, +): """Byte-compile a collection of Python source files to .pyc files in a __pycache__ subdirectory. 'py_files' is a list of files to compile; any files that don't end in ".py" are silently @@ -372,10 +392,6 @@ def byte_compile (py_files, it set to None. """ - # Late import to fix a bootstrap issue: _posixsubprocess is built by - # setup.py, but setup.py uses distutils. - import subprocess - # nothing is done if sys.dont_write_bytecode is True if sys.dont_write_bytecode: raise DistutilsByteCompileError('byte-compiling is disabled.') @@ -391,16 +407,18 @@ def byte_compile (py_files, # optimize mode, or if either optimization level was requested by # the caller. 
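A hedged usage sketch of byte_compile(); the build paths are illustrative:

from setuptools._distutils.util import byte_compile  # assumed import path

byte_compile(
    ['build/lib/pkg/mod.py'],
    optimize=1,           # compile to .opt-1 bytecode
    prefix='build/lib/',  # stripped, so the .pyc records 'pkg/mod.py'
    verbose=1,
)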
if direct is None: - direct = (__debug__ and optimize == 0) + direct = __debug__ and optimize == 0 # "Indirect" byte-compilation: write a temporary script and then # run it with the appropriate flags. if not direct: try: from tempfile import mkstemp + (script_fd, script_name) = mkstemp(".py") except ImportError: from tempfile import mktemp + (script_fd, script_name) = None, mktemp(".py") log.info("writing byte-compilation script '%s'", script_name) if not dry_run: @@ -410,10 +428,12 @@ def byte_compile (py_files, script = open(script_name, "w") with script: - script.write("""\ + script.write( + """\ from distutils.util import byte_compile files = [ -""") +""" + ) # XXX would be nice to write absolute filenames, just for # safety's sake (script should be more robust in the face of @@ -425,24 +445,22 @@ def byte_compile (py_files, # problem is that it's really a directory, but I'm treating it # as a dumb string, so trailing slashes and so forth matter. - #py_files = map(os.path.abspath, py_files) - #if prefix: - # prefix = os.path.abspath(prefix) - script.write(",\n".join(map(repr, py_files)) + "]\n") - script.write(""" + script.write( + """ byte_compile(files, optimize=%r, force=%r, prefix=%r, base_dir=%r, verbose=%r, dry_run=0, direct=1) -""" % (optimize, force, prefix, base_dir, verbose)) +""" + % (optimize, force, prefix, base_dir, verbose) + ) cmd = [sys.executable] - cmd.extend(_optim_args_from_interpreter_flags()) + cmd.extend(subprocess._optim_args_from_interpreter_flags()) cmd.append(script_name) spawn(cmd, dry_run=dry_run) - execute(os.remove, (script_name,), "removing %s" % script_name, - dry_run=dry_run) + execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run) # "Direct" byte-compilation: use the py_compile module to compile # right here, right now. Note that the script generated in indirect @@ -462,16 +480,17 @@ def byte_compile (py_files, # dfile - purported source filename (same as 'file' by default) if optimize >= 0: opt = '' if optimize == 0 else optimize - cfile = importlib.util.cache_from_source( - file, optimization=opt) + cfile = importlib.util.cache_from_source(file, optimization=opt) else: cfile = importlib.util.cache_from_source(file) dfile = file if prefix: - if file[:len(prefix)] != prefix: - raise ValueError("invalid prefix: filename %r doesn't start with %r" - % (file, prefix)) - dfile = dfile[len(prefix):] + if file[: len(prefix)] != prefix: + raise ValueError( + "invalid prefix: filename %r doesn't start with %r" + % (file, prefix) + ) + dfile = dfile[len(prefix) :] if base_dir: dfile = os.path.join(base_dir, dfile) @@ -482,12 +501,10 @@ def byte_compile (py_files, if not dry_run: compile(file, cfile, dfile) else: - log.debug("skipping byte-compilation of %s to %s", - file, cfile_base) + log.debug("skipping byte-compilation of %s to %s", file, cfile_base) -# byte_compile () -def rfc822_escape (header): +def rfc822_escape(header): """Return a version of the string escaped for inclusion in an RFC-822 header, by ensuring there are 8 spaces space after each newline. """ diff --git a/setuptools/_distutils/version.py b/setuptools/_distutils/version.py index 35e181dbb6..74c40d7bfd 100644 --- a/setuptools/_distutils/version.py +++ b/setuptools/_distutils/version.py @@ -49,18 +49,18 @@ class Version: rich comparisons to _cmp. """ - def __init__ (self, vstring=None): + def __init__(self, vstring=None): + if vstring: + self.parse(vstring) warnings.warn( "distutils Version classes are deprecated. 
" "Use packaging.version instead.", DeprecationWarning, stacklevel=2, ) - if vstring: - self.parse(vstring) - def __repr__ (self): - return "%s ('%s')" % (self.__class__.__name__, str(self)) + def __repr__(self): + return "{} ('{}')".format(self.__class__.__name__, str(self)) def __eq__(self, other): c = self._cmp(other) @@ -110,7 +110,7 @@ def __ge__(self, other): # instance of your version class) -class StrictVersion (Version): +class StrictVersion(Version): """Version numbering for anal retentives and software idealists. Implements the standard interface for version number classes as @@ -147,17 +147,16 @@ class StrictVersion (Version): in the distutils documentation. """ - version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', - re.VERBOSE | re.ASCII) + version_re = re.compile( + r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE | re.ASCII + ) - - def parse (self, vstring): + def parse(self, vstring): match = self.version_re.match(vstring) if not match: raise ValueError("invalid version number '%s'" % vstring) - (major, minor, patch, prerelease, prerelease_num) = \ - match.group(1, 2, 4, 5, 6) + (major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6) if patch: self.version = tuple(map(int, [major, minor, patch])) @@ -169,9 +168,7 @@ def parse (self, vstring): else: self.prerelease = None - - def __str__ (self): - + def __str__(self): if self.version[2] == 0: vstring = '.'.join(map(str, self.version[0:2])) else: @@ -182,8 +179,7 @@ def __str__ (self): return vstring - - def _cmp (self, other): + def _cmp(self, other): # noqa: C901 if isinstance(other, str): with suppress_known_deprecation(): other = StrictVersion(other) @@ -204,13 +200,13 @@ def _cmp (self, other): # case 3: self doesn't have prerelease, other does: self is greater # case 4: both have prerelease: must compare them! - if (not self.prerelease and not other.prerelease): + if not self.prerelease and not other.prerelease: return 0 - elif (self.prerelease and not other.prerelease): + elif self.prerelease and not other.prerelease: return -1 - elif (not self.prerelease and other.prerelease): + elif not self.prerelease and other.prerelease: return 1 - elif (self.prerelease and other.prerelease): + elif self.prerelease and other.prerelease: if self.prerelease == other.prerelease: return 0 elif self.prerelease < other.prerelease: @@ -220,6 +216,7 @@ def _cmp (self, other): else: assert False, "never get here" + # end class StrictVersion @@ -287,7 +284,8 @@ def _cmp (self, other): # the Right Thing" (ie. the code matches the conception). But I'd rather # have a conception that matches common notions about version numbers. -class LooseVersion (Version): + +class LooseVersion(Version): """Version numbering for anarchists and software realists. 
Implements the standard interface for version number classes as @@ -322,13 +320,12 @@ class LooseVersion (Version): component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) - def parse (self, vstring): + def parse(self, vstring): # I've given up on thinking I can reconstruct the version string # from the parsed tuple -- so I just store the string here for # use by __str__ self.vstring = vstring - components = [x for x in self.component_re.split(vstring) - if x and x != '.'] + components = [x for x in self.component_re.split(vstring) if x and x != '.'] for i, obj in enumerate(components): try: components[i] = int(obj) @@ -337,16 +334,13 @@ def parse (self, vstring): self.version = components - - def __str__ (self): + def __str__(self): return self.vstring - - def __repr__ (self): + def __repr__(self): return "LooseVersion ('%s')" % str(self) - - def _cmp (self, other): + def _cmp(self, other): if isinstance(other, str): other = LooseVersion(other) elif not isinstance(other, LooseVersion): diff --git a/setuptools/_distutils/versionpredicate.py b/setuptools/_distutils/versionpredicate.py index 55f25d91ae..d6c0c007aa 100644 --- a/setuptools/_distutils/versionpredicate.py +++ b/setuptools/_distutils/versionpredicate.py @@ -1,15 +1,14 @@ """Module for parsing and testing package version predicate strings. """ import re -import distutils.version +from . import version import operator -re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", - re.ASCII) +re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII) # (package) (rest) -re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses +re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") # (comp) (version) @@ -23,12 +22,20 @@ def splitUp(pred): if not res: raise ValueError("bad package restriction syntax: %r" % pred) comp, verStr = res.groups() - with distutils.version.suppress_known_deprecation(): - other = distutils.version.StrictVersion(verStr) + with version.suppress_known_deprecation(): + other = version.StrictVersion(verStr) return (comp, other) -compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, - ">": operator.gt, ">=": operator.ge, "!=": operator.ne} + +compmap = { + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + ">": operator.gt, + ">=": operator.ge, + "!=": operator.ne, +} + class VersionPredicate: """Parse and test package version predicates. @@ -96,8 +103,7 @@ class VersionPredicate: """ def __init__(self, versionPredicateStr): - """Parse a version predicate string. - """ + """Parse a version predicate string.""" # Fields: # name: package name # pred: list of (comparison string, StrictVersion) @@ -117,8 +123,7 @@ def __init__(self, versionPredicateStr): str = match.groups()[0] self.pred = [splitUp(aPred) for aPred in str.split(",")] if not self.pred: - raise ValueError("empty parenthesized list in %r" - % versionPredicateStr) + raise ValueError("empty parenthesized list in %r" % versionPredicateStr) else: self.pred = [] @@ -142,6 +147,7 @@ def satisfied_by(self, version): _provision_rx = None + def split_provision(value): """Return the name and optional version number of a provision. 
@@ -156,14 +162,14 @@ def split_provision(value): global _provision_rx if _provision_rx is None: _provision_rx = re.compile( - r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", - re.ASCII) + r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", re.ASCII + ) value = value.strip() m = _provision_rx.match(value) if not m: raise ValueError("illegal provides specification: %r" % value) ver = m.group(2) or None if ver: - with distutils.version.suppress_known_deprecation(): - ver = distutils.version.StrictVersion(ver) + with version.suppress_known_deprecation(): + ver = version.StrictVersion(ver) return m.group(1), ver diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py index f087681b59..a2346342d6 100644 --- a/setuptools/_entry_points.py +++ b/setuptools/_entry_points.py @@ -2,6 +2,7 @@ import operator import itertools +from .errors import OptionError from .extern.jaraco.text import yield_lines from .extern.jaraco.functools import pass_none from ._importlib import metadata @@ -14,7 +15,14 @@ def ensure_valid(ep): Exercise one of the dynamic properties to trigger the pattern match. """ - ep.extras + try: + ep.extras + except AttributeError as ex: + msg = ( + f"Problems to parse {ep}.\nPlease ensure entry-point follows the spec: " + "https://packaging.python.org/en/latest/specifications/entry-points/" + ) + raise OptionError(msg) from ex def load_group(value, group): diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py index c1ac137e11..5ae94b4739 100644 --- a/setuptools/_importlib.py +++ b/setuptools/_importlib.py @@ -12,6 +12,20 @@ def disable_importlib_metadata_finder(metadata): import importlib_metadata except ImportError: return + except AttributeError: + from .warnings import SetuptoolsWarning + + SetuptoolsWarning.emit( + "Incompatibility problem.", + """ + `importlib-metadata` version is incompatible with `setuptools`. + This problem is likely to be solved by installing an updated version of + `importlib-metadata`. + """, + see_url="https://github.com/python/importlib_metadata/issues/396" + ) # Ensure a descriptive message is shown. + raise # This exception can be suppressed by _distutils_hack + if importlib_metadata is metadata: return to_remove = [ diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py new file mode 100644 index 0000000000..31899f7ab1 --- /dev/null +++ b/setuptools/_normalization.py @@ -0,0 +1,114 @@ +""" +Helpers for normalization as expected in wheel/sdist/module file names +and core metadata +""" +import re +from pathlib import Path +from typing import Union + +from .extern import packaging +from .warnings import SetuptoolsDeprecationWarning + +_Path = Union[str, Path] + +# https://packaging.python.org/en/latest/specifications/core-metadata/#name +_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) +_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) + + +def safe_identifier(name: str) -> str: + """Make a string safe to be used as Python identifier. + >>> safe_identifier("12abc") + '_12abc' + >>> safe_identifier("__editable__.myns.pkg-78.9.3_local") + '__editable___myns_pkg_78_9_3_local' + """ + safe = re.sub(r'\W|^(?=\d)', '_', name) + assert safe.isidentifier() + return safe + + +def safe_name(component: str) -> str: + """Escape a component used as a project name according to Core Metadata. 
+ >>> safe_name("hello world") + 'hello-world' + >>> safe_name("hello?world") + 'hello-world' + """ + # See pkg_resources.safe_name + return _UNSAFE_NAME_CHARS.sub("-", component) + + +def safe_version(version: str) -> str: + """Convert an arbitrary string into a valid version string. + >>> safe_version("1988 12 25") + '1988.12.25' + >>> safe_version("v0.2.1") + '0.2.1' + >>> safe_version("v0.2?beta") + '0.2b0' + >>> safe_version("v0.2 beta") + '0.2b0' + >>> safe_version("ubuntu lts") + Traceback (most recent call last): + ... + setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts' + """ + v = version.replace(' ', '.') + try: + return str(packaging.version.Version(v)) + except packaging.version.InvalidVersion: + attempt = _UNSAFE_NAME_CHARS.sub("-", v) + return str(packaging.version.Version(attempt)) + + +def best_effort_version(version: str) -> str: + """Convert an arbitrary string into a version-like string. + >>> best_effort_version("v0.2 beta") + '0.2b0' + + >>> import warnings + >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) + >>> best_effort_version("ubuntu lts") + 'ubuntu.lts' + """ + # See pkg_resources.safe_version + try: + return safe_version(version) + except packaging.version.InvalidVersion: + SetuptoolsDeprecationWarning.emit( + f"Invalid version: {version!r}.", + f""" + Version {version!r} is not valid according to PEP 440. + + Please make sure to specify a valid version for your package. + Also note that future releases of setuptools may halt the build process + if an invalid version is given. + """, + see_url="https://peps.python.org/pep-0440/", + due_date=(2023, 9, 26), # See setuptools/dist _validate_version + ) + v = version.replace(' ', '.') + return safe_name(v) + + +def filename_component(value: str) -> str: + """Normalize each component of a filename (e.g. distribution/version part of wheel) + Note: ``value`` needs to be already normalized. + >>> filename_component("my-pkg") + 'my_pkg' + """ + return value.replace("-", "_").strip("_") + + +def safer_name(value: str) -> str: + """Like ``safe_name`` but can be used as filename component for wheel""" + # See bdist_wheel.safer_name + return filename_component(safe_name(value)) + + +def safer_best_effort_version(value: str) -> str: + """Like ``best_effort_version`` but can be used as filename component for wheel""" + # See bdist_wheel.safer_verion + # TODO: Replace with only safe_version in the future (no need for best effort) + return filename_component(best_effort_version(value)) diff --git a/setuptools/_path.py b/setuptools/_path.py index ede9cb0027..b99d9dadcf 100644 --- a/setuptools/_path.py +++ b/setuptools/_path.py @@ -1,7 +1,37 @@ import os +import sys +from typing import Union + +_Path = Union[str, os.PathLike] def ensure_directory(path): """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) os.makedirs(dirname, exist_ok=True) + + +def same_path(p1: _Path, p2: _Path) -> bool: + """Differs from os.path.samefile because it does not require paths to exist. + Purely string based (no comparison between i-nodes). 
+ >>> same_path("a/b", "./a/b") + True + >>> same_path("a/b", "a/./b") + True + >>> same_path("a/b", "././a/b") + True + >>> same_path("a/b", "./a/b/c/..") + True + >>> same_path("a/b", "../a/b/c") + False + >>> same_path("a", "a/b") + False + """ + return normpath(p1) == normpath(p2) + + +def normpath(filename: _Path) -> str: + """Normalize a file/dir name for comparison purposes.""" + # See pkg_resources.normalize_path for notes about cygwin + file = os.path.abspath(filename) if sys.platform == 'cygwin' else filename + return os.path.normcase(os.path.realpath(os.path.normpath(file))) diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py index ca7241746b..5d5b927fd8 100644 --- a/setuptools/_reqs.py +++ b/setuptools/_reqs.py @@ -1,9 +1,13 @@ +from typing import Callable, Iterable, Iterator, TypeVar, Union, overload + import setuptools.extern.jaraco.text as text +from setuptools.extern.packaging.requirements import Requirement -from pkg_resources import Requirement +_T = TypeVar("_T") +_StrOrIter = Union[str, Iterable[str]] -def parse_strings(strs): +def parse_strings(strs: _StrOrIter) -> Iterator[str]: """ Yield requirement strings for each specification in `strs`. @@ -12,8 +16,18 @@ def parse_strings(strs): return text.join_continuation(map(text.drop_comment, text.yield_lines(strs))) -def parse(strs): +@overload +def parse(strs: _StrOrIter) -> Iterator[Requirement]: + ... + + +@overload +def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]: + ... + + +def parse(strs, parser=Requirement): """ - Deprecated drop-in replacement for pkg_resources.parse_requirements. + Replacement for ``pkg_resources.parse_requirements`` that uses ``packaging``. """ - return map(Requirement, parse_strings(strs)) + return map(parser, parse_strings(strs)) diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/LICENSE b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/LICENSE deleted file mode 100644 index be7e092b0b..0000000000 --- a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2017-2019 Jason R. Coombs, Barry Warsaw - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/RECORD deleted file mode 100644 index d8c2dff628..0000000000 --- a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/RECORD +++ /dev/null @@ -1,24 +0,0 @@ -importlib_metadata-4.11.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -importlib_metadata-4.11.1.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 -importlib_metadata-4.11.1.dist-info/METADATA,sha256=XNgM09x6V8tbt6ugvKjiUxH9yB7pBdILWuWE5YNWHRw,3999 -importlib_metadata-4.11.1.dist-info/RECORD,, -importlib_metadata-4.11.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_metadata-4.11.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -importlib_metadata-4.11.1.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 -importlib_metadata/__init__.py,sha256=Wkh_tb0u0Ds_615ByV9VLLjqgoOWirwMY8EW40oO3nM,30122 -importlib_metadata/__pycache__/__init__.cpython-310.pyc,, -importlib_metadata/__pycache__/_adapters.cpython-310.pyc,, -importlib_metadata/__pycache__/_collections.cpython-310.pyc,, -importlib_metadata/__pycache__/_compat.cpython-310.pyc,, -importlib_metadata/__pycache__/_functools.cpython-310.pyc,, -importlib_metadata/__pycache__/_itertools.cpython-310.pyc,, -importlib_metadata/__pycache__/_meta.cpython-310.pyc,, -importlib_metadata/__pycache__/_text.cpython-310.pyc,, -importlib_metadata/_adapters.py,sha256=B6fCi5-8mLVDFUZj3krI5nAo-mKp1dH_qIavyIyFrJs,1862 -importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 -importlib_metadata/_compat.py,sha256=EU2XCFBPFByuI0Of6XkAuBYbzqSyjwwwwqmsK4ccna0,1826 -importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895 -importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 -importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO23qEygbVdF9r-6qh-hjE,1154 -importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 -importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/INSTALLER b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/INSTALLER similarity index 100% rename from setuptools/_vendor/importlib_metadata-4.11.1.dist-info/INSTALLER rename to setuptools/_vendor/importlib_metadata-6.0.0.dist-info/INSTALLER diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/LICENSE b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/METADATA b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/METADATA similarity index 72% rename from setuptools/_vendor/importlib_metadata-4.11.1.dist-info/METADATA rename to setuptools/_vendor/importlib_metadata-6.0.0.dist-info/METADATA index fda4bc75b5..663c0c8720 100644 --- a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/METADATA +++ b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/METADATA @@ -1,12 +1,10 @@ Metadata-Version: 2.1 Name: importlib-metadata -Version: 4.11.1 +Version: 6.0.0 Summary: Read metadata from Python packages Home-page: https://github.com/python/importlib_metadata Author: Jason R. Coombs Author-email: jaraco@jaraco.com -License: UNKNOWN -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License @@ -17,32 +15,33 @@ License-File: LICENSE Requires-Dist: zipp (>=0.5) Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: perf Requires-Dist: ipython ; extra == 'perf' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: packaging ; extra == 'testing' Requires-Dist: pyfakefs ; extra == 'testing' Requires-Dist: flufl.flake8 ; extra == 'testing' Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/importlib_metadata.svg - :target: `PyPI link`_ + :target: https://pypi.org/project/importlib_metadata .. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/importlib_metadata .. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 @@ -58,6 +57,8 @@ Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra .. image:: https://img.shields.io/badge/skeleton-2022-informational :target: https://blog.jaraco.com/skeleton +.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata + :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme Library to access the metadata for a Python package. 
@@ -78,9 +79,11 @@ were contributed to different versions in the standard library: * - importlib_metadata - stdlib - * - 4.8 + * - 5.0 + - 3.12 + * - 4.13 - 3.11 - * - 4.4 + * - 4.6 - 3.10 * - 1.4 - 3.8 @@ -115,4 +118,18 @@ Project details * Code hosting: https://github.com/python/importlib_metadata * Documentation: https://importlib_metadata.readthedocs.io/ +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD new file mode 100644 index 0000000000..5cc984511a --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD @@ -0,0 +1,26 @@ +importlib_metadata-6.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_metadata-6.0.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +importlib_metadata-6.0.0.dist-info/METADATA,sha256=tZIEx9HdEXD34SWuitkNXaYBqSnyNukx2l4FKQAz9hY,4958 +importlib_metadata-6.0.0.dist-info/RECORD,, +importlib_metadata-6.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata-6.0.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +importlib_metadata-6.0.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata/__init__.py,sha256=wiMJxNXXhPtRRHSX2N9gGLnTh0YszmE1rn3uKYRrNcs,26490 +importlib_metadata/__pycache__/__init__.cpython-38.pyc,, +importlib_metadata/__pycache__/_adapters.cpython-38.pyc,, +importlib_metadata/__pycache__/_collections.cpython-38.pyc,, +importlib_metadata/__pycache__/_compat.cpython-38.pyc,, +importlib_metadata/__pycache__/_functools.cpython-38.pyc,, +importlib_metadata/__pycache__/_itertools.cpython-38.pyc,, +importlib_metadata/__pycache__/_meta.cpython-38.pyc,, +importlib_metadata/__pycache__/_py39compat.cpython-38.pyc,, +importlib_metadata/__pycache__/_text.cpython-38.pyc,, +importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454 +importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 +importlib_metadata/_compat.py,sha256=9zOKf0eDgkCMnnaEhU5kQVxHd1P8BIYV7Stso7av5h8,1857 +importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895 +importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 +importlib_metadata/_meta.py,sha256=v5e1ZDG7yZTH3h7TjbS5bM5p8AGzMPVOu8skDMv4h6k,1165 +importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098 +importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 +importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/REQUESTED similarity index 100% rename from setuptools/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED rename to setuptools/_vendor/importlib_metadata-6.0.0.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/WHEEL 
b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/WHEEL similarity index 65% rename from pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/WHEEL rename to setuptools/_vendor/importlib_metadata-6.0.0.dist-info/WHEEL index 5bad85fdc1..57e3d840d5 100644 --- a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/WHEEL +++ b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) +Generator: bdist_wheel (0.38.4) Root-Is-Purelib: true Tag: py3-none-any diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/top_level.txt similarity index 100% rename from setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt rename to setuptools/_vendor/importlib_metadata-6.0.0.dist-info/top_level.txt diff --git a/setuptools/_vendor/importlib_metadata/__init__.py b/setuptools/_vendor/importlib_metadata/__init__.py index 292e0c6d4a..8864214375 100644 --- a/setuptools/_vendor/importlib_metadata/__init__.py +++ b/setuptools/_vendor/importlib_metadata/__init__.py @@ -14,7 +14,7 @@ import posixpath import collections -from . import _adapters, _meta +from . import _adapters, _meta, _py39compat from ._collections import FreezableDefaultDict, Pair from ._compat import ( NullFinder, @@ -29,7 +29,7 @@ from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap -from typing import List, Mapping, Optional, Union +from typing import List, Mapping, Optional __all__ = [ @@ -139,6 +139,7 @@ class DeprecatedTuple: 1 """ + # Do not remove prior to 2023-05-01 or Python 3.13 _warn = functools.partial( warnings.warn, "EntryPoint tuple interface is deprecated. Access members by name.", @@ -157,6 +158,15 @@ class EntryPoint(DeprecatedTuple): See `the packaging docs on entry points `_ for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] """ pattern = re.compile( @@ -180,6 +190,10 @@ class EntryPoint(DeprecatedTuple): following the attr, and following any extras. """ + name: str + value: str + group: str + dist: Optional['Distribution'] = None def __init__(self, name, value, group): @@ -208,24 +222,32 @@ def attr(self): @property def extras(self): match = self.pattern.match(self.value) - return list(re.finditer(r'\w+', match.group('extras') or '')) + return re.findall(r'\w+', match.group('extras') or '') def _for(self, dist): vars(self).update(dist=dist) return self - def __iter__(self): + def matches(self, **params): """ - Supply iter so one may construct dicts of EntryPoints by name. + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True """ - msg = ( - "Construction of dict of EntryPoints is deprecated in " - "favor of EntryPoints." 
- ) - warnings.warn(msg, DeprecationWarning) - return iter((self.name, self)) - - def matches(self, **params): attrs = (getattr(self, param) for param in params) return all(map(operator.eq, params.values(), attrs)) @@ -251,77 +273,7 @@ def __hash__(self): return hash(self._key()) -class DeprecatedList(list): - """ - Allow an otherwise immutable object to implement mutability - for compatibility. - - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ - - __slots__ = () - - _warn = functools.partial( - warnings.warn, - "EntryPoints list interface is deprecated. Cast to list if needed.", - DeprecationWarning, - stacklevel=pypy_partial(2), - ) - - def _wrap_deprecated_method(method_name: str): # type: ignore - def wrapped(self, *args, **kwargs): - self._warn() - return getattr(super(), method_name)(*args, **kwargs) - - return method_name, wrapped - - locals().update( - map( - _wrap_deprecated_method, - '__setitem__ __delitem__ append reverse extend pop remove ' - '__iadd__ insert sort'.split(), - ) - ) - - def __add__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - return self.__class__(tuple(self) + other) - - def __eq__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - - return tuple(self).__eq__(other) - - -class EntryPoints(DeprecatedList): +class EntryPoints(tuple): """ An immutable collection of selectable EntryPoint objects. """ @@ -332,14 +284,6 @@ def __getitem__(self, name): # -> EntryPoint: """ Get the EntryPoint in self matching name. """ - if isinstance(name, int): - warnings.warn( - "Accessing entry points by index is deprecated. " - "Cast to tuple if needed.", - DeprecationWarning, - stacklevel=2, - ) - return super().__getitem__(name) try: return next(iter(self.select(name=name))) except StopIteration: @@ -350,7 +294,7 @@ def select(self, **params): Select entry points from self that match the given parameters (typically group and/or name). """ - return EntryPoints(ep for ep in self if ep.matches(**params)) + return EntryPoints(ep for ep in self if _py39compat.ep_matches(ep, **params)) @property def names(self): @@ -363,10 +307,6 @@ def names(self): def groups(self): """ Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() """ return {ep.group for ep in self} @@ -382,101 +322,6 @@ def _from_text(text): ) -class Deprecated: - """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. - - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ - - _warn = functools.partial( - warnings.warn, - "SelectableGroups dict interface is deprecated. 
Use select.", - DeprecationWarning, - stacklevel=pypy_partial(2), - ) - - def __getitem__(self, name): - self._warn() - return super().__getitem__(name) - - def get(self, name, default=None): - self._warn() - return super().get(name, default) - - def __iter__(self): - self._warn() - return super().__iter__() - - def __contains__(self, *args): - self._warn() - return super().__contains__(*args) - - def keys(self): - self._warn() - return super().keys() - - def values(self): - self._warn() - return super().values() - - -class SelectableGroups(Deprecated, dict): - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - - @classmethod - def load(cls, eps): - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return cls((group, EntryPoints(eps)) for group, eps in grouped) - - @property - def _all(self): - """ - Reconstruct a list of all entrypoints from the groups. - """ - groups = super(Deprecated, self).values() - return EntryPoints(itertools.chain.from_iterable(groups)) - - @property - def groups(self): - return self._all.groups - - @property - def names(self): - """ - for coverage: - >>> SelectableGroups().names - set() - """ - return self._all.names - - def select(self, **params): - if not params: - return self - return self._all.select(**params) - - class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" @@ -501,7 +346,7 @@ def __repr__(self): return f'' -class Distribution: +class Distribution(metaclass=abc.ABCMeta): """A Python distribution package.""" @abc.abstractmethod @@ -520,7 +365,7 @@ def locate_file(self, path): """ @classmethod - def from_name(cls, name): + def from_name(cls, name: str): """Return the Distribution for the given package name. :param name: The name of the distribution package to search for. @@ -528,13 +373,13 @@ def from_name(cls, name): package, if found. :raises PackageNotFoundError: When the named package's distribution metadata cannot be found. + :raises ValueError: When an invalid value is supplied for name. """ - for resolver in cls._discover_resolvers(): - dists = resolver(DistributionFinder.Context(name=name)) - dist = next(iter(dists), None) - if dist is not None: - return dist - else: + if not name: + raise ValueError("A distribution name is required.") + try: + return next(cls.discover(name=name)) + except StopIteration: raise PackageNotFoundError(name) @classmethod @@ -763,7 +608,7 @@ def __new__(cls, root): return super().__new__(cls) def __init__(self, root): - self.root = str(root) + self.root = root def joinpath(self, child): return pathlib.Path(self.root, child) @@ -928,13 +773,26 @@ def _normalized_name(self): normalized name from the file system path. 
""" stem = os.path.basename(str(self._path)) - return self._name_from_stem(stem) or super()._normalized_name + return ( + pass_none(Prepared.normalize)(self._name_from_stem(stem)) + or super()._normalized_name + ) - def _name_from_stem(self, stem): - name, ext = os.path.splitext(stem) + @staticmethod + def _name_from_stem(stem): + """ + >>> PathDistribution._name_from_stem('foo-3.0.egg-info') + 'foo' + >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info') + 'CherryPy' + >>> PathDistribution._name_from_stem('face.egg-info') + 'face' + >>> PathDistribution._name_from_stem('foo.bar') + """ + filename, ext = os.path.splitext(stem) if ext not in ('.dist-info', '.egg-info'): return - name, sep, rest = stem.partition('-') + name, sep, rest = filename.partition('-') return name @@ -974,29 +832,28 @@ def version(distribution_name): return distribution(distribution_name).version -def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: +_unique = functools.partial( + unique_everseen, + key=_py39compat.normalized_name, +) +""" +Wrapper for ``distributions`` to return unique distributions by name. +""" + + +def entry_points(**params) -> EntryPoints: """Return EntryPoint objects for all installed packages. Pass selection parameters (group or name) to filter the result to entry points matching those properties (see EntryPoints.select()). - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. + :return: EntryPoints for all installed packages. """ - norm_name = operator.attrgetter('_normalized_name') - unique = functools.partial(unique_everseen, key=norm_name) eps = itertools.chain.from_iterable( - dist.entry_points for dist in unique(distributions()) + dist.entry_points for dist in _unique(distributions()) ) - return SelectableGroups.load(eps).select(**params) + return EntryPoints(eps).select(**params) def files(distribution_name): diff --git a/setuptools/_vendor/importlib_metadata/_adapters.py b/setuptools/_vendor/importlib_metadata/_adapters.py index aa460d3eda..e33cba5e44 100644 --- a/setuptools/_vendor/importlib_metadata/_adapters.py +++ b/setuptools/_vendor/importlib_metadata/_adapters.py @@ -1,8 +1,20 @@ +import functools +import warnings import re import textwrap import email.message from ._text import FoldedCase +from ._compat import pypy_partial + + +# Do not remove prior to 2024-01-01 or Python 3.14 +_warn = functools.partial( + warnings.warn, + "Implicit None on return values is deprecated and will raise KeyErrors.", + DeprecationWarning, + stacklevel=pypy_partial(2), +) class Message(email.message.Message): @@ -39,6 +51,16 @@ def __init__(self, *args, **kwargs): def __iter__(self): return super().__iter__() + def __getitem__(self, item): + """ + Warn users that a ``KeyError`` can be expected when a + mising key is supplied. Ref python/importlib_metadata#371. 
+ """ + res = super().__getitem__(item) + if res is None: + _warn() + return res + def _repair_headers(self): def redent(value): "Correct for RFC822 indentation" diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py index ef3136f8d2..84f9eea4f3 100644 --- a/setuptools/_vendor/importlib_metadata/_compat.py +++ b/setuptools/_vendor/importlib_metadata/_compat.py @@ -8,6 +8,7 @@ try: from typing import Protocol except ImportError: # pragma: no cover + # Python 3.7 compatibility from ..typing_extensions import Protocol # type: ignore diff --git a/setuptools/_vendor/importlib_metadata/_meta.py b/setuptools/_vendor/importlib_metadata/_meta.py index 37ee43e6ef..259b15ba19 100644 --- a/setuptools/_vendor/importlib_metadata/_meta.py +++ b/setuptools/_vendor/importlib_metadata/_meta.py @@ -30,18 +30,19 @@ def json(self) -> Dict[str, Union[str, List[str]]]: """ -class SimplePath(Protocol): +class SimplePath(Protocol[_T]): """ A minimal subset of pathlib.Path required by PathDistribution. """ - def joinpath(self) -> 'SimplePath': + def joinpath(self) -> _T: ... # pragma: no cover - def __truediv__(self) -> 'SimplePath': + def __truediv__(self, other: Union[str, _T]) -> _T: ... # pragma: no cover - def parent(self) -> 'SimplePath': + @property + def parent(self) -> _T: ... # pragma: no cover def read_text(self) -> str: diff --git a/setuptools/_vendor/importlib_metadata/_py39compat.py b/setuptools/_vendor/importlib_metadata/_py39compat.py new file mode 100644 index 0000000000..cde4558fbb --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_py39compat.py @@ -0,0 +1,35 @@ +""" +Compatibility layer with Python 3.8/3.9 +""" +from typing import TYPE_CHECKING, Any, Optional + +if TYPE_CHECKING: # pragma: no cover + # Prevent circular imports on runtime. + from . import Distribution, EntryPoint +else: + Distribution = EntryPoint = Any + + +def normalized_name(dist: Distribution) -> Optional[str]: + """ + Honor name normalization for distributions that don't provide ``_normalized_name``. + """ + try: + return dist._normalized_name + except AttributeError: + from . import Prepared # -> delay to prevent circular imports. + + return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name']) + + +def ep_matches(ep: EntryPoint, **params) -> bool: + """ + Workaround for ``EntryPoint`` objects without the ``matches`` method. + """ + try: + return ep.matches(**params) + except AttributeError: + from . import EntryPoint # -> delay to prevent circular imports. + + # Reconstruct the EntryPoint object to make sure it is compatible. + return EntryPoint(ep.name, ep.value, ep.group).matches(**params) diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER similarity index 100% rename from setuptools/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER rename to setuptools/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/LICENSE b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
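Editor's note: the vendored `importlib_metadata/__init__.py` hunks earlier in this patch remove `SelectableGroups`, `DeprecatedList`, and the tuple/dict compatibility shims, so `entry_points()` now always returns the tuple-based `EntryPoints`. A short sketch of the surviving call patterns, assuming the importlib-metadata 6.x semantics vendored above; the `console_scripts` group is standard, while "myscript" is a hypothetical entry point name:

# Sketch of the post-SelectableGroups API (importlib_metadata >= 5 behavior).
from importlib_metadata import entry_points

eps = entry_points(group="console_scripts")  # EntryPoints, a tuple subclass
print(sorted(eps.names))                     # names of the matching entry points

for ep in eps.select(name="myscript"):       # hypothetical name; may match nothing
    command = ep.load()                      # resolve to the target callable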
diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/METADATA b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/METADATA similarity index 62% rename from setuptools/_vendor/importlib_resources-5.4.0.dist-info/METADATA rename to setuptools/_vendor/importlib_resources-5.10.2.dist-info/METADATA index cdb1e78399..a9995f09a3 100644 --- a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/METADATA +++ b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/METADATA @@ -1,41 +1,40 @@ Metadata-Version: 2.1 Name: importlib-resources -Version: 5.4.0 +Version: 5.10.2 Summary: Read resources from Python packages Home-page: https://github.com/python/importlib_resources Author: Barry Warsaw Author-email: barry@python.org -License: UNKNOWN Project-URL: Documentation, https://importlib-resources.readthedocs.io/ -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only -Requires-Python: >=3.6 +Requires-Python: >=3.7 License-File: LICENSE Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10" Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/importlib_resources.svg - :target: `PyPI link`_ + :target: https://pypi.org/project/importlib_resources .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/importlib_resources .. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 @@ -48,9 +47,12 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2022-informational :target: https://blog.jaraco.com/skeleton +.. 
image:: https://tidelift.com/badges/package/pypi/importlib-resources + :target: https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=readme + ``importlib_resources`` is a backport of Python standard library `importlib.resources `_ @@ -74,7 +76,9 @@ were contributed to different versions in the standard library: * - importlib_resources - stdlib - * - 5.2 + * - 5.9 + - 3.12 + * - 5.7 - 3.11 * - 5.0 - 3.10 @@ -83,4 +87,18 @@ were contributed to different versions in the standard library: * - 0.5 (?) - 3.7 +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD new file mode 100644 index 0000000000..50fe7fc5c2 --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD @@ -0,0 +1,77 @@ +importlib_resources-5.10.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_resources-5.10.2.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +importlib_resources-5.10.2.dist-info/METADATA,sha256=Xo5ntATvDYUxdmW8tr8kxtfdiOC9889mOk-LE1LtZfI,4111 +importlib_resources-5.10.2.dist-info/RECORD,, +importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 +importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 +importlib_resources/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/__pycache__/_adapters.cpython-38.pyc,, +importlib_resources/__pycache__/_common.cpython-38.pyc,, +importlib_resources/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/__pycache__/_itertools.cpython-38.pyc,, +importlib_resources/__pycache__/_legacy.cpython-38.pyc,, +importlib_resources/__pycache__/abc.cpython-38.pyc,, +importlib_resources/__pycache__/readers.cpython-38.pyc,, +importlib_resources/__pycache__/simple.cpython-38.pyc,, +importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 +importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457 +importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923 +importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 +importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481 +importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140 +importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581 +importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576 +importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/__pycache__/__init__.cpython-38.pyc,, 
+importlib_resources/tests/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/tests/__pycache__/_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_open.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_read.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc,, +importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc,, +importlib_resources/tests/__pycache__/util.cpython-38.pyc,, +importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708 +importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039 +importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 +importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 +importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260 +importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968 +importlib_resources/tests/test_files.py,sha256=1Y8da-g0xOQLzuREDYUiRc_qhWlvFNeydW_mUH7l15w,3251 +importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565 +importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103 +importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408 +importlib_resources/tests/test_reader.py,sha256=nPhldbYPq3fXoQs0ZAub4atjhp2lgNyLNv2G1pg6Agw,4480 
+importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4I_LpN8DYpHtT0,8478 +importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417 +importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873 +importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 +importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/setuptools/_vendor/packaging-21.3.dist-info/REQUESTED b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED similarity index 100% rename from setuptools/_vendor/packaging-21.3.dist-info/REQUESTED rename to setuptools/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/WHEEL similarity index 65% rename from pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL rename to setuptools/_vendor/importlib_resources-5.10.2.dist-info/WHEEL index 5bad85fdc1..57e3d840d5 100644 --- a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL +++ b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) +Generator: bdist_wheel (0.38.4) Root-Is-Purelib: true Tag: py3-none-any diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt similarity index 100% rename from setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt rename to setuptools/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/LICENSE b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/LICENSE deleted file mode 100644 index 378b991a4d..0000000000 --- a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2017-2019 Brett Cannon, Barry Warsaw - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
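The RECORD rows added and removed above follow the layout mandated by the wheel spec (PEP 376 as amended by PEP 427): each row is `path,hash,size`, where the hash field is the algorithm name, an equals sign, and the urlsafe-base64 digest with the trailing `=` padding stripped. A minimal sketch of how one such row can be recomputed; the helper name is ours, not part of setuptools:

import base64
import hashlib
import pathlib


def record_row(path: pathlib.Path) -> str:
    # RECORD layout per the wheel spec: path,sha256=<digest>,size
    data = path.read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path.as_posix()},sha256={digest.rstrip(b'=').decode()},{len(data)}"

This is why the vendoring diff touches so many RECORD lines: any change to a file's bytes changes both the digest and the size column.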
diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/RECORD b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/RECORD deleted file mode 100644 index 7a68a2f26c..0000000000 --- a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/RECORD +++ /dev/null @@ -1,75 +0,0 @@ -importlib_resources-5.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -importlib_resources-5.4.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568 -importlib_resources-5.4.0.dist-info/METADATA,sha256=i5jH25IbM0Ls6u6UzSSCOa0c8hpDvePxqgnQwh2T5Io,3135 -importlib_resources-5.4.0.dist-info/RECORD,, -importlib_resources-5.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources-5.4.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -importlib_resources-5.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 -importlib_resources/__init__.py,sha256=zuA0lbRgtVVCcAztM0z5LuBiOCV9L_3qtI6mW2p5xAg,525 -importlib_resources/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/__pycache__/_adapters.cpython-310.pyc,, -importlib_resources/__pycache__/_common.cpython-310.pyc,, -importlib_resources/__pycache__/_compat.cpython-310.pyc,, -importlib_resources/__pycache__/_itertools.cpython-310.pyc,, -importlib_resources/__pycache__/_legacy.cpython-310.pyc,, -importlib_resources/__pycache__/abc.cpython-310.pyc,, -importlib_resources/__pycache__/readers.cpython-310.pyc,, -importlib_resources/__pycache__/simple.cpython-310.pyc,, -importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 -importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741 -importlib_resources/_compat.py,sha256=3LpkIfeN9x4oXjRea5TxZP5VYhPlzuVRhGe-hEv-S0s,2704 -importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 -importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494 -importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886 -importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566 -importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836 -importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/__pycache__/_compat.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_contents.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_files.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_open.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_path.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_read.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_reader.cpython-310.pyc,, -importlib_resources/tests/__pycache__/test_resource.cpython-310.pyc,, -importlib_resources/tests/__pycache__/update-zips.cpython-310.pyc,, -importlib_resources/tests/__pycache__/util.cpython-310.pyc,, -importlib_resources/tests/_compat.py,sha256=QGI_4p0DXybypoYvw0kr3jfQqvls3p8u4wy4Wvf0Z_o,435 -importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/__pycache__/__init__.cpython-310.pyc,, 
-importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 -importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 -importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/one/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 -importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/two/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 -importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 -importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 -importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260 -importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968 -importlib_resources/tests/test_files.py,sha256=1Nqv6VM_MjfwrmtXYL1a1CMT0QhCxi3hNMqwXlfMQTg,1184 -importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565 -importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103 -importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408 -importlib_resources/tests/test_reader.py,sha256=hgXHquqAEnioemv20ZZcDlVaiOrcZKADO37_FkiQ00Y,4286 -importlib_resources/tests/test_resource.py,sha256=DqfLNc9kaN5obqxU8kn0sRUWMf9MygagrpfMV5-QfWg,8145 -importlib_resources/tests/update-zips.py,sha256=x3iJVqWnMM5qp4Oob2Pl3o6Yi03sUjEv_5Wf-UCg3ps,1415 -importlib_resources/tests/util.py,sha256=X1j-0C96pu3_tmtJuLhzfBfcfMenOphDLkxtCt5j7t4,5309 -importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 -importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-310.pyc,, -importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/WHEEL b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/WHEEL deleted file mode 100644 index 5bad85fdc1..0000000000 --- a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: 
bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/setuptools/_vendor/importlib_resources/_common.py b/setuptools/_vendor/importlib_resources/_common.py index a12e2c75d1..3c6de1cfb2 100644 --- a/setuptools/_vendor/importlib_resources/_common.py +++ b/setuptools/_vendor/importlib_resources/_common.py @@ -5,25 +5,58 @@ import contextlib import types import importlib +import inspect +import warnings +import itertools -from typing import Union, Optional +from typing import Union, Optional, cast from .abc import ResourceReader, Traversable from ._compat import wrap_spec Package = Union[types.ModuleType, str] +Anchor = Package -def files(package): - # type: (Package) -> Traversable +def package_to_anchor(func): """ - Get a Traversable resource from a package + Replace 'package' parameter as 'anchor' and warn about the change. + + Other errors should fall through. + + >>> files('a', 'b') + Traceback (most recent call last): + TypeError: files() takes from 0 to 1 positional arguments but 2 were given + """ + undefined = object() + + @functools.wraps(func) + def wrapper(anchor=undefined, package=undefined): + if package is not undefined: + if anchor is not undefined: + return func(anchor, package) + warnings.warn( + "First parameter to files is renamed to 'anchor'", + DeprecationWarning, + stacklevel=2, + ) + return func(package) + elif anchor is undefined: + return func() + return func(anchor) + + return wrapper + + +@package_to_anchor +def files(anchor: Optional[Anchor] = None) -> Traversable: + """ + Get a Traversable resource for an anchor. """ - return from_package(get_package(package)) + return from_package(resolve(anchor)) -def get_resource_reader(package): - # type: (types.ModuleType) -> Optional[ResourceReader] +def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]: """ Return the package's loader if it's a ResourceReader. """ @@ -39,24 +72,39 @@ def get_resource_reader(package): return reader(spec.name) # type: ignore -def resolve(cand): - # type: (Package) -> types.ModuleType - return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) +@functools.singledispatch +def resolve(cand: Optional[Anchor]) -> types.ModuleType: + return cast(types.ModuleType, cand) + + +@resolve.register +def _(cand: str) -> types.ModuleType: + return importlib.import_module(cand) + +@resolve.register +def _(cand: None) -> types.ModuleType: + return resolve(_infer_caller().f_globals['__name__']) -def get_package(package): - # type: (Package) -> types.ModuleType - """Take a package name or module object and return the module. - Raise an exception if the resolved module is not a package. +def _infer_caller(): """ - resolved = resolve(package) - if wrap_spec(resolved).submodule_search_locations is None: - raise TypeError(f'{package!r} is not a package') - return resolved + Walk the stack and find the frame of the first caller not in this module. + """ + + def is_this_file(frame_info): + return frame_info.filename == __file__ + + def is_wrapper(frame_info): + return frame_info.function == 'wrapper' + + not_this_file = itertools.filterfalse(is_this_file, inspect.stack()) + # also exclude 'wrapper' due to singledispatch in the call stack + callers = itertools.filterfalse(is_wrapper, not_this_file) + return next(callers).frame -def from_package(package): +def from_package(package: types.ModuleType): """ Return a Traversable object for the given package. 
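The hunks above replace the old get_package() plumbing with a singledispatch resolve() plus _infer_caller(), so files() now accepts three anchor styles. A usage sketch, where ``mypkg`` is a hypothetical package shipping a ``data.txt`` resource:

import importlib_resources as resources

# 1. Dotted name: resolve() imports it via importlib.import_module.
text = resources.files('mypkg').joinpath('data.txt').read_text()

# 2. Module object: resolve() returns it unchanged.
import mypkg
text = resources.files(mypkg).joinpath('data.txt').read_text()

# 3. No anchor: resolve(None) calls _infer_caller() to walk the stack,
#    so code living inside mypkg can simply write:
text = resources.files().joinpath('data.txt').read_text()

Calling files(package=...) by keyword still works, but the package_to_anchor wrapper shown above now emits a DeprecationWarning for that spelling.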
@@ -67,7 +115,14 @@ def from_package(package):
 
 
 @contextlib.contextmanager
-def _tempfile(reader, suffix=''):
+def _tempfile(
+    reader,
+    suffix='',
+    # gh-93353: Keep a reference to call os.remove() in late Python
+    # finalization.
+    *,
+    _os_remove=os.remove,
+):
     # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
     # blocks due to the need to close the temporary file to work on Windows
     # properly.
@@ -81,18 +136,35 @@ def _tempfile(reader, suffix=''):
         yield pathlib.Path(raw_path)
     finally:
         try:
-            os.remove(raw_path)
+            _os_remove(raw_path)
         except FileNotFoundError:
             pass
 
 
+def _temp_file(path):
+    return _tempfile(path.read_bytes, suffix=path.name)
+
+
+def _is_present_dir(path: Traversable) -> bool:
+    """
+    Some Traversables implement ``is_dir()`` to raise an
+    exception (i.e. ``FileNotFoundError``) when the
+    directory doesn't exist. This function wraps that call
+    to always return a boolean and only return True
+    if there's a dir and it exists.
+    """
+    with contextlib.suppress(FileNotFoundError):
+        return path.is_dir()
+    return False
+
+
 @functools.singledispatch
 def as_file(path):
     """
     Given a Traversable object, return that object as a
     path on the local file system in a context manager.
     """
-    return _tempfile(path.read_bytes, suffix=path.name)
+    return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
 
 
 @as_file.register(pathlib.Path)
@@ -102,3 +174,34 @@ def _(path):
     Degenerate behavior for pathlib.Path objects.
     """
     yield path
+
+
+@contextlib.contextmanager
+def _temp_path(dir: tempfile.TemporaryDirectory):
+    """
+    Wrap tempfile.TemporaryDirectory to return a pathlib object.
+    """
+    with dir as result:
+        yield pathlib.Path(result)
+
+
+@contextlib.contextmanager
+def _temp_dir(path):
+    """
+    Given a traversable dir, recursively replicate the whole tree
+    to the file system in a context manager.
+    """
+    assert path.is_dir()
+    with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
+        yield _write_contents(temp_dir, path)
+
+
+def _write_contents(target, source):
+    child = target.joinpath(source.name)
+    if source.is_dir():
+        child.mkdir()
+        for item in source.iterdir():
+            _write_contents(child, item)
+    else:
+        child.write_bytes(source.read_bytes())
+    return child
diff --git a/setuptools/_vendor/importlib_resources/_compat.py b/setuptools/_vendor/importlib_resources/_compat.py
index cb9fc820cb..8b5b1d280f 100644
--- a/setuptools/_vendor/importlib_resources/_compat.py
+++ b/setuptools/_vendor/importlib_resources/_compat.py
@@ -1,9 +1,12 @@
 # flake8: noqa
 
 import abc
+import os
 import sys
 import pathlib
 from contextlib import suppress
+from typing import Union
+
 
 if sys.version_info >= (3, 10):
     from zipfile import Path as ZipPath  # type: ignore
@@ -96,3 +99,10 @@ def wrap_spec(package):
     from . import _adapters
 
     return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
+
+
+if sys.version_info >= (3, 9):
+    StrPath = Union[str, os.PathLike[str]]
+else:
+    # PathLike is only subscriptable at runtime in 3.9+
+    StrPath = Union[str, "os.PathLike[str]"]
diff --git a/setuptools/_vendor/importlib_resources/_legacy.py b/setuptools/_vendor/importlib_resources/_legacy.py
index 1d5d3f1fbb..b1ea8105da 100644
--- a/setuptools/_vendor/importlib_resources/_legacy.py
+++ b/setuptools/_vendor/importlib_resources/_legacy.py
@@ -27,8 +27,7 @@ def wrapper(*args, **kwargs):
     return wrapper
 
 
-def normalize_path(path):
-    # type: (Any) -> str
+def normalize_path(path: Any) -> str:
    """Normalize a path by ensuring it is a string.
If the resulting string contains path separators, an exception is raised. diff --git a/setuptools/_vendor/importlib_resources/abc.py b/setuptools/_vendor/importlib_resources/abc.py index d39dc1adba..23b6aeafe4 100644 --- a/setuptools/_vendor/importlib_resources/abc.py +++ b/setuptools/_vendor/importlib_resources/abc.py @@ -1,7 +1,13 @@ import abc -from typing import BinaryIO, Iterable, Text +import io +import itertools +import pathlib +from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional -from ._compat import runtime_checkable, Protocol +from ._compat import runtime_checkable, Protocol, StrPath + + +__all__ = ["ResourceReader", "Traversable", "TraversableResources"] class ResourceReader(metaclass=abc.ABCMeta): @@ -46,27 +52,34 @@ def contents(self) -> Iterable[str]: raise FileNotFoundError +class TraversalError(Exception): + pass + + @runtime_checkable class Traversable(Protocol): """ An object with a subset of pathlib.Path methods suitable for traversing directories and opening files. + + Any exceptions that occur when accessing the backing resource + may propagate unaltered. """ @abc.abstractmethod - def iterdir(self): + def iterdir(self) -> Iterator["Traversable"]: """ Yield Traversable objects in self """ - def read_bytes(self): + def read_bytes(self) -> bytes: """ Read contents of self as bytes """ with self.open('rb') as strm: return strm.read() - def read_text(self, encoding=None): + def read_text(self, encoding: Optional[str] = None) -> str: """ Read contents of self as text """ @@ -85,13 +98,32 @@ def is_file(self) -> bool: Return True if self is a file """ - @abc.abstractmethod - def joinpath(self, child): + def joinpath(self, *descendants: StrPath) -> "Traversable": """ - Return Traversable child in self + Return Traversable resolved with any descendants applied. + + Each descendant should be a path segment relative to self + and each may contain multiple levels separated by + ``posixpath.sep`` (``/``). """ + if not descendants: + return self + names = itertools.chain.from_iterable( + path.parts for path in map(pathlib.PurePosixPath, descendants) + ) + target = next(names) + matches = ( + traversable for traversable in self.iterdir() if traversable.name == target + ) + try: + match = next(matches) + except StopIteration: + raise TraversalError( + "Target not found during traversal.", target, list(names) + ) + return match.joinpath(*names) - def __truediv__(self, child): + def __truediv__(self, child: StrPath) -> "Traversable": """ Return Traversable child in self """ @@ -107,7 +139,8 @@ def open(self, mode='r', *args, **kwargs): accepted by io.TextIOWrapper. """ - @abc.abstractproperty + @property + @abc.abstractmethod def name(self) -> str: """ The base name of this object without any parent references. 
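The new default Traversable.joinpath() above first flattens every descendant into single-level names, splitting on ``posixpath.sep``, and then walks them one at a time, raising TraversalError as soon as a segment cannot be found. The normalization step in isolation, with example arguments of our choosing:

import itertools
import pathlib

descendants = ('data', 'conf/defaults.toml')  # example arguments
names = itertools.chain.from_iterable(
    path.parts for path in map(pathlib.PurePosixPath, descendants)
)
print(list(names))  # ['data', 'conf', 'defaults.toml']

MultiplexedPath in the readers.py hunk below relies on exactly this contract: it attempts the inherited traversal and, on TraversalError, falls back to a path under its first entry that is known not to exist.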
@@ -121,17 +154,17 @@ class TraversableResources(ResourceReader): """ @abc.abstractmethod - def files(self): + def files(self) -> "Traversable": """Return a Traversable object for the loaded package.""" - def open_resource(self, resource): + def open_resource(self, resource: StrPath) -> io.BufferedReader: return self.files().joinpath(resource).open('rb') - def resource_path(self, resource): + def resource_path(self, resource: Any) -> NoReturn: raise FileNotFoundError(resource) - def is_resource(self, path): + def is_resource(self, path: StrPath) -> bool: return self.files().joinpath(path).is_file() - def contents(self): + def contents(self) -> Iterator[str]: return (item.name for item in self.files().iterdir()) diff --git a/setuptools/_vendor/importlib_resources/readers.py b/setuptools/_vendor/importlib_resources/readers.py index f1190ca452..ab34db7409 100644 --- a/setuptools/_vendor/importlib_resources/readers.py +++ b/setuptools/_vendor/importlib_resources/readers.py @@ -82,15 +82,13 @@ def is_dir(self): def is_file(self): return False - def joinpath(self, child): - # first try to find child in current paths - for file in self.iterdir(): - if file.name == child: - return file - # if it does not exist, construct it with the first path - return self._paths[0] / child - - __truediv__ = joinpath + def joinpath(self, *descendants): + try: + return super().joinpath(*descendants) + except abc.TraversalError: + # One of the paths did not resolve (a directory does not exist). + # Just return something that will not exist. + return self._paths[0].joinpath(*descendants) def open(self, *args, **kwargs): raise FileNotFoundError(f'{self} is not a file') diff --git a/setuptools/_vendor/importlib_resources/simple.py b/setuptools/_vendor/importlib_resources/simple.py index da073cbdb1..7770c922c8 100644 --- a/setuptools/_vendor/importlib_resources/simple.py +++ b/setuptools/_vendor/importlib_resources/simple.py @@ -16,31 +16,28 @@ class SimpleReader(abc.ABC): provider. """ - @abc.abstractproperty - def package(self): - # type: () -> str + @property + @abc.abstractmethod + def package(self) -> str: """ The name of the package for which this reader loads resources. """ @abc.abstractmethod - def children(self): - # type: () -> List['SimpleReader'] + def children(self) -> List['SimpleReader']: """ Obtain an iterable of SimpleReader for available child containers (e.g. directories). """ @abc.abstractmethod - def resources(self): - # type: () -> List[str] + def resources(self) -> List[str]: """ Obtain available named resources for this virtual package. """ @abc.abstractmethod - def open_binary(self, resource): - # type: (str) -> BinaryIO + def open_binary(self, resource: str) -> BinaryIO: """ Obtain a File-like for a named resource. """ @@ -50,39 +47,12 @@ def name(self): return self.package.split('.')[-1] -class ResourceHandle(Traversable): - """ - Handle to a named resource in a ResourceReader. - """ - - def __init__(self, parent, name): - # type: (ResourceContainer, str) -> None - self.parent = parent - self.name = name # type: ignore - - def is_file(self): - return True - - def is_dir(self): - return False - - def open(self, mode='r', *args, **kwargs): - stream = self.parent.reader.open_binary(self.name) - if 'b' not in mode: - stream = io.TextIOWrapper(*args, **kwargs) - return stream - - def joinpath(self, name): - raise RuntimeError("Cannot traverse into a resource") - - class ResourceContainer(Traversable): """ Traversable container for a package's resources via its reader. 
""" - def __init__(self, reader): - # type: (SimpleReader) -> None + def __init__(self, reader: SimpleReader): self.reader = reader def is_dir(self): @@ -99,10 +69,30 @@ def iterdir(self): def open(self, *args, **kwargs): raise IsADirectoryError() + +class ResourceHandle(Traversable): + """ + Handle to a named resource in a ResourceReader. + """ + + def __init__(self, parent: ResourceContainer, name: str): + self.parent = parent + self.name = name # type: ignore + + def is_file(self): + return True + + def is_dir(self): + return False + + def open(self, mode='r', *args, **kwargs): + stream = self.parent.reader.open_binary(self.name) + if 'b' not in mode: + stream = io.TextIOWrapper(*args, **kwargs) + return stream + def joinpath(self, name): - return next( - traversable for traversable in self.iterdir() if traversable.name == name - ) + raise RuntimeError("Cannot traverse into a resource") class TraversableReader(TraversableResources, SimpleReader): diff --git a/setuptools/_vendor/importlib_resources/tests/_compat.py b/setuptools/_vendor/importlib_resources/tests/_compat.py index 4c99cffd02..e7bf06dd4e 100644 --- a/setuptools/_vendor/importlib_resources/tests/_compat.py +++ b/setuptools/_vendor/importlib_resources/tests/_compat.py @@ -6,7 +6,20 @@ except ImportError: # Python 3.9 and earlier class import_helper: # type: ignore - from test.support import modules_setup, modules_cleanup + from test.support import ( + modules_setup, + modules_cleanup, + DirsOnSysPath, + CleanImport, + ) + + +try: + from test.support import os_helper # type: ignore +except ImportError: + # Python 3.9 compat + class os_helper: # type:ignore + from test.support import temp_dir try: diff --git a/setuptools/_vendor/importlib_resources/tests/_path.py b/setuptools/_vendor/importlib_resources/tests/_path.py new file mode 100644 index 0000000000..c630e4d3d3 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/_path.py @@ -0,0 +1,50 @@ +import pathlib +import functools + + +#### +# from jaraco.path 3.4 + + +def build(spec, prefix=pathlib.Path()): + """ + Build a set of files/directories, as described by the spec. + + Each key represents a pathname, and the value represents + the content. Content may be a nested directory. + + >>> spec = { + ... 'README.txt': "A README file", + ... "foo": { + ... "__init__.py": "", + ... "bar": { + ... "__init__.py": "", + ... }, + ... "baz.py": "# Some code", + ... } + ... } + >>> tmpdir = getfixture('tmpdir') + >>> build(spec, tmpdir) + """ + for name, contents in spec.items(): + create(contents, pathlib.Path(prefix) / name) + + +@functools.singledispatch +def create(content, path): + path.mkdir(exist_ok=True) + build(content, prefix=path) # type: ignore + + +@create.register +def _(content: bytes, path): + path.write_bytes(content) + + +@create.register +def _(content: str, path): + path.write_text(content) + + +# end from jaraco.path +#### diff --git a/setuptools/_vendor/importlib_resources/tests/test_files.py b/setuptools/_vendor/importlib_resources/tests/test_files.py index 2676b49e7d..d258fb5f0f 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_files.py +++ b/setuptools/_vendor/importlib_resources/tests/test_files.py @@ -1,10 +1,23 @@ import typing +import textwrap import unittest +import warnings +import importlib +import contextlib import importlib_resources as resources -from importlib_resources.abc import Traversable +from ..abc import Traversable from . import data01 from . import util +from . 
import _path +from ._compat import os_helper, import_helper + + +@contextlib.contextmanager +def suppress_known_deprecation(): + with warnings.catch_warnings(record=True) as ctx: + warnings.simplefilter('default', category=DeprecationWarning) + yield ctx class FilesTests: @@ -25,6 +38,14 @@ def test_read_text(self): def test_traversable(self): assert isinstance(resources.files(self.data), Traversable) + def test_old_parameter(self): + """ + Files used to take a 'package' parameter. Make sure anyone + passing by name is still supported. + """ + with suppress_known_deprecation(): + resources.files(package=self.data) + class OpenDiskTests(FilesTests, unittest.TestCase): def setUp(self): @@ -42,5 +63,50 @@ def setUp(self): self.data = namespacedata01 +class SiteDir: + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + self.site_dir = self.fixtures.enter_context(os_helper.temp_dir()) + self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir)) + self.fixtures.enter_context(import_helper.CleanImport()) + + +class ModulesFilesTests(SiteDir, unittest.TestCase): + def test_module_resources(self): + """ + A module can have resources found adjacent to the module. + """ + spec = { + 'mod.py': '', + 'res.txt': 'resources are the best', + } + _path.build(spec, self.site_dir) + import mod + + actual = resources.files(mod).joinpath('res.txt').read_text() + assert actual == spec['res.txt'] + + +class ImplicitContextFilesTests(SiteDir, unittest.TestCase): + def test_implicit_files(self): + """ + Without any parameter, files() will infer the location as the caller. + """ + spec = { + 'somepkg': { + '__init__.py': textwrap.dedent( + """ + import importlib_resources as res + val = res.files().joinpath('res.txt').read_text() + """ + ), + 'res.txt': 'resources are the best', + }, + } + _path.build(spec, self.site_dir) + assert importlib.import_module('somepkg').val == 'resources are the best' + + if __name__ == '__main__': unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/test_reader.py b/setuptools/_vendor/importlib_resources/tests/test_reader.py index 16841a508a..1c8ebeeb13 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_reader.py +++ b/setuptools/_vendor/importlib_resources/tests/test_reader.py @@ -75,6 +75,11 @@ def test_join_path(self): str(path.joinpath('imaginary'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'imaginary'), ) + self.assertEqual(path.joinpath(), path) + + def test_join_path_compound(self): + path = MultiplexedPath(self.folder) + assert not path.joinpath('imaginary/foo.py').exists() def test_repr(self): self.assertEqual( diff --git a/setuptools/_vendor/importlib_resources/tests/test_resource.py b/setuptools/_vendor/importlib_resources/tests/test_resource.py index 5affd8b0de..8239027167 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_resource.py +++ b/setuptools/_vendor/importlib_resources/tests/test_resource.py @@ -111,6 +111,14 @@ def test_submodule_contents_by_name(self): {'__init__.py', 'binary.file'}, ) + def test_as_file_directory(self): + with resources.as_file(resources.files('ziptestdata')) as data: + assert data.name == 'ziptestdata' + assert data.is_dir() + assert data.joinpath('subdirectory').is_dir() + assert len(list(data.iterdir())) + assert not data.parent.exists() + class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): ZIP_MODULE = zipdata02 # type: ignore diff --git a/setuptools/_vendor/importlib_resources/tests/update-zips.py 
b/setuptools/_vendor/importlib_resources/tests/update-zips.py index 9ef0224ca6..231334aa7e 100644 --- a/setuptools/_vendor/importlib_resources/tests/update-zips.py +++ b/setuptools/_vendor/importlib_resources/tests/update-zips.py @@ -42,7 +42,7 @@ def generate(suffix): def walk(datapath): for dirpath, dirnames, filenames in os.walk(datapath): - with contextlib.suppress(KeyError): + with contextlib.suppress(ValueError): dirnames.remove('__pycache__') for filename in filenames: res = pathlib.Path(dirpath) / filename diff --git a/setuptools/_vendor/importlib_resources/tests/util.py b/setuptools/_vendor/importlib_resources/tests/util.py index c6d83e4bd3..b596c0ce4f 100644 --- a/setuptools/_vendor/importlib_resources/tests/util.py +++ b/setuptools/_vendor/importlib_resources/tests/util.py @@ -3,7 +3,7 @@ import io import sys import types -from pathlib import Path, PurePath +import pathlib from . import data01 from . import zipdata01 @@ -94,7 +94,7 @@ def test_string_path(self): def test_pathlib_path(self): # Passing in a pathlib.PurePath object for the path should succeed. - path = PurePath('utf-8.file') + path = pathlib.PurePath('utf-8.file') self.execute(data01, path) def test_importing_module_as_side_effect(self): @@ -102,17 +102,6 @@ def test_importing_module_as_side_effect(self): del sys.modules[data01.__name__] self.execute(data01.__name__, 'utf-8.file') - def test_non_package_by_name(self): - # The anchor package cannot be a module. - with self.assertRaises(TypeError): - self.execute(__name__, 'utf-8.file') - - def test_non_package_by_package(self): - # The anchor package cannot be a module. - with self.assertRaises(TypeError): - module = sys.modules['importlib_resources.tests.util'] - self.execute(module, 'utf-8.file') - def test_missing_path(self): # Attempting to open or read or request the path for a # non-existent path should succeed if open_resource @@ -144,7 +133,7 @@ class ZipSetupBase: @classmethod def setUpClass(cls): - data_path = Path(cls.ZIP_MODULE.__file__) + data_path = pathlib.Path(cls.ZIP_MODULE.__file__) data_dir = data_path.parent cls._zip_path = str(data_dir / 'ziptestdata.zip') sys.path.append(cls._zip_path) diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/RECORD b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/RECORD deleted file mode 100644 index f40d48c7ee..0000000000 --- a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -jaraco.context-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jaraco.context-4.1.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -jaraco.context-4.1.1.dist-info/METADATA,sha256=bvqDGCk6Z7TkohUqr5XZm19SbF9mVxrtXjN6uF_BAMQ,2031 -jaraco.context-4.1.1.dist-info/RECORD,, -jaraco.context-4.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -jaraco.context-4.1.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 -jaraco/__pycache__/context.cpython-310.pyc,, -jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420 diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/WHEEL b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/WHEEL deleted file mode 100644 index 5bad85fdc1..0000000000 --- a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER 
b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER similarity index 100% rename from setuptools/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER rename to setuptools/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/LICENSE b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/LICENSE similarity index 100% rename from setuptools/_vendor/jaraco.context-4.1.1.dist-info/LICENSE rename to setuptools/_vendor/jaraco.context-4.3.0.dist-info/LICENSE diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/METADATA b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA similarity index 50% rename from pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/METADATA rename to setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA index 908711b7ca..281137a035 100644 --- a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/METADATA +++ b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA @@ -1,39 +1,38 @@ Metadata-Version: 2.1 Name: jaraco.context -Version: 4.1.1 +Version: 4.3.0 Summary: Context managers by jaraco Home-page: https://github.com/jaraco/jaraco.context Author: Jason R. Coombs Author-email: jaraco@jaraco.com -License: UNKNOWN -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only -Requires-Python: >=3.6 +Requires-Python: >=3.7 License-File: LICENSE Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/jaraco.context.svg - :target: `PyPI link`_ + :target: https://pypi.org/project/jaraco.context .. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/jaraco.context .. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22 @@ -46,7 +45,24 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr .. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest -.. 
image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2023-informational :target: https://blog.jaraco.com/skeleton +.. image:: https://tidelift.com/badges/package/pypi/jaraco.context + :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD new file mode 100644 index 0000000000..561d3a1bdf --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.context-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jaraco.context-4.3.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +jaraco.context-4.3.0.dist-info/METADATA,sha256=GqMykAm33E7Tt_t_MHc5O7GJN62Qwp6MEHX9WD-LPow,2958 +jaraco.context-4.3.0.dist-info/RECORD,, +jaraco.context-4.3.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +jaraco.context-4.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 +jaraco/__pycache__/context.cpython-38.pyc,, +jaraco/context.py,sha256=vlyDzb_PvZ9H7R9bbTr_CMRnveW5Dc56eC7eyd_GfoA,7460 diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/WHEEL new file mode 100644 index 0000000000..57e3d840d5 --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt similarity index 100% rename from setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt rename to setuptools/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/RECORD deleted file mode 100644 index fbda3d1f03..0000000000 --- a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -jaraco.functools-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jaraco.functools-3.5.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -jaraco.functools-3.5.0.dist-info/METADATA,sha256=cE9C7u9bo_GjLAuw4nML67a25kUaPDiHn4j03lG4jd0,2276 -jaraco.functools-3.5.0.dist-info/RECORD,, -jaraco.functools-3.5.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -jaraco.functools-3.5.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 -jaraco/__pycache__/functools.cpython-310.pyc,, -jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494 diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL deleted file mode 100644 index 5bad85fdc1..0000000000 --- 
a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/INSTALLER similarity index 100% rename from setuptools/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER rename to setuptools/_vendor/jaraco.functools-3.5.2.dist-info/INSTALLER diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/LICENSE similarity index 100% rename from setuptools/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE rename to setuptools/_vendor/jaraco.functools-3.5.2.dist-info/LICENSE diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/METADATA similarity index 60% rename from pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/METADATA rename to setuptools/_vendor/jaraco.functools-3.5.2.dist-info/METADATA index 12dfbdd00c..fa8f0211b9 100644 --- a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/METADATA +++ b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/METADATA @@ -1,12 +1,10 @@ Metadata-Version: 2.1 Name: jaraco.functools -Version: 3.5.0 +Version: 3.5.2 Summary: Functools like those found in stdlib Home-page: https://github.com/jaraco/jaraco.functools Author: Jason R. Coombs Author-email: jaraco@jaraco.com -License: UNKNOWN -Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License @@ -16,18 +14,20 @@ Requires-Python: >=3.7 License-File: LICENSE Requires-Dist: more-itertools Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' Provides-Extra: testing Requires-Dist: pytest (>=6) ; extra == 'testing' Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' Requires-Dist: jaraco.classes ; extra == 'testing' Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg :target: `PyPI link`_ @@ -50,9 +50,26 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr .. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2022-informational :target: https://blog.jaraco.com/skeleton +.. 
image:: https://tidelift.com/badges/package/pypi/jaraco.functools + :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme + Additional functools in the spirit of stdlib's functools. +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. diff --git a/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/RECORD new file mode 100644 index 0000000000..127b826762 --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.functools-3.5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jaraco.functools-3.5.2.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +jaraco.functools-3.5.2.dist-info/METADATA,sha256=ZIViwS4ZOmaWwA5ArwZ_xXQGR9WDnUSzx-0MO5kGPi8,3154 +jaraco.functools-3.5.2.dist-info/RECORD,, +jaraco.functools-3.5.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +jaraco.functools-3.5.2.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 +jaraco/__pycache__/functools.cpython-311.pyc,, +jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494 diff --git a/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/WHEEL new file mode 100644 index 0000000000..becc9a66ea --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-3.5.2.dist-info/top_level.txt similarity index 100% rename from setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt rename to setuptools/_vendor/jaraco.functools-3.5.2.dist-info/top_level.txt diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD index 916ad7d3f8..dd471b0708 100644 --- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD @@ -7,4 +7,4 @@ jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FG jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335 jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538 -jaraco/text/__pycache__/__init__.cpython-310.pyc,, +jaraco/text/__pycache__/__init__.cpython-311.pyc,, diff --git a/setuptools/_vendor/jaraco/context.py b/setuptools/_vendor/jaraco/context.py index 87a4e3dca2..b0d1ef37cb 100644 --- a/setuptools/_vendor/jaraco/context.py +++ b/setuptools/_vendor/jaraco/context.py @@ -5,10 +5,18 @@ import tempfile import shutil import operator +import warnings @contextlib.contextmanager def pushd(dir): + """ + >>> tmp_path = getfixture('tmp_path') + >>> with pushd(tmp_path): + ... 
assert os.getcwd() == os.fspath(tmp_path) + >>> assert os.getcwd() != os.fspath(tmp_path) + """ + orig = os.getcwd() os.chdir(dir) try: @@ -29,6 +37,8 @@ def tarball_context(url, target_dir=None, runner=None, pushd=pushd): target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') if runner is None: runner = functools.partial(subprocess.check_call, shell=True) + else: + warnings.warn("runner parameter is deprecated", DeprecationWarning) # In the tar command, use --strip-components=1 to strip the first path and # then # use -C to cause the files to be extracted to {target_dir}. This ensures @@ -48,6 +58,15 @@ def tarball_context(url, target_dir=None, runner=None, pushd=pushd): def infer_compression(url): """ Given a URL or filename, infer the compression code for tar. + + >>> infer_compression('http://foo/bar.tar.gz') + 'z' + >>> infer_compression('http://foo/bar.tgz') + 'z' + >>> infer_compression('file.bz') + 'j' + >>> infer_compression('file.xz') + 'J' """ # cheat and just assume it's the last two characters compression_indicator = url[-2:] @@ -61,6 +80,12 @@ def temp_dir(remover=shutil.rmtree): """ Create a temporary directory context. Pass a custom remover to override the removal behavior. + + >>> import pathlib + >>> with temp_dir() as the_dir: + ... assert os.path.isdir(the_dir) + ... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents') + >>> assert not os.path.exists(the_dir) """ temp_dir = tempfile.mkdtemp() try: @@ -90,6 +115,12 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): @contextlib.contextmanager def null(): + """ + A null context suitable to stand in for a meaningful context. + + >>> with null() as value: + ... assert value is None + """ yield @@ -112,6 +143,10 @@ class ExceptionTrap: ... raise ValueError("1 + 1 is not 3") >>> bool(trap) True + >>> trap.value + ValueError('1 + 1 is not 3') + >>> trap.tb + >>> with ExceptionTrap(ValueError) as trap: ... raise Exception() @@ -211,3 +246,43 @@ class suppress(contextlib.suppress, contextlib.ContextDecorator): ... {}[''] >>> key_error() """ + + +class on_interrupt(contextlib.ContextDecorator): + """ + Replace a KeyboardInterrupt with SystemExit(1) + + >>> def do_interrupt(): + ... raise KeyboardInterrupt() + >>> on_interrupt('error')(do_interrupt)() + Traceback (most recent call last): + ... + SystemExit: 1 + >>> on_interrupt('error', code=255)(do_interrupt)() + Traceback (most recent call last): + ... + SystemExit: 255 + >>> on_interrupt('suppress')(do_interrupt)() + >>> with __import__('pytest').raises(KeyboardInterrupt): + ... 
on_interrupt('ignore')(do_interrupt)() + """ + + def __init__( + self, + action='error', + # py3.7 compat + # /, + code=1, + ): + self.action = action + self.code = code + + def __enter__(self): + return self + + def __exit__(self, exctype, excinst, exctb): + if exctype is not KeyboardInterrupt or self.action == 'ignore': + return + elif self.action == 'error': + raise SystemExit(self.code) from excinst + return self.action == 'suppress' diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD index 36ffbd8653..c3cbb83382 100644 --- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD @@ -7,9 +7,9 @@ more_itertools-8.8.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQ more_itertools-8.8.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15 more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82 more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 -more_itertools/__pycache__/__init__.cpython-310.pyc,, -more_itertools/__pycache__/more.cpython-310.pyc,, -more_itertools/__pycache__/recipes.cpython-310.pyc,, +more_itertools/__pycache__/__init__.cpython-311.pyc,, +more_itertools/__pycache__/more.cpython-311.pyc,, +more_itertools/__pycache__/recipes.cpython-311.pyc,, more_itertools/more.py,sha256=DlZa8v6JihVwfQ5zHidOA-xDE0orcQIUyxVnCaUoDKE,117968 more_itertools/more.pyi,sha256=r32pH2raBC1zih3evK4fyvAXvrUamJqc6dgV7QCRL_M,14977 more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA b/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA index db6e12f2dc..4c64d142b9 100644 --- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA @@ -153,5 +153,3 @@ look up an entry by its index. OrderedSet is automatically tested on Python 2.7, 3.4, 3.5, 3.6, and 3.7. We've checked more informally that it works on PyPy and PyPy3. 
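A note on the ``jaraco/context.py`` hunk above: the newly vendored ``on_interrupt`` subclasses ``contextlib.ContextDecorator``, so it can wrap a CLI entry point directly. A minimal sketch, assuming the standalone ``jaraco.context`` distribution rather than the vendored copy; ``main`` is a hypothetical stand-in::

    import time

    from jaraco.context import on_interrupt

    @on_interrupt(action='error', code=130)
    def main():
        # Hypothetical long-running work: Ctrl+C now raises
        # SystemExit(130) instead of printing a KeyboardInterrupt
        # traceback; action='suppress' would swallow it entirely.
        time.sleep(60)

    if __name__ == '__main__':
        main()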
- - diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD index 89579a07ca..6d85b247ea 100644 --- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD @@ -1,9 +1,9 @@ -__pycache__/ordered_set.cpython-310.pyc,, +__pycache__/ordered_set.cpython-311.pyc,, ordered_set-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -ordered_set-3.1.1.dist-info/METADATA,sha256=uGvfFaNmhcl69lGdHmyOXc30N3U6Jn8DByfh_VHEPpw,5359 +ordered_set-3.1.1.dist-info/METADATA,sha256=qEaJM9CbGNixB_jvfohisKbXTUjcef6nCCcBJju6f4U,5357 ordered_set-3.1.1.dist-info/MIT-LICENSE,sha256=TvRE7qUSUBcd0ols7wgNf3zDEEJWW7kv7WDRySrMBBE,1071 ordered_set-3.1.1.dist-info/RECORD,, ordered_set-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -ordered_set-3.1.1.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +ordered_set-3.1.1.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110 ordered_set-3.1.1.dist-info/top_level.txt,sha256=NTY2_aDi1Do9fl3Z9EmWPxasFkUeW2dzO2D3RDx5CfM,12 ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130 diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL index 0b18a28110..9d8f872bbf 100644 --- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) +Generator: bdist_wheel (0.38.4) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/setuptools/_vendor/packaging-21.3.dist-info/METADATA b/setuptools/_vendor/packaging-21.3.dist-info/METADATA deleted file mode 100644 index 358ace5362..0000000000 --- a/setuptools/_vendor/packaging-21.3.dist-info/METADATA +++ /dev/null @@ -1,453 +0,0 @@ -Metadata-Version: 2.1 -Name: packaging -Version: 21.3 -Summary: Core utilities for Python packages -Home-page: https://github.com/pypa/packaging -Author: Donald Stufft and individual contributors -Author-email: donald@stufft.io -License: BSD-2-Clause or Apache-2.0 -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE -License-File: LICENSE.APACHE -License-File: LICENSE.BSD -Requires-Dist: pyparsing (!=3.0.5,>=2.0.2) - -packaging -========= - -.. start-intro - -Reusable core utilities for various Python Packaging -`interoperability specifications `_. - -This library provides utilities that implement the interoperability -specifications which have clearly one correct behaviour (eg: :pep:`440`) -or benefit greatly from having a single shared implementation (eg: :pep:`425`). - -.. 
end-intro - -The ``packaging`` project includes the following: version handling, specifiers, -markers, requirements, tags, utilities. - -Documentation -------------- - -The `documentation`_ provides information and the API for the following: - -- Version Handling -- Specifiers -- Markers -- Requirements -- Tags -- Utilities - -Installation ------------- - -Use ``pip`` to install these utilities:: - - pip install packaging - -Discussion ----------- - -If you run into bugs, you can file them in our `issue tracker`_. - -You can also join ``#pypa`` on Freenode to ask questions or get involved. - - -.. _`documentation`: https://packaging.pypa.io/ -.. _`issue tracker`: https://github.com/pypa/packaging/issues - - -Code of Conduct ---------------- - -Everyone interacting in the packaging project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. - -.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - -Contributing ------------- - -The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as -well as how to report a potential security issue. The documentation for this -project also covers information about `project development`_ and `security`_. - -.. _`project development`: https://packaging.pypa.io/en/latest/development/ -.. _`security`: https://packaging.pypa.io/en/latest/security/ - -Project History ---------------- - -Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for -recent changes and project history. - -.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ - -Changelog ---------- - -21.3 - 2021-11-17 -~~~~~~~~~~~~~~~~~ - -* Add a ``pp3-none-any`` tag (`#311 `__) -* Replace the blank pyparsing 3 exclusion with a 3.0.5 exclusion (`#481 `__, `#486 `__) -* Fix a spelling mistake (`#479 `__) - -21.2 - 2021-10-29 -~~~~~~~~~~~~~~~~~ - -* Update documentation entry for 21.1. - -21.1 - 2021-10-29 -~~~~~~~~~~~~~~~~~ - -* Update pin to pyparsing to exclude 3.0.0. - -21.0 - 2021-07-03 -~~~~~~~~~~~~~~~~~ - -* PEP 656: musllinux support (`#411 `__) -* Drop support for Python 2.7, Python 3.4 and Python 3.5. -* Replace distutils usage with sysconfig (`#396 `__) -* Add support for zip files in ``parse_sdist_filename`` (`#429 `__) -* Use cached ``_hash`` attribute to short-circuit tag equality comparisons (`#417 `__) -* Specify the default value for the ``specifier`` argument to ``SpecifierSet`` (`#437 `__) -* Proper keyword-only "warn" argument in packaging.tags (`#403 `__) -* Correctly remove prerelease suffixes from ~= check (`#366 `__) -* Fix type hints for ``Version.post`` and ``Version.dev`` (`#393 `__) -* Use typing alias ``UnparsedVersion`` (`#398 `__) -* Improve type inference for ``packaging.specifiers.filter()`` (`#430 `__) -* Tighten the return type of ``canonicalize_version()`` (`#402 `__) - -20.9 - 2021-01-29 -~~~~~~~~~~~~~~~~~ - -* Run `isort `_ over the code base (`#377 `__) -* Add support for the ``macosx_10_*_universal2`` platform tags (`#379 `__) -* Introduce ``packaging.utils.parse_wheel_filename()`` and ``parse_sdist_filename()`` - (`#387 `__ and `#389 `__) - -20.8 - 2020-12-11 -~~~~~~~~~~~~~~~~~ - -* Revert back to setuptools for compatibility purposes for some Linux distros (`#363 `__) -* Do not insert an underscore in wheel tags when the interpreter version number - is more than 2 digits (`#372 `__) - -20.7 - 2020-11-28 -~~~~~~~~~~~~~~~~~ - -No unreleased changes. - -20.6 - 2020-11-28 -~~~~~~~~~~~~~~~~~ - -.. 
note:: This release was subsequently yanked, and these changes were included in 20.7. - -* Fix flit configuration, to include LICENSE files (`#357 `__) -* Make `intel` a recognized CPU architecture for the `universal` macOS platform tag (`#361 `__) -* Add some missing type hints to `packaging.requirements` (issue:`350`) - -20.5 - 2020-11-27 -~~~~~~~~~~~~~~~~~ - -* Officially support Python 3.9 (`#343 `__) -* Deprecate the ``LegacyVersion`` and ``LegacySpecifier`` classes (`#321 `__) -* Handle ``OSError`` on non-dynamic executables when attempting to resolve - the glibc version string. - -20.4 - 2020-05-19 -~~~~~~~~~~~~~~~~~ - -* Canonicalize version before comparing specifiers. (`#282 `__) -* Change type hint for ``canonicalize_name`` to return - ``packaging.utils.NormalizedName``. - This enables the use of static typing tools (like mypy) to detect mixing of - normalized and un-normalized names. - -20.3 - 2020-03-05 -~~~~~~~~~~~~~~~~~ - -* Fix changelog for 20.2. - -20.2 - 2020-03-05 -~~~~~~~~~~~~~~~~~ - -* Fix a bug that caused a 32-bit OS that runs on a 64-bit ARM CPU (e.g. ARM-v8, - aarch64), to report the wrong bitness. - -20.1 - 2020-01-24 -~~~~~~~~~~~~~~~~~~~ - -* Fix a bug caused by reuse of an exhausted iterator. (`#257 `__) - -20.0 - 2020-01-06 -~~~~~~~~~~~~~~~~~ - -* Add type hints (`#191 `__) - -* Add proper trove classifiers for PyPy support (`#198 `__) - -* Scale back depending on ``ctypes`` for manylinux support detection (`#171 `__) - -* Use ``sys.implementation.name`` where appropriate for ``packaging.tags`` (`#193 `__) - -* Expand upon the API provided by ``packaging.tags``: ``interpreter_name()``, ``mac_platforms()``, ``compatible_tags()``, ``cpython_tags()``, ``generic_tags()`` (`#187 `__) - -* Officially support Python 3.8 (`#232 `__) - -* Add ``major``, ``minor``, and ``micro`` aliases to ``packaging.version.Version`` (`#226 `__) - -* Properly mark ``packaging`` has being fully typed by adding a `py.typed` file (`#226 `__) - -19.2 - 2019-09-18 -~~~~~~~~~~~~~~~~~ - -* Remove dependency on ``attrs`` (`#178 `__, `#179 `__) - -* Use appropriate fallbacks for CPython ABI tag (`#181 `__, `#185 `__) - -* Add manylinux2014 support (`#186 `__) - -* Improve ABI detection (`#181 `__) - -* Properly handle debug wheels for Python 3.8 (`#172 `__) - -* Improve detection of debug builds on Windows (`#194 `__) - -19.1 - 2019-07-30 -~~~~~~~~~~~~~~~~~ - -* Add the ``packaging.tags`` module. (`#156 `__) - -* Correctly handle two-digit versions in ``python_version`` (`#119 `__) - - -19.0 - 2019-01-20 -~~~~~~~~~~~~~~~~~ - -* Fix string representation of PEP 508 direct URL requirements with markers. - -* Better handling of file URLs - - This allows for using ``file:///absolute/path``, which was previously - prevented due to the missing ``netloc``. - - This allows for all file URLs that ``urlunparse`` turns back into the - original URL to be valid. - - -18.0 - 2018-09-26 -~~~~~~~~~~~~~~~~~ - -* Improve error messages when invalid requirements are given. (`#129 `__) - - -17.1 - 2017-02-28 -~~~~~~~~~~~~~~~~~ - -* Fix ``utils.canonicalize_version`` when supplying non PEP 440 versions. - - -17.0 - 2017-02-28 -~~~~~~~~~~~~~~~~~ - -* Drop support for python 2.6, 3.2, and 3.3. - -* Define minimal pyparsing version to 2.0.2 (`#91 `__). - -* Add ``epoch``, ``release``, ``pre``, ``dev``, and ``post`` attributes to - ``Version`` and ``LegacyVersion`` (`#34 `__). 
- -* Add ``Version().is_devrelease`` and ``LegacyVersion().is_devrelease`` to - make it easy to determine if a release is a development release. - -* Add ``utils.canonicalize_version`` to canonicalize version strings or - ``Version`` instances (`#121 `__). - - -16.8 - 2016-10-29 -~~~~~~~~~~~~~~~~~ - -* Fix markers that utilize ``in`` so that they render correctly. - -* Fix an erroneous test on Python RC releases. - - -16.7 - 2016-04-23 -~~~~~~~~~~~~~~~~~ - -* Add support for the deprecated ``python_implementation`` marker which was - an undocumented setuptools marker in addition to the newer markers. - - -16.6 - 2016-03-29 -~~~~~~~~~~~~~~~~~ - -* Add support for the deprecated, PEP 345 environment markers in addition to - the newer markers. - - -16.5 - 2016-02-26 -~~~~~~~~~~~~~~~~~ - -* Fix a regression in parsing requirements with whitespaces between the comma - separators. - - -16.4 - 2016-02-22 -~~~~~~~~~~~~~~~~~ - -* Fix a regression in parsing requirements like ``foo (==4)``. - - -16.3 - 2016-02-21 -~~~~~~~~~~~~~~~~~ - -* Fix a bug where ``packaging.requirements:Requirement`` was overly strict when - matching legacy requirements. - - -16.2 - 2016-02-09 -~~~~~~~~~~~~~~~~~ - -* Add a function that implements the name canonicalization from PEP 503. - - -16.1 - 2016-02-07 -~~~~~~~~~~~~~~~~~ - -* Implement requirement specifiers from PEP 508. - - -16.0 - 2016-01-19 -~~~~~~~~~~~~~~~~~ - -* Relicense so that packaging is available under *either* the Apache License, - Version 2.0 or a 2 Clause BSD license. - -* Support installation of packaging when only distutils is available. - -* Fix ``==`` comparison when there is a prefix and a local version in play. - (`#41 `__). - -* Implement environment markers from PEP 508. - - -15.3 - 2015-08-01 -~~~~~~~~~~~~~~~~~ - -* Normalize post-release spellings for rev/r prefixes. `#35 `__ - - -15.2 - 2015-05-13 -~~~~~~~~~~~~~~~~~ - -* Fix an error where the arbitrary specifier (``===``) was not correctly - allowing pre-releases when it was being used. - -* Expose the specifier and version parts through properties on the - ``Specifier`` classes. - -* Allow iterating over the ``SpecifierSet`` to get access to all of the - ``Specifier`` instances. - -* Allow testing if a version is contained within a specifier via the ``in`` - operator. - - -15.1 - 2015-04-13 -~~~~~~~~~~~~~~~~~ - -* Fix a logic error that was causing inconsistent answers about whether or not - a pre-release was contained within a ``SpecifierSet`` or not. - - -15.0 - 2015-01-02 -~~~~~~~~~~~~~~~~~ - -* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to - make it easy to determine if a release is a post release. - -* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make - it easy to get the public version without any pre or post release markers. - -* Support the update to PEP 440 which removed the implied ``!=V.*`` when using - either the ``>V`` or ``<V`` operator. - - -14.3 - 2014-11-19 -~~~~~~~~~~~~~~~~~ - -* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely - handle legacy specifiers as well as PEP 440 specifiers. - -* **BACKWARDS INCOMPATIBLE** Move the specifier support out of - ``packaging.version`` into ``packaging.specifiers``. - - -14.2 - 2014-09-10 -~~~~~~~~~~~~~~~~~ - -* Add prerelease support to ``Specifier``. -* Remove the ability to do ``item in Specifier()`` and replace it with - ``Specifier().contains(item)`` in order to allow flags that signal if a - prerelease should be accepted or not. 
-* Add a method ``Specifier().filter()`` which will take an iterable and returns - an iterable with items that do not match the specifier filtered out. - - -14.1 - 2014-09-08 -~~~~~~~~~~~~~~~~~ - -* Allow ``LegacyVersion`` and ``Version`` to be sorted together. -* Add ``packaging.version.parse()`` to enable easily parsing a version string - as either a ``Version`` or a ``LegacyVersion`` depending on it's PEP 440 - validity. - - -14.0 - 2014-09-05 -~~~~~~~~~~~~~~~~~ - -* Initial release. - - -.. _`master`: https://github.com/pypa/packaging/ - - diff --git a/setuptools/_vendor/packaging-21.3.dist-info/RECORD b/setuptools/_vendor/packaging-21.3.dist-info/RECORD deleted file mode 100644 index 97cace1022..0000000000 --- a/setuptools/_vendor/packaging-21.3.dist-info/RECORD +++ /dev/null @@ -1,32 +0,0 @@ -packaging-21.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -packaging-21.3.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 -packaging-21.3.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 -packaging-21.3.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 -packaging-21.3.dist-info/METADATA,sha256=KuKIy6qDLP3svIt6ejCbxBDhvq11ebkgUN55MeyKFyc,15147 -packaging-21.3.dist-info/RECORD,, -packaging-21.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging-21.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -packaging-21.3.dist-info/top_level.txt,sha256=zFdHrhWnPslzsiP455HutQsqPB6v0KCtNUMtUtrefDw,10 -packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 -packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 -packaging/__pycache__/__about__.cpython-310.pyc,, -packaging/__pycache__/__init__.cpython-310.pyc,, -packaging/__pycache__/_manylinux.cpython-310.pyc,, -packaging/__pycache__/_musllinux.cpython-310.pyc,, -packaging/__pycache__/_structures.cpython-310.pyc,, -packaging/__pycache__/markers.cpython-310.pyc,, -packaging/__pycache__/requirements.cpython-310.pyc,, -packaging/__pycache__/specifiers.cpython-310.pyc,, -packaging/__pycache__/tags.cpython-310.pyc,, -packaging/__pycache__/utils.cpython-310.pyc,, -packaging/__pycache__/version.cpython-310.pyc,, -packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 -packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 -packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -packaging/markers.py,sha256=Fygi3_eZnjQ-3VJizW5AhI5wvo0Hb6RMk4DidsKpOC0,8475 -packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging/requirements.py,sha256=rjaGRCMepZS1mlYMjJ5Qh6rfq3gtsCRQUQmftGZ_bu8,4664 -packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 -packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 -packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 -packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 diff --git a/setuptools/_vendor/packaging-21.3.dist-info/WHEEL b/setuptools/_vendor/packaging-21.3.dist-info/WHEEL deleted file mode 100644 index 5bad85fdc1..0000000000 --- a/setuptools/_vendor/packaging-21.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt 
b/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt deleted file mode 100644 index 748809f75c..0000000000 --- a/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -packaging diff --git a/setuptools/_vendor/packaging-21.3.dist-info/INSTALLER b/setuptools/_vendor/packaging-23.0.dist-info/INSTALLER similarity index 100% rename from setuptools/_vendor/packaging-21.3.dist-info/INSTALLER rename to setuptools/_vendor/packaging-23.0.dist-info/INSTALLER diff --git a/setuptools/_vendor/packaging-21.3.dist-info/LICENSE b/setuptools/_vendor/packaging-23.0.dist-info/LICENSE similarity index 100% rename from setuptools/_vendor/packaging-21.3.dist-info/LICENSE rename to setuptools/_vendor/packaging-23.0.dist-info/LICENSE diff --git a/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.APACHE b/setuptools/_vendor/packaging-23.0.dist-info/LICENSE.APACHE similarity index 100% rename from setuptools/_vendor/packaging-21.3.dist-info/LICENSE.APACHE rename to setuptools/_vendor/packaging-23.0.dist-info/LICENSE.APACHE diff --git a/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.BSD b/setuptools/_vendor/packaging-23.0.dist-info/LICENSE.BSD similarity index 100% rename from setuptools/_vendor/packaging-21.3.dist-info/LICENSE.BSD rename to setuptools/_vendor/packaging-23.0.dist-info/LICENSE.BSD diff --git a/setuptools/_vendor/packaging-23.0.dist-info/METADATA b/setuptools/_vendor/packaging-23.0.dist-info/METADATA new file mode 100644 index 0000000000..7c5087aaef --- /dev/null +++ b/setuptools/_vendor/packaging-23.0.dist-info/METADATA @@ -0,0 +1,98 @@ +Metadata-Version: 2.1 +Name: packaging +Version: 23.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft <donald@stufft.io> +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities.
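The intro above enumerates the toolkit; a minimal sketch of those pieces in use, assuming the standalone ``packaging`` API (setuptools itself reaches the vendored copy through ``setuptools.extern``)::

    from packaging.requirements import Requirement
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # A dependency specifier bundles a name, extras, a version
    # specifier, and an optional environment marker.
    req = Requirement("requests[security]>=2.8.1; python_version >= '3.7'")
    print(req.name, sorted(req.extras))            # requests ['security']

    # Version and SpecifierSet implement PEP 440 containment checks.
    print(Version("2.28.2") in SpecifierSet(">=2.8.1,<3"))  # True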
+ +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/setuptools/_vendor/packaging-23.0.dist-info/RECORD b/setuptools/_vendor/packaging-23.0.dist-info/RECORD new file mode 100644 index 0000000000..3cdb0c289e --- /dev/null +++ b/setuptools/_vendor/packaging-23.0.dist-info/RECORD @@ -0,0 +1,35 @@ +packaging-23.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +packaging-23.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-23.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-23.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging-23.0.dist-info/METADATA,sha256=RFXOWcbEEITO7DWWyhtk55j4BGh7QaKb2VqL0TF8Y_4,3054 +packaging-23.0.dist-info/RECORD,, +packaging-23.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging-23.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81 +packaging/__init__.py,sha256=7BlJ_DcIt1zv01UQcZLozidczzNcivKj66zIBkRL3R4,501 +packaging/__pycache__/__init__.cpython-38.pyc,, +packaging/__pycache__/_elffile.cpython-38.pyc,, +packaging/__pycache__/_manylinux.cpython-38.pyc,, +packaging/__pycache__/_musllinux.cpython-38.pyc,, +packaging/__pycache__/_parser.cpython-38.pyc,, +packaging/__pycache__/_structures.cpython-38.pyc,, +packaging/__pycache__/_tokenizer.cpython-38.pyc,, +packaging/__pycache__/markers.cpython-38.pyc,, +packaging/__pycache__/requirements.cpython-38.pyc,, +packaging/__pycache__/specifiers.cpython-38.pyc,, +packaging/__pycache__/tags.cpython-38.pyc,, +packaging/__pycache__/utils.cpython-38.pyc,, +packaging/__pycache__/version.cpython-38.pyc,, +packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 +packaging/_manylinux.py,sha256=uZ821PBqQrokhUbwe7E0UodEraMHqzoSgTvfJ8MIl30,8813 +packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524 +packaging/_parser.py,sha256=jjFjSqNf7W2-Ta6YUkywK0P4d2i0Bz_MqLOfl7O1Tkw,9399 
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +packaging/_tokenizer.py,sha256=czGibL-4oPofx1pCSt_hrozNbHlOPrqGv6m-0d-iTdo,5148 +packaging/markers.py,sha256=HDPXE0_MPBSwsw_9upez8t8mdrqUGrgiOG_qyQy-W30,8161 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/requirements.py,sha256=4nOKheaBbVEQXTGSqaOGTy1Tkg7J_sEno3u8jxC-baw,3264 +packaging/specifiers.py,sha256=-3ajZ5CkQrjNW5H8NPjvCV2RBgr-w9wcYBdb8kjPBfg,39046 +packaging/tags.py,sha256=fOKnZVfiU3oc9CPSzjJUsMk5VTfgOfpNhWobUH0sAlg,18065 +packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355 +packaging/version.py,sha256=_ULefmddLDLJ9VKRFAXhshEd0zP8OYPhcjCPfYolUbo,16295 diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/REQUESTED b/setuptools/_vendor/packaging-23.0.dist-info/REQUESTED similarity index 100% rename from setuptools/_vendor/pyparsing-2.2.1.dist-info/REQUESTED rename to setuptools/_vendor/packaging-23.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/packaging-23.0.dist-info/WHEEL b/setuptools/_vendor/packaging-23.0.dist-info/WHEEL new file mode 100644 index 0000000000..db4a255f3a --- /dev/null +++ b/setuptools/_vendor/packaging-23.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.8.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/setuptools/_vendor/packaging/__about__.py b/setuptools/_vendor/packaging/__about__.py deleted file mode 100644 index 3551bc2d29..0000000000 --- a/setuptools/_vendor/packaging/__about__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "21.3" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014-2019 %s" % __author__ diff --git a/setuptools/_vendor/packaging/__init__.py b/setuptools/_vendor/packaging/__init__.py index 3c50c5dcfe..4112fec0a5 100644 --- a/setuptools/_vendor/packaging/__init__.py +++ b/setuptools/_vendor/packaging/__init__.py @@ -2,24 +2,14 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from .__about__ import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] +__version__ = "23.0" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "2014-2019 %s" % __author__ diff --git a/setuptools/_vendor/packaging/_elffile.py b/setuptools/_vendor/packaging/_elffile.py new file mode 100644 index 0000000000..6fb19b30bb --- /dev/null +++ b/setuptools/_vendor/packaging/_elffile.py @@ -0,0 +1,108 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. 
Only the read interface is implemented. + +Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca +ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +""" + +import enum +import os +import struct +from typing import IO, Optional, Tuple + + +class ELFInvalid(ValueError): + pass + + +class EIClass(enum.IntEnum): + C32 = 1 + C64 = 2 + + +class EIData(enum.IntEnum): + Lsb = 1 + Msb = 2 + + +class EMachine(enum.IntEnum): + I386 = 3 + S390 = 22 + Arm = 40 + X8664 = 62 + AArc64 = 183 + + +class ELFFile: + """ + Representation of an ELF executable. + """ + + def __init__(self, f: IO[bytes]) -> None: + self._f = f + + try: + ident = self._read("16B") + except struct.error: + raise ELFInvalid("unable to parse identification") + magic = bytes(ident[:4]) + if magic != b"\x7fELF": + raise ELFInvalid(f"invalid magic: {magic!r}") + + self.capacity = ident[4] # Format for program header (bitness). + self.encoding = ident[5] # Data structure encoding (endianness). + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. + e_fmt, self._p_fmt, self._p_idx = { + (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB. + (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. + (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB. + (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. + }[(self.capacity, self.encoding)] + except KeyError: + raise ELFInvalid( + f"unrecognized capacity ({self.capacity}) or " + f"encoding ({self.encoding})" + ) + + try: + ( + _, + self.machine, # Architecture type. + _, + _, + self._e_phoff, # Offset of program header. + _, + self.flags, # Processor-specific flags. + _, + self._e_phentsize, # Size of section. + self._e_phnum, # Number of sections. + ) = self._read(e_fmt) + except struct.error as e: + raise ELFInvalid("unable to parse machine and section information") from e + + def _read(self, fmt: str) -> Tuple[int, ...]: + return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) + + @property + def interpreter(self) -> Optional[str]: + """ + The path recorded in the ``PT_INTERP`` section header. + """ + for index in range(self._e_phnum): + self._f.seek(self._e_phoff + self._e_phentsize * index) + try: + data = self._read(self._p_fmt) + except struct.error: + continue + if data[self._p_idx[0]] != 3: # Not PT_INTERP. + continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py index 4c379aa6f6..2f0cc7439a 100644 --- a/setuptools/_vendor/packaging/_manylinux.py +++ b/setuptools/_vendor/packaging/_manylinux.py @@ -1,121 +1,58 @@ import collections +import contextlib import functools import os import re -import struct import sys import warnings -from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple - - -# Python does not provide platform information at sufficient granularity to -# identify the architecture of the running executable in some cases, so we -# determine it dynamically by reading the information from the running -# process. This only applies on Linux, which uses the ELF format. -class _ELFFileHeader: - # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header - class _InvalidELFFileHeader(ValueError): - """ - An invalid ELF file header was found. 
- """ - - ELF_MAGIC_NUMBER = 0x7F454C46 - ELFCLASS32 = 1 - ELFCLASS64 = 2 - ELFDATA2LSB = 1 - ELFDATA2MSB = 2 - EM_386 = 3 - EM_S390 = 22 - EM_ARM = 40 - EM_X86_64 = 62 - EF_ARM_ABIMASK = 0xFF000000 - EF_ARM_ABI_VER5 = 0x05000000 - EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - def __init__(self, file: IO[bytes]) -> None: - def unpack(fmt: str) -> int: - try: - data = file.read(struct.calcsize(fmt)) - result: Tuple[int, ...] = struct.unpack(fmt, data) - except struct.error: - raise _ELFFileHeader._InvalidELFFileHeader() - return result[0] - - self.e_ident_magic = unpack(">I") - if self.e_ident_magic != self.ELF_MAGIC_NUMBER: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_class = unpack("B") - if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_data = unpack("B") - if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_version = unpack("B") - self.e_ident_osabi = unpack("B") - self.e_ident_abiversion = unpack("B") - self.e_ident_pad = file.read(7) - format_h = "H" - format_i = "I" - format_q = "Q" - format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q - self.e_type = unpack(format_h) - self.e_machine = unpack(format_h) - self.e_version = unpack(format_i) - self.e_entry = unpack(format_p) - self.e_phoff = unpack(format_p) - self.e_shoff = unpack(format_p) - self.e_flags = unpack(format_i) - self.e_ehsize = unpack(format_h) - self.e_phentsize = unpack(format_h) - self.e_phnum = unpack(format_h) - self.e_shentsize = unpack(format_h) - self.e_shnum = unpack(format_h) - self.e_shstrndx = unpack(format_h) - - -def _get_elf_header() -> Optional[_ELFFileHeader]: +from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + +@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: try: - with open(sys.executable, "rb") as f: - elf_header = _ELFFileHeader(f) - except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): - return None - return elf_header + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None -def _is_linux_armhf() -> bool: +def _is_linux_armhf(executable: str) -> bool: # hard-float ABI can be detected from the ELF header of the running # process # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_ARM - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABIMASK - ) == elf_header.EF_ARM_ABI_VER5 - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD - ) == elf_header.EF_ARM_ABI_FLOAT_HARD - return result - - -def _is_linux_i686() -> bool: - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_386 - return result + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == 
EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) -def _have_compatible_abi(arch: str) -> bool: +def _have_compatible_abi(executable: str, arch: str) -> bool: if arch == "armv7l": - return _is_linux_armhf() + return _is_linux_armhf(executable) if arch == "i686": - return _is_linux_i686() + return _is_linux_i686(executable) return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} @@ -141,10 +78,10 @@ def _glibc_version_string_confstr() -> Optional[str]: # platform module. # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". - version_string = os.confstr("CS_GNU_LIBC_VERSION") + # Should be a string like "glibc 2.17". + version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION") assert version_string is not None - _, version = version_string.split() + _, version = version_string.rsplit() except (AssertionError, AttributeError, OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... return None @@ -211,8 +148,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]: m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) if not m: warnings.warn( - "Expected glibc version with 2 components major.minor," - " got: %s" % version_str, + f"Expected glibc version with 2 components major.minor," + f" got: {version_str}", RuntimeWarning, ) return -1, -1 @@ -265,7 +202,7 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: def platform_tags(linux: str, arch: str) -> Iterator[str]: - if not _have_compatible_abi(arch): + if not _have_compatible_abi(sys.executable, arch): return # Oldest glibc to be supported regardless of architecture is (2, 17). too_old_glibc2 = _GLibCVersion(2, 16) diff --git a/setuptools/_vendor/packaging/_musllinux.py b/setuptools/_vendor/packaging/_musllinux.py index 8ac3059ba3..706ba600a9 100644 --- a/setuptools/_vendor/packaging/_musllinux.py +++ b/setuptools/_vendor/packaging/_musllinux.py @@ -4,68 +4,13 @@ linked against musl, and what musl version is used. """ -import contextlib import functools -import operator -import os import re -import struct import subprocess import sys -from typing import IO, Iterator, NamedTuple, Optional, Tuple +from typing import Iterator, NamedTuple, Optional - -def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: - return struct.unpack(fmt, f.read(struct.calcsize(fmt))) - - -def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: - """Detect musl libc location by parsing the Python executable. - - Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca - ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html - """ - f.seek(0) - try: - ident = _read_unpacked(f, "16B") - except struct.error: - return None - if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. - return None - f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. - - try: - # e_fmt: Format for program header. - # p_fmt: Format for section header. - # p_idx: Indexes to find p_type, p_offset, and p_filesz. - e_fmt, p_fmt, p_idx = { - 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. - 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. 
- }[ident[4]] - except KeyError: - return None - else: - p_get = operator.itemgetter(*p_idx) - - # Find the interpreter section and return its content. - try: - _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) - except struct.error: - return None - for i in range(e_phnum + 1): - f.seek(e_phoff + e_phentsize * i) - try: - p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) - except struct.error: - return None - if p_type != 3: # Not PT_INTERP. - continue - f.seek(p_offset) - interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") - if "musl" not in interpreter: - return None - return interpreter - return None +from ._elffile import ELFFile class _MuslVersion(NamedTuple): @@ -95,13 +40,12 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]: Version 1.2.2 Dynamic Program Loader """ - with contextlib.ExitStack() as stack: - try: - f = stack.enter_context(open(executable, "rb")) - except OSError: - return None - ld = _parse_ld_musl_from_elf(f) - if not ld: + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: return None proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) return _parse_musl_version(proc.stderr) diff --git a/setuptools/_vendor/packaging/_parser.py b/setuptools/_vendor/packaging/_parser.py new file mode 100644 index 0000000000..2bc6a8f98b --- /dev/null +++ b/setuptools/_vendor/packaging/_parser.py @@ -0,0 +1,328 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains EBNF-inspired grammar representing +the implementation. +""" + +import ast +from typing import Any, List, NamedTuple, Optional, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] +# MarkerList = List[Union["MarkerList", MarkerAtom, str]] +# mypy does not support recursive type definition +# https://github.com/python/mypy/issues/731 +MarkerAtom = Any +MarkerList = List[Any] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: List[str] + specifier: str + marker: Optional[MarkerList] + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? 
requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> Tuple[str, str, Optional[MarkerList]]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? + """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> List[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens("LEFT_BRACKET", "RIGHT_BRACKET"): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: List[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? 
+ """ + with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? 
+ """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if ( + env_var == "platform_python_implementation" + or env_var == "python_implementation" + ): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of " + "<=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/setuptools/_vendor/packaging/_tokenizer.py b/setuptools/_vendor/packaging/_tokenizer.py new file mode 100644 index 0000000000..b1fb207c7f --- /dev/null +++ b/setuptools/_vendor/packaging/_tokenizer.py @@ -0,0 +1,188 @@ +import contextlib +import re +from dataclasses import dataclass +from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: Tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extra + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + 
"""Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. + """ + + def __init__( + self, + source: str, + *, + rules: "Dict[str, Union[str, re.Pattern[str]]]", + ) -> None: + self.source = source + self.rules: Dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Optional[Token] = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert ( + self.next_token is None + ), f"Cannot check for {name!r}, already have {self.next_token!r}" + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: Optional[int] = None, + span_end: Optional[int] = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens(self, open_token: str, close_token: str) -> Iterator[bool]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield open_position is not None + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected closing {close_token}", + span_start=open_position, + ) + + self.read() diff --git a/setuptools/_vendor/packaging/markers.py b/setuptools/_vendor/packaging/markers.py index eb0541b83a..68369c981b 100644 --- a/setuptools/_vendor/packaging/markers.py +++ b/setuptools/_vendor/packaging/markers.py @@ -8,19 +8,10 @@ import sys from typing import Any, Callable, Dict, List, Optional, Tuple, Union -from setuptools.extern.pyparsing import ( # noqa: N817 - Forward, - Group, - Literal as L, - ParseException, - ParseResults, - QuotedString, - ZeroOrMore, - stringEnd, - stringStart, -) - +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable, parse_marker +from ._tokenizer import ParserSyntaxError from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name __all__ = [ "InvalidMarker", @@ -52,101 +43,24 @@ class UndefinedEnvironmentName(ValueError): """ -class Node: - def __init__(self, value: Any) -> None: - self.value = value - - def __str__(self) -> str: - return str(self.value) - - def 
__repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") # PEP-345 - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # undocumented setuptools legacy - | L("extra") # PEP-508 -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results +def _normalize_extra_values(results: Any) -> Any: + """ + Normalize extra values. 
+ """ + if isinstance(results[0], tuple): + lhs, op, rhs = results[0] + if isinstance(lhs, Variable) and lhs.value == "extra": + normalized_extra = canonicalize_name(rhs.value) + rhs = Value(normalized_extra) + elif isinstance(rhs, Variable) and rhs.value == "extra": + normalized_extra = canonicalize_name(lhs.value) + lhs = Value(normalized_extra) + results[0] = lhs, op, rhs + return results def _format_marker( - marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True + marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True ) -> str: assert isinstance(marker, (list, tuple, str)) @@ -192,7 +106,7 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool: except InvalidSpecifier: pass else: - return spec.contains(lhs) + return spec.contains(lhs, prereleases=True) oper: Optional[Operator] = _operators.get(op.serialize()) if oper is None: @@ -201,25 +115,19 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool: return oper(lhs, rhs) -class Undefined: - pass - +def _normalize(*values: str, key: str) -> Tuple[str, ...]: + # PEP 685 – Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + return tuple(canonicalize_name(v) for v in values) -_undefined = Undefined() + # other environment markers don't have such standards + return values -def _get_env(environment: Dict[str, str], name: str) -> str: - value: Union[str, Undefined] = environment.get(name, _undefined) - - if isinstance(value, Undefined): - raise UndefinedEnvironmentName( - f"{name!r} does not exist in evaluation environment." - ) - - return value - - -def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: +def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: groups: List[List[bool]] = [[]] for marker in markers: @@ -231,12 +139,15 @@ def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: lhs, op, rhs = marker if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) + environment_key = lhs.value + lhs_value = environment[environment_key] rhs_value = rhs.value else: lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) + environment_key = rhs.value + rhs_value = environment[environment_key] + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) groups[-1].append(_eval_op(lhs_value, op, rhs_value)) else: assert marker in ["and", "or"] @@ -274,13 +185,29 @@ def default_environment() -> Dict[str, str]: class Marker: def __init__(self, marker: str) -> None: + # Note: We create a Marker object without calling this constructor in + # packaging.requirements.Requirement. If any additional logic is + # added here, make sure to mirror/adapt Requirement. 
try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - raise InvalidMarker( - f"Invalid marker: {marker!r}, parse error at " - f"{marker[e.loc : e.loc + 8]!r}" - ) + self._markers = _normalize_extra_values(parse_marker(marker)) + # The attribute `_markers` can be described in terms of a recursive type: + # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]] + # + # For example, the following expression: + # python_version > "3.6" or (python_version == "3.6" and os_name == "unix") + # + # is parsed into: + # [ + # (, ')>, ), + # 'and', + # [ + # (, , ), + # 'or', + # (, , ) + # ] + # ] + except ParserSyntaxError as e: + raise InvalidMarker(str(e)) from e def __str__(self) -> str: return _format_marker(self._markers) @@ -288,6 +215,15 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"" + def __hash__(self) -> int: + return hash((self.__class__.__name__, str(self))) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Marker): + return NotImplemented + + return str(self) == str(other) + def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: """Evaluate a marker. @@ -298,7 +234,12 @@ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: The environment is determined from the current Python process. """ current_environment = default_environment() + current_environment["extra"] = "" if environment is not None: current_environment.update(environment) + # The API used to allow setting extra to None. We need to handle this + # case for backwards compatibility. + if current_environment["extra"] is None: + current_environment["extra"] = "" return _evaluate_markers(self._markers, current_environment) diff --git a/setuptools/_vendor/packaging/requirements.py b/setuptools/_vendor/packaging/requirements.py index 0d93231b46..a9f9b9c7c9 100644 --- a/setuptools/_vendor/packaging/requirements.py +++ b/setuptools/_vendor/packaging/requirements.py @@ -2,26 +2,13 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
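For orientation, a small usage sketch of the reworked marker handling above (standard packaging.markers API; the PEP 685 extra normalization and the extra = "" default are exactly the behaviours this patch introduces, and the printed results assume a Python 3.8+ interpreter):

    from packaging.markers import InvalidMarker, Marker

    # Extras are canonicalized (PEP 685) at parse time.
    m = Marker('python_version >= "3.8" and extra == "Feature.One"')
    print(str(m))                                # python_version >= "3.8" and extra == "feature-one"
    print(m.evaluate({"extra": "feature-one"}))  # True on Python 3.8+
    print(m.evaluate())                          # False: "extra" now defaults to "" instead of raising

    # The new __eq__/__hash__ compare the normalized string form.
    print(Marker('extra == "Feature.One"') == Marker('extra == "feature-one"'))  # True

    # ParserSyntaxError from the new tokenizer surfaces as InvalidMarker.
    try:
        Marker('python_version >>> "3"')
    except InvalidMarker as exc:
        print(type(exc).__name__)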
-import re -import string import urllib.parse -from typing import List, Optional as TOptional, Set +from typing import Any, List, Optional, Set -from setuptools.extern.pyparsing import ( # noqa - Combine, - Literal as L, - Optional, - ParseException, - Regex, - Word, - ZeroOrMore, - originalTextFor, - stringEnd, - stringStart, -) - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet +from ._parser import parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet class InvalidRequirement(ValueError): @@ -30,60 +17,6 @@ class InvalidRequirement(ValueError): """ -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 -REQUIREMENT.parseString("x[]") - - class Requirement: """Parse a requirement. 
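To make the API swap concrete, a hedged sketch of parsing with the rewritten class (the attribute surface is unchanged; only the machinery behind it moves from pyparsing to _parser.parse_requirement):

    from packaging.requirements import InvalidRequirement, Requirement

    req = Requirement('name[Quux,Bar]>=1.0,!=1.3.4.*; python_version > "3.7"')
    print(req.name)            # name
    print(sorted(req.extras))  # ['Bar', 'Quux'] -- a plain set of strings
    print(str(req.specifier))  # !=1.3.4.*,>=1.0
    print(req.marker)          # python_version > "3.7"

    # The new __eq__ compares field by field, so a round-trip stays equal.
    print(req == Requirement(str(req)))  # True

    # ParserSyntaxError is re-raised as InvalidRequirement, mirroring Marker.
    try:
        Requirement("name ==")
    except InvalidRequirement as exc:
        print(type(exc).__name__)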
@@ -99,28 +32,29 @@ class Requirement: def __init__(self, requirement_string: str) -> None: try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}' - ) - - self.name: str = req.name - if req.url: - parsed_url = urllib.parse.urlparse(req.url) + parsed = parse_requirement(requirement_string) + except ParserSyntaxError as e: + raise InvalidRequirement(str(e)) from e + + self.name: str = parsed.name + if parsed.url: + parsed_url = urllib.parse.urlparse(parsed.url) if parsed_url.scheme == "file": - if urllib.parse.urlunparse(parsed_url) != req.url: + if urllib.parse.urlunparse(parsed_url) != parsed.url: raise InvalidRequirement("Invalid URL given") elif not (parsed_url.scheme and parsed_url.netloc) or ( not parsed_url.scheme and not parsed_url.netloc ): - raise InvalidRequirement(f"Invalid URL: {req.url}") - self.url: TOptional[str] = req.url + raise InvalidRequirement(f"Invalid URL: {parsed.url}") + self.url: Optional[str] = parsed.url else: self.url = None - self.extras: Set[str] = set(req.extras.asList() if req.extras else []) - self.specifier: SpecifierSet = SpecifierSet(req.specifier) - self.marker: TOptional[Marker] = req.marker if req.marker else None + self.extras: Set[str] = set(parsed.extras if parsed.extras else []) + self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) + self.marker: Optional[Marker] = None + if parsed.marker is not None: + self.marker = Marker.__new__(Marker) + self.marker._markers = _normalize_extra_values(parsed.marker) def __str__(self) -> str: parts: List[str] = [self.name] @@ -144,3 +78,18 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"" + + def __hash__(self) -> int: + return hash((self.__class__.__name__, str(self))) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Requirement): + return NotImplemented + + return ( + self.name == other.name + and self.extras == other.extras + and self.specifier == other.specifier + and self.url == other.url + and self.marker == other.marker + ) diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py index 0e218a6f9f..e715ecc8c2 100644 --- a/setuptools/_vendor/packaging/specifiers.py +++ b/setuptools/_vendor/packaging/specifiers.py @@ -1,20 +1,22 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +""" +.. 
testsetup:: + + from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from packaging.version import Version +""" import abc -import functools import itertools import re -import warnings from typing import ( Callable, - Dict, Iterable, Iterator, List, Optional, - Pattern, Set, Tuple, TypeVar, @@ -22,17 +24,28 @@ ) from .utils import canonicalize_version -from .version import LegacyVersion, Version, parse +from .version import Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] -ParsedVersion = Union[Version, LegacyVersion] -UnparsedVersion = Union[Version, LegacyVersion, str] -VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) -CallableOperator = Callable[[ParsedVersion, str], bool] + +def _coerce_version(version: UnparsedVersion) -> Version: + if not isinstance(version, Version): + version = Version(version) + return version class InvalidSpecifier(ValueError): """ - An invalid specifier was found, users should refer to PEP 440. + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... + packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' """ @@ -40,35 +53,39 @@ class BaseSpecifier(metaclass=abc.ABCMeta): @abc.abstractmethod def __str__(self) -> str: """ - Returns the str representation of this Specifier like object. This + Returns the str representation of this Specifier-like object. This should be representative of the Specifier itself. """ @abc.abstractmethod def __hash__(self) -> int: """ - Returns a hash value for this Specifier like object. + Returns a hash value for this Specifier-like object. """ @abc.abstractmethod def __eq__(self, other: object) -> bool: """ - Returns a boolean representing whether or not the two Specifier like + Returns a boolean representing whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. """ - @abc.abstractproperty + @property + @abc.abstractmethod def prereleases(self) -> Optional[bool]: - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. """ @prereleases.setter def prereleases(self, value: bool) -> None: - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. + """Setter for :attr:`prereleases`. + + :param value: The value to set. """ @abc.abstractmethod @@ -79,227 +96,28 @@ def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: @abc.abstractmethod def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
""" -class _IndividualSpecifier(BaseSpecifier): - - _operators: Dict[str, str] = {} - _regex: Pattern[str] - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - - self._spec: Tuple[str, str] = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self) -> str: - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"<{self.__class__.__name__}({str(self)!r}{pre})>" - - def __str__(self) -> str: - return "{}{}".format(*self._spec) - - @property - def _canonical_spec(self) -> Tuple[str, str]: - return self._spec[0], canonicalize_version(self._spec[1]) - - def __hash__(self) -> int: - return hash(self._canonical_spec) - - def __eq__(self, other: object) -> bool: - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. - @property - def operator(self) -> str: - return self._spec[0] + .. tip:: - @property - def version(self) -> str: - return self._spec[1] - - @property - def prereleases(self) -> Optional[bool]: - return self._prereleases - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __contains__(self, item: str) -> bool: - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - normalized_item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. 
- for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(==|!=|<=|>=|<|>)) - \s* - (?P - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - super().__init__(spec, prereleases) - - warnings.warn( - "Creating a LegacyVersion has been deprecated and will be " - "removed in the next major release", - DeprecationWarning, - ) - - def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal( - self, prospective: LegacyVersion, spec: str - ) -> bool: - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective > self._coerce_version(spec) - - -def _require_version_compare( - fn: Callable[["Specifier", ParsedVersion, str], bool] -) -> Callable[["Specifier", ParsedVersion, str], bool]: - @functools.wraps(fn) - def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - - return wrapped - - -class Specifier(_IndividualSpecifier): + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). 
+ """ - _regex_str = r""" + _operator_regex_str = r""" (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" (?P (?: # The identity operators allow for an escape hatch that will @@ -309,8 +127,10 @@ class Specifier(_IndividualSpecifier): # but included entirely as an escape hatch. (?<====) # Only match for the identity operator \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. ) | (?: @@ -323,23 +143,23 @@ class Specifier(_IndividualSpecifier): v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* )? ) | @@ -354,7 +174,7 @@ class Specifier(_IndividualSpecifier): [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) (?: # pre release [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) + (alpha|beta|preview|pre|a|b|c|rc) [-_\.]? [0-9]* )? @@ -379,7 +199,7 @@ class Specifier(_IndividualSpecifier): [0-9]+(?:\.[0-9]+)* # release (?: # pre release [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) + (alpha|beta|preview|pre|a|b|c|rc) [-_\.]? [0-9]* )? @@ -391,7 +211,10 @@ class Specifier(_IndividualSpecifier): ) """ - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + _regex = re.compile( + r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) _operators = { "~=": "compatible", @@ -404,8 +227,152 @@ class Specifier(_IndividualSpecifier): "===": "arbitrary", } - @_require_version_compare - def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + """Initialize a Specifier instance. + + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). + """ + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: '{spec}'") + + self._spec: Tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + @property + def prereleases(self) -> bool: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. 
+ if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if Version(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. + + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> Tuple[str, str]: + canonical_version = canonicalize_version( + self._spec[1], + strip_trailing_zero=(self._spec[0] != "~="), + ) + return self._spec[0], canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. 
This allows us to @@ -426,34 +393,33 @@ def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: prospective, prefix ) - @_require_version_compare - def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: + def _compare_equal(self, prospective: Version, spec: str) -> bool: # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) + normalized_prospective = canonicalize_version(prospective.public) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. - split_spec = _version_split(spec[:-2]) # Remove the trailing .* + split_spec = _version_split(normalized_spec) # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. - split_prospective = _version_split(str(prospective)) + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. - shortened_prospective = split_prospective[: len(split_spec)] + shortened_prospective = padded_prospective[: len(split_spec)] - # Pad out our two sides with zeros so that they both equal the same - # length. - padded_spec, padded_prospective = _pad_version( - split_spec, shortened_prospective - ) - - return padded_prospective == padded_spec + return shortened_prospective == split_spec else: # Convert our spec string into a Version spec_version = Version(spec) @@ -466,30 +432,24 @@ def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: return prospective == spec_version - @_require_version_compare - def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: return not self._compare_equal(prospective, spec) - @_require_version_compare - def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) <= Version(spec) - @_require_version_compare - def _compare_greater_than_equal( - self, prospective: ParsedVersion, spec: str - ) -> bool: + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) >= Version(spec) - @_require_version_compare - def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: # Convert our spec to a Version instance, since we'll want to work with # it as a version. 
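A quick sanity sketch of the comparison semantics implemented above (plain packaging.specifiers API; the prefix-matching case exercises the canonicalize-then-0-pad logic this hunk adds):

    from packaging.specifiers import Specifier

    print(Specifier("~=2.2").contains("2.3"))            # True:  ~=2.2 means >=2.2,==2.*
    print(Specifier("~=2.2").contains("3.0"))            # False
    print(Specifier("==1.2.*").contains("1.2.3+local"))  # True: prefix match ignores the local segment
    print(Specifier(">=1.0").contains("1.1a1"))          # False: prereleases rejected by default
    print(Specifier(">=1.0").contains("1.1a1", prereleases=True))  # True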
@@ -514,8 +474,7 @@ def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: # version in the spec. return True - @_require_version_compare - def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: # Convert our spec to a Version instance, since we'll want to work with # it as a version. @@ -549,34 +508,133 @@ def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bo def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: return str(prospective).lower() == str(spec).lower() - @property - def prereleases(self) -> bool: + def __contains__(self, item: Union[str, Version]) -> bool: + """Return whether or not the item is contained in this specifier. - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases + :param item: The item to check for. - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) - return False + def contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + False + >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") + True + >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) + True + """ - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version, this allows us to have a shortcut for + # "2.0" in Specifier(">=2") + normalized_item = _coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. 
+ if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + operator_callable: CallableOperator = self._get_operator(self.operator) + return operator_callable(normalized_item, self.version) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifier. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will be intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(Specifier().contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) + ['1.2.3', '1.3', ] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + """ + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = _coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") @@ -618,22 +676,39 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. + """ + def __init__( self, specifiers: str = "", prereleases: Optional[bool] = None ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. 
The default of ``None`` will autodetect it from the + given specifiers. + + :raises InvalidSpecifier: + If the given ``specifiers`` are not parseable than this exception will be + raised. + """ - # Split on , to break each individual specifier into it's own item, and + # Split on `,` to break each individual specifier into it's own item, and # strip each item to remove leading/trailing whitespace. split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed: Set[_IndividualSpecifier] = set() + # Specifier. + parsed: Set[Specifier] = set() for specifier in split_specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) + parsed.add(Specifier(specifier)) # Turn our parsed specifiers into a frozen set and save them for later. self._specs = frozenset(parsed) @@ -642,7 +717,40 @@ def __init__( # we accept prereleases or not. self._prereleases = prereleases + @property + def prereleases(self) -> Optional[bool]: + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + def __repr__(self) -> str: + """A representation of the specifier set that shows all internal state. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> SpecifierSet('>=1.0.0,!=2.0.0') + =1.0.0')> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ pre = ( f", prereleases={self.prereleases!r}" if self._prereleases is not None @@ -652,12 +760,31 @@ def __repr__(self) -> str: return f"" def __str__(self) -> str: + """A string representation of the specifier set that can be round-tripped. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) + '!=1.0.1,>=1.0.0' + >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) + '!=1.0.1,>=1.0.0' + """ return ",".join(sorted(str(s) for s in self._specs)) def __hash__(self) -> int: return hash(self._specs) def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": + """Return a SpecifierSet which is a combination of the two sets. + + :param other: The other object to combine with. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' + =1.0.0')> + >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') + =1.0.0')> + """ if isinstance(other, str): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): @@ -681,7 +808,25 @@ def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": return specifier def __eq__(self, other: object) -> bool: - if isinstance(other, (str, _IndividualSpecifier)): + """Whether or not the two SpecifierSet-like objects are equal. 
+ + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == + ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") + False + """ + if isinstance(other, (str, Specifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -689,43 +834,72 @@ def __eq__(self, other: object) -> bool: return self._specs == other._specs def __len__(self) -> int: + """Returns the number of specifiers in this specifier set.""" return len(self._specs) - def __iter__(self) -> Iterator[_IndividualSpecifier]: - return iter(self._specs) - - @property - def prereleases(self) -> Optional[bool]: - - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) + def __iter__(self) -> Iterator[Specifier]: + """ + Returns an iterator over all the underlying :class:`Specifier` instances + in this specifier set. - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value + >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) + [, =1.0.0')>] + """ + return iter(self._specs) def __contains__(self, item: UnparsedVersion) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) + True + """ return self.contains(item) def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None + self, + item: UnparsedVersion, + prereleases: Optional[bool] = None, + installed: Optional[bool] = None, ) -> bool: - - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) + """Return whether or not the item is contained in this SpecifierSet. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this SpecifierSet. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. 
+ + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) + True + """ + # Ensure that our item is a Version instance. + if not isinstance(item, Version): + item = Version(item) # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the @@ -742,6 +916,9 @@ def contains( if not prereleases and item.is_prerelease: return False + if installed and item.is_prerelease: + item = Version(item.base_version) + # We simply dispatch to the underlying specs here to make sure that the # given version is contained within all of them. # Note: This use of all() here means that an empty set of specifiers @@ -749,9 +926,46 @@ def contains( return all(s.contains(item, prereleases=prereleases) for s in self._specs) def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifiers in this set. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will be intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) + ['1.3', ] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) + [] + >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + + An "empty" SpecifierSet will filter items based on the presence of prerelease + versions in the set. + + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. 
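The installed keyword on contains() above is the setuptools-local addition in this patch; a minimal sketch of what it changes (an already-installed prerelease is compared as its base version):

    from packaging.specifiers import SpecifierSet

    s = SpecifierSet(">=1.1")
    print(s.contains("1.1rc1", prereleases=True))                  # False: 1.1rc1 < 1.1
    print(s.contains("1.1rc1", prereleases=True, installed=True))  # True: compared as its base version 1.1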
@@ -764,27 +978,16 @@ def filter( if self._specs: for spec in self._specs: iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable + return iter(iterable) # If we do not have any specifiers, then we need to have a rough filter # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. + # releases. else: - filtered: List[VersionTypeVar] = [] - found_prereleases: List[VersionTypeVar] = [] - - item: UnparsedVersion - parsed_version: Union[Version, LegacyVersion] + filtered: List[UnparsedVersionVar] = [] + found_prereleases: List[UnparsedVersionVar] = [] for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue + parsed_version = _coerce_version(item) # Store any item which is a pre-release for later unless we've # already found a final version or we are accepting prereleases @@ -797,6 +1000,6 @@ def filter( # If we've found no items except for pre-releases, then we'll go # ahead and use the pre-releases if not filtered and found_prereleases and prereleases is None: - return found_prereleases + return iter(found_prereleases) - return filtered + return iter(filtered) diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py index 9a3d25a71c..19ccbde3ea 100644 --- a/setuptools/_vendor/packaging/tags.py +++ b/setuptools/_vendor/packaging/tags.py @@ -4,6 +4,7 @@ import logging import platform +import subprocess import sys import sysconfig from importlib.machinery import EXTENSION_SUFFIXES @@ -36,7 +37,7 @@ } -_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 +_32_BIT_INTERPRETER = sys.maxsize <= 2**32 class Tag: @@ -224,10 +225,45 @@ def cpython_tags( yield Tag(interpreter, "abi3", platform_) -def _generic_abi() -> Iterator[str]: - abi = sysconfig.get_config_var("SOABI") - if abi: - yield _normalize_string(abi) +def _generic_abi() -> List[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. + # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. + return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? 
+ abi = soabi + else: + return [] + return [_normalize_string(abi)] def generic_tags( @@ -251,8 +287,9 @@ def generic_tags( interpreter = "".join([interp_name, interp_version]) if abis is None: abis = _generic_abi() + else: + abis = list(abis) platforms = list(platforms or platform_tags()) - abis = list(abis) if "none" not in abis: abis.append("none") for abi in abis: @@ -356,6 +393,22 @@ def mac_platforms( version_str, _, cpu_arch = platform.mac_ver() if version is None: version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + universal_newlines=True, + ).stdout + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) else: version = version if arch is None: @@ -446,6 +499,9 @@ def platform_tags() -> Iterator[str]: def interpreter_name() -> str: """ Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. """ name = sys.implementation.name return INTERPRETER_SHORT_NAMES.get(name) or name @@ -482,6 +538,9 @@ def sys_tags(*, warn: bool = False) -> Iterator[Tag]: yield from generic_tags() if interp_name == "pp": - yield from compatible_tags(interpreter="pp3") + interp = "pp3" + elif interp_name == "cp": + interp = "cp" + interpreter_version(warn=warn) else: - yield from compatible_tags() + interp = None + yield from compatible_tags(interpreter=interp) diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py index bab11b80c6..33c613b749 100644 --- a/setuptools/_vendor/packaging/utils.py +++ b/setuptools/_vendor/packaging/utils.py @@ -35,7 +35,9 @@ def canonicalize_name(name: str) -> NormalizedName: return cast(NormalizedName, value) -def canonicalize_version(version: Union[Version, str]) -> str: +def canonicalize_version( + version: Union[Version, str], *, strip_trailing_zero: bool = True +) -> str: """ This is very similar to Version.__str__, but has one subtle difference with the way it handles the release segment. @@ -56,8 +58,11 @@ def canonicalize_version(version: Union[Version, str]) -> str: parts.append(f"{parsed.epoch}!") # Release segment - # NB: This strips trailing '.0's to normalize - parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) + release_segment = ".".join(str(x) for x in parsed.release) + if strip_trailing_zero: + # NB: This strips trailing '.0's to normalize + release_segment = re.sub(r"(\.0)+$", "", release_segment) + parts.append(release_segment) # Pre-release if parsed.pre is not None: diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py index de9a09a4ed..e5c738cfda 100644 --- a/setuptools/_vendor/packaging/version.py +++ b/setuptools/_vendor/packaging/version.py @@ -1,16 +1,20 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +""" +.. 
testsetup:: + + from packaging.version import parse, Version +""" import collections import itertools import re -import warnings -from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union +from typing import Callable, Optional, SupportsInt, Tuple, Union from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType -__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] +__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"] InfiniteTypes = Union[InfinityType, NegativeInfinityType] PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] @@ -29,36 +33,37 @@ CmpKey = Tuple[ int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType ] -LegacyCmpKey = Tuple[int, Tuple[str, ...]] -VersionComparisonMethod = Callable[ - [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool -] +VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool] _Version = collections.namedtuple( "_Version", ["epoch", "release", "dev", "pre", "post", "local"] ) -def parse(version: str) -> Union["LegacyVersion", "Version"]: - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. +def parse(version: str) -> "Version": + """Parse the given version string. + + >>> parse('1.0.dev1') + + + :param version: The version string to parse. + :raises InvalidVersion: When the version string is not a valid version. """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) + return Version(version) class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. + """Raised when a version string is not a valid version. + + >>> Version("invalid") + Traceback (most recent call last): + ... 
+ packaging.version.InvalidVersion: Invalid version: 'invalid'
 """
 class _BaseVersion:
- _key: Union[CmpKey, LegacyCmpKey]
+ _key: CmpKey
 def __hash__(self) -> int:
 return hash(self._key)
@@ -103,126 +108,9 @@ def __ne__(self, other: object) -> bool:
 return self._key != other._key
-class LegacyVersion(_BaseVersion):
- def __init__(self, version: str) -> None:
- self._version = str(version)
- self._key = _legacy_cmpkey(self._version)
-
- warnings.warn(
- "Creating a LegacyVersion has been deprecated and will be "
- "removed in the next major release",
- DeprecationWarning,
- )
-
- def __str__(self) -> str:
- return self._version
-
- def __repr__(self) -> str:
- return f"<LegacyVersion('{self}')>"
-
- @property
- def public(self) -> str:
- return self._version
-
- @property
- def base_version(self) -> str:
- return self._version
-
- @property
- def epoch(self) -> int:
- return -1
-
- @property
- def release(self) -> None:
- return None
-
- @property
- def pre(self) -> None:
- return None
-
- @property
- def post(self) -> None:
- return None
-
- @property
- def dev(self) -> None:
- return None
-
- @property
- def local(self) -> None:
- return None
-
- @property
- def is_prerelease(self) -> bool:
- return False
-
- @property
- def is_postrelease(self) -> bool:
- return False
-
- @property
- def is_devrelease(self) -> bool:
- return False
-
-
-_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
-
-_legacy_version_replacement_map = {
- "pre": "c",
- "preview": "c",
- "-": "final-",
- "rc": "c",
- "dev": "@",
-}
-
-
-def _parse_version_parts(s: str) -> Iterator[str]:
- for part in _legacy_version_component_re.split(s):
- part = _legacy_version_replacement_map.get(part, part)
-
- if not part or part == ".":
- continue
-
- if part[:1] in "0123456789":
- # pad for numeric comparison
- yield part.zfill(8)
- else:
- yield "*" + part
-
- # ensure that alpha/beta/candidate are before final
- yield "*final"
-
-
-def _legacy_cmpkey(version: str) -> LegacyCmpKey:
-
- # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
- # greater than or equal to 0. This will effectively put the LegacyVersion,
- # which uses the defacto standard originally implemented by setuptools,
- # as before all PEP 440 versions.
- epoch = -1
-
- # This scheme is taken from pkg_resources.parse_version setuptools prior to
- # it's adoption of the packaging library.
- parts: List[str] = []
- for part in _parse_version_parts(version.lower()):
- if part.startswith("*"):
- # remove "-" before a prerelease tag
- if part < "*final":
- while parts and parts[-1] == "*final-":
- parts.pop()
-
- # remove trailing zeros from each series of numeric parts
- while parts and parts[-1] == "00000000":
- parts.pop()
-
- parts.append(part)
-
- return epoch, tuple(parts)
-
-
 # Deliberately not anchored to the start and end of the string, to make it
 # easier for 3rd party code to reuse
-VERSION_PATTERN = r"""
+_VERSION_PATTERN = r"""
 v?
 (?:
 (?:(?P<epoch>[0-9]+)!)? # epoch
@@ -253,12 +141,55 @@ def _legacy_cmpkey(version: str) -> LegacyCmpKey:
 (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
 """
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
 class Version(_BaseVersion):
+ """This class abstracts handling of a project's versions.
+
+ A :class:`Version` instance is comparison aware and can be compared and
+ sorted using the standard Python interfaces.
+
+ >>> v1 = Version("1.0a5")
+ >>> v2 = Version("1.0")
+ >>> v1
+ <Version('1.0a5')>
+ >>> v2
+ <Version('1.0')>
+ >>> v1 < v2
+ True
+ >>> v1 == v2
+ False
+ >>> v1 > v2
+ False
+ >>> v1 >= v2
+ False
+ >>> v1 <= v2
+ True
+ """
 _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
 def __init__(self, version: str) -> None:
+ """Initialize a Version object.
+
+ :param version:
+ The string representation of a version which will be parsed and normalized
+ before use.
+ :raises InvalidVersion:
+ If the ``version`` does not conform to PEP 440 in any way then this
+ exception will be raised.
+ """
 # Validate the version and parse it into pieces
 match = self._regex.search(version)
@@ -288,9 +219,19 @@ def __init__(self, version: str) -> None:
 )
 def __repr__(self) -> str:
+ """A representation of the Version that shows all internal state.
+
+ >>> Version('1.0.0')
+ <Version('1.0.0')>
+ """
 return f"<Version('{self}')>"
 def __str__(self) -> str:
+ """A string representation of the version that can be rounded-tripped.
+
+ >>> str(Version("1.0a5"))
+ '1.0a5'
+ """
 parts = []
 # Epoch
@@ -320,29 +261,80 @@ def __str__(self) -> str:
 @property
 def epoch(self) -> int:
+ """The epoch of the version.
+
+ >>> Version("2.0.0").epoch
+ 0
+ >>> Version("1!2.0.0").epoch
+ 1
+ """
 _epoch: int = self._version.epoch
 return _epoch
 @property
 def release(self) -> Tuple[int, ...]:
+ """The components of the "release" segment of the version.
+
+ >>> Version("1.2.3").release
+ (1, 2, 3)
+ >>> Version("2.0.0").release
+ (2, 0, 0)
+ >>> Version("1!2.0.0.post0").release
+ (2, 0, 0)
+
+ Includes trailing zeroes but not the epoch or any pre-release / development /
+ post-release suffixes.
+ """
 _release: Tuple[int, ...] = self._version.release
 return _release
 @property
 def pre(self) -> Optional[Tuple[str, int]]:
+ """The pre-release segment of the version.
+
+ >>> print(Version("1.2.3").pre)
+ None
+ >>> Version("1.2.3a1").pre
+ ('a', 1)
+ >>> Version("1.2.3b1").pre
+ ('b', 1)
+ >>> Version("1.2.3rc1").pre
+ ('rc', 1)
+ """
 _pre: Optional[Tuple[str, int]] = self._version.pre
 return _pre
 @property
 def post(self) -> Optional[int]:
+ """The post-release number of the version.
+
+ >>> print(Version("1.2.3").post)
+ None
+ >>> Version("1.2.3.post1").post
+ 1
+ """
 return self._version.post[1] if self._version.post else None
 @property
 def dev(self) -> Optional[int]:
+ """The development number of the version.
+
+ >>> print(Version("1.2.3").dev)
+ None
+ >>> Version("1.2.3.dev1").dev
+ 1
+ """
 return self._version.dev[1] if self._version.dev else None
 @property
 def local(self) -> Optional[str]:
+ """The local version segment of the version.
+
+ >>> print(Version("1.2.3").local)
+ None
+ >>> Version("1.2.3+abc").local
+ 'abc'
+ """
 if self._version.local:
 return ".".join(str(x) for x in self._version.local)
 else:
@@ -350,10 +342,31 @@ def local(self) -> Optional[str]:
 @property
 def public(self) -> str:
+ """The public portion of the version.
+
+ >>> Version("1.2.3").public
+ '1.2.3'
+ >>> Version("1.2.3+abc").public
+ '1.2.3'
+ >>> Version("1.2.3+abc.dev1").public
+ '1.2.3'
+ """
 return str(self).split("+", 1)[0]
 @property
 def base_version(self) -> str:
+ """The "base version" of the version.
+ + >>> Version("1.2.3").base_version + '1.2.3' + >>> Version("1.2.3+abc").base_version + '1.2.3' + >>> Version("1!1.2.3+abc.dev1").base_version + '1!1.2.3' + + The "base version" is the public version of the project without any pre or post + release markers. + """ parts = [] # Epoch @@ -367,26 +380,72 @@ def base_version(self) -> str: @property def is_prerelease(self) -> bool: + """Whether this version is a pre-release. + + >>> Version("1.2.3").is_prerelease + False + >>> Version("1.2.3a1").is_prerelease + True + >>> Version("1.2.3b1").is_prerelease + True + >>> Version("1.2.3rc1").is_prerelease + True + >>> Version("1.2.3dev1").is_prerelease + True + """ return self.dev is not None or self.pre is not None @property def is_postrelease(self) -> bool: + """Whether this version is a post-release. + + >>> Version("1.2.3").is_postrelease + False + >>> Version("1.2.3.post1").is_postrelease + True + """ return self.post is not None @property def is_devrelease(self) -> bool: + """Whether this version is a development release. + + >>> Version("1.2.3").is_devrelease + False + >>> Version("1.2.3.dev1").is_devrelease + True + """ return self.dev is not None @property def major(self) -> int: + """The first item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").major + 1 + """ return self.release[0] if len(self.release) >= 1 else 0 @property def minor(self) -> int: + """The second item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").minor + 2 + >>> Version("1").minor + 0 + """ return self.release[1] if len(self.release) >= 2 else 0 @property def micro(self) -> int: + """The third item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").micro + 3 + >>> Version("1").micro + 0 + """ return self.release[2] if len(self.release) >= 3 else 0 diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst b/setuptools/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst deleted file mode 100644 index e1187231a3..0000000000 --- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,3 +0,0 @@ -UNKNOWN - - diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt b/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt deleted file mode 100644 index bbc959e0d6..0000000000 --- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt +++ /dev/null @@ -1,18 +0,0 @@ -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
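The version.py rewrite above changes the vendored packaging API in two user-visible ways: parse() is now a thin wrapper that always returns a Version, raising InvalidVersion for non-PEP 440 strings instead of falling back to the removed LegacyVersion, and canonicalize_version() grows a strip_trailing_zero keyword. A minimal sketch of the new surface, assuming the vendored modules are importable as the top-level `packaging` package (as in the upstream library):

    # Sketch only: assumes `packaging` resolves to the vendored copy changed above.
    from packaging.utils import canonicalize_version
    from packaging.version import InvalidVersion, Version, parse

    v = parse("1.0.dev1")          # always a Version now; no LegacyVersion fallback
    assert isinstance(v, Version)
    assert v.is_devrelease and v.dev == 1

    try:
        parse("lolwat")            # a non-PEP 440 ("legacy") version string
    except InvalidVersion as exc:  # previously: LegacyVersion plus a DeprecationWarning
        print(exc)                 # Invalid version: 'lolwat'

    # New keyword: opt out of stripping trailing zeros from the release segment.
    assert canonicalize_version("1.2.0") == "1.2"
    assert canonicalize_version("1.2.0", strip_trailing_zero=False) == "1.2.0"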
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/METADATA b/setuptools/_vendor/pyparsing-2.2.1.dist-info/METADATA deleted file mode 100644 index a15c350e36..0000000000 --- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/METADATA +++ /dev/null @@ -1,30 +0,0 @@ -Metadata-Version: 2.0 -Name: pyparsing -Version: 2.2.1 -Summary: Python parsing module -Home-page: https://github.com/pyparsing/pyparsing/ -Author: Paul McGuire -Author-email: ptmcg@users.sourceforge.net -License: MIT License -Download-URL: https://pypi.org/project/pyparsing/ -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* - -UNKNOWN - - diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/RECORD b/setuptools/_vendor/pyparsing-2.2.1.dist-info/RECORD deleted file mode 100644 index 09cc30e34f..0000000000 --- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -__pycache__/pyparsing.cpython-310.pyc,, -pyparsing-2.2.1.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 -pyparsing-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyparsing-2.2.1.dist-info/LICENSE.txt,sha256=081Pq74Spe1XdwrGkewNKSqa078kLIh7UWI-wVjdj8I,1041 -pyparsing-2.2.1.dist-info/METADATA,sha256=I0jhx9vpUYlQXjn4gVDnFFoAt3nNrxwR4iuqA_pknYs,1091 -pyparsing-2.2.1.dist-info/RECORD,, -pyparsing-2.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pyparsing-2.2.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 -pyparsing-2.2.1.dist-info/metadata.json,sha256=v1_77-dSdajUZSItSJg8Ov9M713STY3PzhyrRvs1ax4,1185 -pyparsing-2.2.1.dist-info/top_level.txt,sha256=eUOjGzJVhlQ3WS2rFAy2mN3LX_7FKTM5GSJ04jfnLmU,10 -pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/WHEEL b/setuptools/_vendor/pyparsing-2.2.1.dist-info/WHEEL deleted file mode 100644 index 7332a419cd..0000000000 --- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.30.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/metadata.json b/setuptools/_vendor/pyparsing-2.2.1.dist-info/metadata.json deleted file mode 100644 index b760b766b0..0000000000 --- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming 
Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "download_url": "https://pypi.org/project/pyparsing/", "extensions": {"python.details": {"contacts": [{"email": "ptmcg@users.sourceforge.net", "name": "Paul McGuire", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pyparsing/pyparsing/"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT License", "metadata_version": "2.0", "name": "pyparsing", "requires_python": ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*", "summary": "Python parsing module", "version": "2.2.1"}
\ No newline at end of file
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt b/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
deleted file mode 100644
index 210dfec50b..0000000000
--- a/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-pyparsing
diff --git a/setuptools/_vendor/pyparsing.py b/setuptools/_vendor/pyparsing.py
deleted file mode 100644
index cf75e1e5fc..0000000000
--- a/setuptools/_vendor/pyparsing.py
+++ /dev/null
@@ -1,5742 +0,0 @@
-# module pyparsing.py
-#
-# Copyright (c) 2003-2018 Paul T. McGuire
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
-__doc__ = \
-"""
-pyparsing module - Classes and methods to define and execute parsing grammars
-=============================================================================
-
-The pyparsing module is an alternative approach to creating and executing simple grammars,
-vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
-don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
-provides a library of classes that you use to construct the grammar directly in Python.
-
-Here is a program to parse "Hello, World!" (or any greeting of the form
-C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
-(L{'+'} operator gives L{And} expressions, strings are auto-converted to
-L{Literal} expressions)::
-
- from pyparsing import Word, alphas
-
- # define grammar of a greeting
- greet = Word(alphas) + "," + Word(alphas) + "!"
-
- hello = "Hello, World!"
- print (hello, "->", greet.parseString(hello))
-
-The program outputs the following::
-
- Hello, World! -> ['Hello', ',', 'World', '!']
-
-The Python representation of the grammar is quite readable, owing to the self-explanatory
-class names, and the use of '+', '|' and '^' operators.
-
-The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an
-object with named attributes.
-
-The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
- - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- - quoted strings
- - embedded comments
-
-
-Getting Started -
------------------
-Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
-classes inherit from. Use the docstrings for examples of how to:
- - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
- - construct character word-group expressions using the L{Word} class
- - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
- - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones
- - associate names with your parsed results using L{ParserElement.setResultsName}
- - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
- - find more useful common expressions in the L{pyparsing_common} namespace class
-"""
-
-__version__ = "2.2.1"
-__versionTime__ = "18 Sep 2018 00:49 UTC"
-__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
-
-import string
-from weakref import ref as wkref
-import copy
-import sys
-import warnings
-import re
-import sre_constants
-import collections
-import pprint
-import traceback
-import types
-from datetime import datetime
-
-try:
- from _thread import RLock
-except ImportError:
- from threading import RLock
-
-try:
- # Python 3
- from collections.abc import Iterable
- from collections.abc import MutableMapping
-except ImportError:
- # Python 2.7
- from collections import Iterable
- from collections import MutableMapping
-
-try:
- from collections import OrderedDict as _OrderedDict
-except ImportError:
- try:
- from ordereddict import OrderedDict as _OrderedDict
- except ImportError:
- _OrderedDict = None
-
-#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
-
-__all__ = [
-'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
-'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
-'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
-'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
-'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
-'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
-'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
-'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
-'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
-'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
-'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
-'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
-'nestedExpr', 'nullDebugAction',
'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', -] - -system_version = tuple(sys.version_info)[:3] -PY_3 = system_version[0] == 3 -if PY_3: - _MAX_INT = sys.maxsize - basestring = str - unichr = chr - _ustr = str - - # build list of single arg builtins, that can be used as parse actions - singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] - -else: - _MAX_INT = sys.maxint - range = xrange - - def _ustr(obj): - """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries - str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It - then < returns the unicode object | encodes it with the default encoding | ... >. - """ - if isinstance(obj,unicode): - return obj - - try: - # If this works, then _ustr(obj) has the same behaviour as str(obj), so - # it won't break any existing code. - return str(obj) - - except UnicodeEncodeError: - # Else encode it - ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') - xmlcharref = Regex(r'&#\d+;') - xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) - return xmlcharref.transformString(ret) - - # build list of single arg builtins, tolerant of Python version, that can be used as parse actions - singleArgBuiltins = [] - import __builtin__ - for fname in "sum len sorted reversed list tuple set any all min max".split(): - try: - singleArgBuiltins.append(getattr(__builtin__,fname)) - except AttributeError: - continue - -_generatorType = type((y for y in range(1))) - -def _xml_escape(data): - """Escape &, <, >, ", ', etc. 
in a string of data.""" - - # ampersand must be replaced first - from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): - data = data.replace(from_, to_) - return data - -class _Constants(object): - pass - -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) -printables = "".join(c for c in string.printable if c not in string.whitespace) - -class ParseBaseException(Exception): - """base exception class for all parsing runtime exceptions""" - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): - self.loc = loc - if msg is None: - self.msg = pstr - self.pstr = "" - else: - self.msg = msg - self.pstr = pstr - self.parserElement = elem - self.args = (pstr, loc, msg) - - @classmethod - def _from_exception(cls, pe): - """ - internal factory method to simplify creating one type of ParseException - from another - avoids having __init__ signature conflicts among subclasses - """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - - def __getattr__( self, aname ): - """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) - else: - raise AttributeError(aname) - - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): - return _ustr(self) - def markInputline( self, markerString = ">!<" ): - """Extracts the exception line from the input string, and marks - the location of the exception with a special symbol. 
- """ - line_str = self.line - line_column = self.column - 1 - if markerString: - line_str = "".join((line_str[:line_column], - markerString, line_str[line_column:])) - return line_str.strip() - def __dir__(self): - return "lineno col line".split() + dir(type(self)) - -class ParseException(ParseBaseException): - """ - Exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - - Example:: - try: - Word(nums).setName("integer").parseString("ABC") - except ParseException as pe: - print(pe) - print("column: {}".format(pe.col)) - - prints:: - Expected integer (at char 0), (line:1, col:1) - column: 1 - """ - pass - -class ParseFatalException(ParseBaseException): - """user-throwable exception thrown when inconsistent parse content - is found; stops all parsing immediately""" - pass - -class ParseSyntaxException(ParseFatalException): - """just like L{ParseFatalException}, but thrown internally when an - L{ErrorStop} ('-' operator) indicates that parsing is to stop - immediately because an unbacktrackable syntax error has been found""" - pass - -#~ class ReparseException(ParseBaseException): - #~ """Experimental class - parse actions can raise this exception to cause - #~ pyparsing to reparse the input string: - #~ - with a modified input string, and/or - #~ - with a modified start location - #~ Set the values of the ReparseException in the constructor, and raise the - #~ exception in a parse action to cause pyparsing to use the new string/location. - #~ Setting the values as None causes no change to be made. - #~ """ - #~ def __init_( self, newstring, restartLoc ): - #~ self.newParseText = newstring - #~ self.reparseLoc = restartLoc - -class RecursiveGrammarException(Exception): - """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" - def __init__( self, parseElementList ): - self.parseElementTrace = parseElementList - - def __str__( self ): - return "RecursiveGrammarException: %s" % self.parseElementTrace - -class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): - return self.tup[i] - def __repr__(self): - return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) - -class ParseResults(object): - """ - Structured parse results, to provide multiple means of access to the parsed data: - - as a list (C{len(results)}) - - by list index (C{results[0], results[1]}, etc.) 
- - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
-
- Example::
- integer = Word(nums)
- date_str = (integer.setResultsName("year") + '/'
- + integer.setResultsName("month") + '/'
- + integer.setResultsName("day"))
- # equivalent form:
- # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
- # parseString returns a ParseResults object
- result = date_str.parseString("1999/12/31")
-
- def test(s, fn=repr):
- print("%s -> %s" % (s, fn(eval(s))))
- test("list(result)")
- test("result[0]")
- test("result['month']")
- test("result.day")
- test("'month' in result")
- test("'minutes' in result")
- test("result.dump()", str)
- prints::
- list(result) -> ['1999', '/', '12', '/', '31']
- result[0] -> '1999'
- result['month'] -> '12'
- result.day -> '31'
- 'month' in result -> True
- 'minutes' in result -> False
- result.dump() -> ['1999', '/', '12', '/', '31']
- - day: 31
- - month: 12
- - year: 1999
- """
- def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
- if isinstance(toklist, cls):
- return toklist
- retobj = object.__new__(cls)
- retobj.__doinit = True
- return retobj
-
- # Performance tuning: we construct a *lot* of these, so keep this
- # constructor as small and fast as possible
- def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
- if self.__doinit:
- self.__doinit = False
- self.__name = None
- self.__parent = None
- self.__accumNames = {}
- self.__asList = asList
- self.__modal = modal
- if toklist is None:
- toklist = []
- if isinstance(toklist, list):
- self.__toklist = toklist[:]
- elif isinstance(toklist, _generatorType):
- self.__toklist = list(toklist)
- else:
- self.__toklist = [toklist]
- self.__tokdict = dict()
-
- if name is not None and name:
- if not modal:
- self.__accumNames[name] = 0
- if isinstance(name,int):
- name = _ustr(name) # will always return a str, but use _ustr for consistency
- self.__name = name
- if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
- if isinstance(toklist,basestring):
- toklist = [ toklist ]
- if asList:
- if isinstance(toklist,ParseResults):
- self[name] = _ParseResultsWithOffset(toklist.copy(),0)
- else:
- self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
- self[name].__name = name
- else:
- try:
- self[name] = toklist[0]
- except (KeyError,TypeError,IndexError):
- self[name] = toklist
-
- def __getitem__( self, i ):
- if isinstance( i, (int,slice) ):
- return self.__toklist[i]
- else:
- if i not in self.__accumNames:
- return self.__tokdict[i][-1][0]
- else:
- return ParseResults([ v[0] for v in self.__tokdict[i] ])
-
- def __setitem__( self, k, v, isinstance=isinstance ):
- if isinstance(v,_ParseResultsWithOffset):
- self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
- sub = v[0]
- elif isinstance(k,(int,slice)):
- self.__toklist[k] = v
- sub = v
- else:
- self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
- sub = v
- if isinstance(sub,ParseResults):
- sub.__parent = wkref(self)
-
- def __delitem__( self, i ):
- if isinstance(i,(int,slice)):
- mylen = len( self.__toklist )
- del self.__toklist[i]
-
- # convert int to slice
- if isinstance(i, int):
- if i < 0:
- i += mylen
- i = slice(i, i+1)
- # get removed indices
- removed = list(range(*i.indices(mylen)))
- removed.reverse()
- # fixup indices in token dictionary
- for name,occurrences in self.__tokdict.items():
- for j in removed:
- for k, (value, position) in enumerate(occurrences):
- 
occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) - else: - del self.__tokdict[i] - - def __contains__( self, k ): - return k in self.__tokdict - - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) - __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): - if hasattr(self.__tokdict, "iterkeys"): - return self.__tokdict.iterkeys() - else: - return iter(self.__tokdict) - - def _itervalues( self ): - return (self[k] for k in self._iterkeys()) - - def _iteritems( self ): - return ((k, self[k]) for k in self._iterkeys()) - - if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys (Python 3.x only).""" - - values = _itervalues - """Returns an iterator of all named result values (Python 3.x only).""" - - items = _iteritems - """Returns an iterator of all named result key-value tuples (Python 3.x only).""" - - else: - iterkeys = _iterkeys - """Returns an iterator of all named result keys (Python 2.x only).""" - - itervalues = _itervalues - """Returns an iterator of all named result values (Python 2.x only).""" - - iteritems = _iteritems - """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - - def keys( self ): - """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iterkeys()) - - def values( self ): - """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.itervalues()) - - def items( self ): - """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iteritems()) - - def haskeys( self ): - """Since keys() returns an iterator, this method is helpful in bypassing - code that looks for the existence of any defined results names.""" - return bool(self.__tokdict) - - def pop( self, *args, **kwargs): - """ - Removes and returns item at specified index (default=C{last}). - Supports both C{list} and C{dict} semantics for C{pop()}. If passed no - argument or an integer argument, it will use C{list} semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use C{dict} - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in C{dict.pop()}. 
- - Example:: - def remove_first(tokens): - tokens.pop(0) - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] - - label = Word(alphas) - patt = label("LABEL") + OneOrMore(Word(nums)) - print(patt.parseString("AAB 123 321").dump()) - - # Use pop() in a parse action to remove named result (note that corresponding value is not - # removed from list form of results) - def remove_LABEL(tokens): - tokens.pop("LABEL") - return tokens - patt.addParseAction(remove_LABEL) - print(patt.parseString("AAB 123 321").dump()) - prints:: - ['AAB', '123', '321'] - - LABEL: AAB - - ['AAB', '123', '321'] - """ - if not args: - args = [-1] - for k,v in kwargs.items(): - if k == 'default': - args = (args[0], v) - else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue - - def get(self, key, defaultValue=None): - """ - Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified. - - Similar to C{dict.get()}. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString("1999/12/31") - print(result.get("year")) # -> '1999' - print(result.get("hour", "not specified")) # -> 'not specified' - print(result.get("hour")) # -> None - """ - if key in self: - return self[key] - else: - return defaultValue - - def insert( self, index, insStr ): - """ - Inserts new element at location index in the list of parsed tokens. - - Similar to C{list.insert()}. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to insert the parse location in the front of the parsed results - def insert_locn(locn, tokens): - tokens.insert(0, locn) - print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] - """ - self.__toklist.insert(index, insStr) - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - - def append( self, item ): - """ - Add single element to end of ParseResults list of elements. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to compute the sum of the parsed integers, and add it to the end - def append_sum(tokens): - tokens.append(sum(map(int, tokens))) - print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] - """ - self.__toklist.append(item) - - def extend( self, itemseq ): - """ - Add sequence of elements to end of ParseResults list of elements. 
-
- Example::
- patt = OneOrMore(Word(alphas))
-
- # use a parse action to append the reverse of the matched strings, to make a palindrome
- def make_palindrome(tokens):
- tokens.extend(reversed([t[::-1] for t in tokens]))
- return ''.join(tokens)
- print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
- """
- if isinstance(itemseq, ParseResults):
- self += itemseq
- else:
- self.__toklist.extend(itemseq)
-
- def clear( self ):
- """
- Clear all elements and results names.
- """
- del self.__toklist[:]
- self.__tokdict.clear()
-
- def __getattr__( self, name ):
- try:
- return self[name]
- except KeyError:
- return ""
-
- if name in self.__tokdict:
- if name not in self.__accumNames:
- return self.__tokdict[name][-1][0]
- else:
- return ParseResults([ v[0] for v in self.__tokdict[name] ])
- else:
- return ""
-
- def __add__( self, other ):
- ret = self.copy()
- ret += other
- return ret
-
- def __iadd__( self, other ):
- if other.__tokdict:
- offset = len(self.__toklist)
- addoffset = lambda a: offset if a<0 else a+offset
- otheritems = other.__tokdict.items()
- otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
- for (k,vlist) in otheritems for v in vlist]
- for k,v in otherdictitems:
- self[k] = v
- if isinstance(v[0],ParseResults):
- v[0].__parent = wkref(self)
-
- self.__toklist += other.__toklist
- self.__accumNames.update( other.__accumNames )
- return self
-
- def __radd__(self, other):
- if isinstance(other,int) and other == 0:
- # useful for merging many ParseResults using sum() builtin
- return self.copy()
- else:
- # this may raise a TypeError - so be it
- return other + self
-
- def __repr__( self ):
- return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
-
- def __str__( self ):
- return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
-
- def _asStringList( self, sep='' ):
- out = []
- for item in self.__toklist:
- if out and sep:
- out.append(sep)
- if isinstance( item, ParseResults ):
- out += item._asStringList()
- else:
- out.append( _ustr(item) )
- return out
-
- def asList( self ):
- """
- Returns the parse results as a nested list of matching tokens, all converted to strings.
-
- Example::
- patt = OneOrMore(Word(alphas))
- result = patt.parseString("sldkj lsdkj sldkj")
- # even though the result prints in string-like form, it is actually a pyparsing ParseResults
- print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
-
- # Use asList() to create an actual list
- result_list = result.asList()
- print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
- """
- return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
-
- def asDict( self ):
- """
- Returns the named parse results as a nested dictionary.
-
- Example::
- integer = Word(nums)
- date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
- result = date_str.parseString('12/31/1999')
- print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
-
- result_dict = result.asDict()
- print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
-
- # even though a ParseResults supports dict-like access, sometime you just need to have a dict
- import json
- print(json.dumps(result)) # -> Exception: TypeError: ...
is not JSON serializable
- print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
- """
- if PY_3:
- item_fn = self.items
- else:
- item_fn = self.iteritems
-
- def toItem(obj):
- if isinstance(obj, ParseResults):
- if obj.haskeys():
- return obj.asDict()
- else:
- return [toItem(v) for v in obj]
- else:
- return obj
-
- return dict((k,toItem(v)) for k,v in item_fn())
-
- def copy( self ):
- """
- Returns a new copy of a C{ParseResults} object.
- """
- ret = ParseResults( self.__toklist )
- ret.__tokdict = self.__tokdict.copy()
- ret.__parent = self.__parent
- ret.__accumNames.update( self.__accumNames )
- ret.__name = self.__name
- return ret
-
- def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
- """
- (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
- """
- nl = "\n"
- out = []
- namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
- for v in vlist)
- nextLevelIndent = indent + " "
-
- # collapse out indents if formatting is not desired
- if not formatted:
- indent = ""
- nextLevelIndent = ""
- nl = ""
-
- selfTag = None
- if doctag is not None:
- selfTag = doctag
- else:
- if self.__name:
- selfTag = self.__name
-
- if not selfTag:
- if namedItemsOnly:
- return ""
- else:
- selfTag = "ITEM"
-
- out += [ nl, indent, "<", selfTag, ">" ]
-
- for i,res in enumerate(self.__toklist):
- if isinstance(res,ParseResults):
- if i in namedItems:
- out += [ res.asXML(namedItems[i],
- namedItemsOnly and doctag is None,
- nextLevelIndent,
- formatted)]
- else:
- out += [ res.asXML(None,
- namedItemsOnly and doctag is None,
- nextLevelIndent,
- formatted)]
- else:
- # individual token, see if there is a name for it
- resTag = None
- if i in namedItems:
- resTag = namedItems[i]
- if not resTag:
- if namedItemsOnly:
- continue
- else:
- resTag = "ITEM"
- xmlBodyText = _xml_escape(_ustr(res))
- out += [ nl, nextLevelIndent, "<", resTag, ">",
- xmlBodyText,
- "</", resTag, ">" ]
-
- out += [ nl, indent, "</", selfTag, ">" ]
- return "".join(out)
-
- def __lookup(self,sub):
- for k,vlist in self.__tokdict.items():
- for v,loc in vlist:
- if sub is v:
- return k
- return None
-
- def getName(self):
- r"""
- Returns the results name for this token expression. Useful when several
- different expressions might match at a particular location.
-
- Example::
- integer = Word(nums)
- ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
- house_number_expr = Suppress('#') + Word(nums, alphanums)
- user_data = (Group(house_number_expr)("house_number")
- | Group(ssn_expr)("ssn")
- | Group(integer)("age"))
- user_info = OneOrMore(user_data)
-
- result = user_info.parseString("22 111-22-3333 #221B")
- for item in result:
- print(item.getName(), ':', item[0])
- prints::
- age : 22
- ssn : 111-22-3333
- house_number : 221B
- """
- if self.__name:
- return self.__name
- elif self.__parent:
- par = self.__parent()
- if par:
- return par.__lookup(self)
- else:
- return None
- elif (len(self) == 1 and
- len(self.__tokdict) == 1 and
- next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
- return next(iter(self.__tokdict.keys()))
- else:
- return None
-
- def dump(self, indent='', depth=0, full=True):
- """
- Diagnostic method for listing out the contents of a C{ParseResults}.
- Accepts an optional C{indent} argument so that this string can be embedded
- in a nested display of other data.
-
- Example::
- integer = Word(nums)
- date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
- result = date_str.parseString('12/31/1999')
- print(result.dump())
- prints::
- ['12', '/', '31', '/', '1999']
- - day: 1999
- - month: 31
- - year: 12
- """
- out = []
- NL = '\n'
- out.append( indent+_ustr(self.asList()) )
- if full:
- if self.haskeys():
- items = sorted((str(k), v) for k,v in self.items())
- for k,v in items:
- if out:
- out.append(NL)
- out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
- if isinstance(v,ParseResults):
- if v:
- out.append( v.dump(indent,depth+1) )
- else:
- out.append(_ustr(v))
- else:
- out.append(repr(v))
- elif any(isinstance(vv,ParseResults) for vv in self):
- v = self
- for i,vv in enumerate(v):
- if isinstance(vv,ParseResults):
- out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
- else:
- out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
-
- return "".join(out)
-
- def pprint(self, *args, **kwargs):
- """
- Pretty-printer for parsed results as a list, using the C{pprint} module.
- Accepts additional positional or keyword args as defined for the
- C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
-
- Example::
- ident = Word(alphas, alphanums)
- num = Word(nums)
- func = Forward()
- term = ident | num | Group('(' + func + ')')
- func <<= ident + Group(Optional(delimitedList(term)))
- result = func.parseString("fna a,b,(fnb c,d,200),100")
- result.pprint(width=40)
- prints::
- ['fna',
- ['a',
- 'b',
- ['(', 'fnb', ['c', 'd', '200'], ')'],
- '100']]
- """
- pprint.pprint(self.asList(), *args, **kwargs)
-
- # add support for pickle protocol
- def __getstate__(self):
- return ( self.__toklist,
- ( self.__tokdict.copy(),
- self.__parent is not None and self.__parent() or None,
- self.__accumNames,
- self.__name ) )
-
- def __setstate__(self,state):
- self.__toklist = state[0]
- (self.__tokdict,
- par,
- inAccumNames,
- self.__name) = state[1]
- self.__accumNames = {}
- self.__accumNames.update(inAccumNames)
- if par is not None:
- self.__parent = wkref(par)
- else:
- self.__parent = None
-
- def __getnewargs__(self):
- return self.__toklist, self.__name, self.__asList, self.__modal
-
- def __dir__(self):
- return (dir(type(self)) + list(self.keys()))
-
-MutableMapping.register(ParseResults)
-
-def col (loc,strg):
- """Returns current column within a string, counting newlines as line separators.
- The first column is number 1.
-
- Note: the default parsing behavior is to expand tabs in the input string
- before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
- on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
- consistent view of the parsed string, the parse location, and line and column
- positions within the parsed string.
- """
- s = strg
- return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
-
-def lineno(loc,strg):
- """Returns current line number within a string, counting newlines as line separators.
- The first line is number 1.
-
- Note: the default parsing behavior is to expand tabs in the input string
- before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
- on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
- consistent view of the parsed string, the parse location, and line and column
- positions within the parsed string.
- """
- return strg.count("\n",0,loc) + 1
-
-def line( loc, strg ):
- """Returns the line of text containing loc within a string, counting newlines as line separators.
- """ - lastCR = strg.rfind("\n", 0, loc) - nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR+1:nextCR] - else: - return strg[lastCR+1:] - -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) - -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) - -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) - -def nullDebugAction(*args): - """'Do-nothing' debug action, to suppress debugging output during parsing.""" - pass - -# Only works on Python 3.x - nonlocal is toxic to Python 2 installs -#~ 'decorator to trim function calls to match the arity of the target' -#~ def _trim_arity(func, maxargs=3): - #~ if func in singleArgBuiltins: - #~ return lambda s,l,t: func(t) - #~ limit = 0 - #~ foundArity = False - #~ def wrapper(*args): - #~ nonlocal limit,foundArity - #~ while 1: - #~ try: - #~ ret = func(*args[limit:]) - #~ foundArity = True - #~ return ret - #~ except TypeError: - #~ if limit == maxargs or foundArity: - #~ raise - #~ limit += 1 - #~ continue - #~ return wrapper - -# this version is Python 2.x-3.x cross-compatible -'decorator to trim function calls to match the arity of the target' -def _trim_arity(func, maxargs=2): - if func in singleArgBuiltins: - return lambda s,l,t: func(t) - limit = [0] - foundArity = [False] - - # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): - def extract_stack(limit=0): - # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [frame_summary[:2]] - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - else: - extract_stack = traceback.extract_stack - extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to - # user's parse action 'func', so that we don't incur call penalty at parse time - - LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND - # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
- this_line = extract_stack(limit=2)[-1]
- pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
-
- def wrapper(*args):
- while 1:
- try:
- ret = func(*args[limit[0]:])
- foundArity[0] = True
- return ret
- except TypeError:
- # re-raise TypeErrors if they did not come from our arity testing
- if foundArity[0]:
- raise
- else:
- try:
- tb = sys.exc_info()[-1]
- if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
- raise
- finally:
- del tb
-
- if limit[0] <= maxargs:
- limit[0] += 1
- continue
- raise
-
- # copy func name to wrapper for sensible debug output
- func_name = "<parse action>"
- try:
- func_name = getattr(func, '__name__',
- getattr(func, '__class__').__name__)
- except Exception:
- func_name = str(func)
- wrapper.__name__ = func_name
-
- return wrapper
-
-class ParserElement(object):
- """Abstract base level parser element class."""
- DEFAULT_WHITE_CHARS = " \n\t\r"
- verbose_stacktrace = False
-
- @staticmethod
- def setDefaultWhitespaceChars( chars ):
- r"""
- Overrides the default whitespace chars
-
- Example::
- # default whitespace chars are space, and newline
- OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl']
-
- # change to just treat newline as significant
- ParserElement.setDefaultWhitespaceChars(" \t")
- OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def']
- """
- ParserElement.DEFAULT_WHITE_CHARS = chars
-
- @staticmethod
- def inlineLiteralsUsing(cls):
- """
- Set class to be used for inclusion of string literals into a parser.
-
- Example::
- # default literal class used is Literal
- integer = Word(nums)
- date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
- date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
-
-
- # change to Suppress
- ParserElement.inlineLiteralsUsing(Suppress)
- date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
- date_str.parseString("1999/12/31") # -> ['1999', '12', '31']
- """
- ParserElement._literalStringClass = cls
-
- def __init__( self, savelist=False ):
- self.parseAction = list()
- self.failAction = None
- #~ self.name = "" # don't define self.name, let subclasses try/except upcall
- self.strRepr = None
- self.resultsName = None
- self.saveAsList = savelist
- self.skipWhitespace = True
- self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
- self.copyDefaultWhiteChars = True
- self.mayReturnEmpty = False # used when checking for left-recursion
- self.keepTabs = False
- self.ignoreExprs = list()
- self.debug = False
- self.streamlined = False
- self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
- self.errmsg = ""
- self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
- self.debugActions = ( None, None, None ) #custom debug actions
- self.re = None
- self.callPreparse = True # used to avoid redundant calls to preParse
- self.callDuringTry = False
-
- def copy( self ):
- """
- Make a copy of this C{ParserElement}. Useful for defining different parse actions
- for the same parsing pattern, using copies of the original parse element.
- - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - - print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) - prints:: - [5120, 100, 655360, 268435456] - Equivalent form of C{expr.copy()} is just C{expr()}:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - """ - cpy = copy.copy( self ) - cpy.parseAction = self.parseAction[:] - cpy.ignoreExprs = self.ignoreExprs[:] - if self.copyDefaultWhiteChars: - cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - return cpy - - def setName( self, name ): - """ - Define name for this expression, makes debugging and exception messages clearer. - - Example:: - Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) - Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) - """ - self.name = name - self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg - return self - - def setResultsName( self, name, listAllMatches=False ): - """ - Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original C{ParserElement} object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. - - Example:: - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - - # equivalent form: - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - """ - newself = self.copy() - if name.endswith("*"): - name = name[:-1] - listAllMatches=True - newself.resultsName = name - newself.modalResults = not listAllMatches - return newself - - def setBreak(self,breakFlag = True): - """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set C{breakFlag} to True to enable, False to - disable. - """ - if breakFlag: - _parseMethod = self._parse - def breaker(instring, loc, doActions=True, callPreParse=True): - import pdb - pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) - breaker._originalParseMethod = _parseMethod - self._parse = breaker - else: - if hasattr(self._parse,"_originalParseMethod"): - self._parse = self._parse._originalParseMethod - return self - - def setParseAction( self, *fns, **kwargs ): - """ - Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. 
-
- Optional keyword arguments:
- - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
-
- Note: the default parsing behavior is to expand tabs in the input string
- before starting the parsing process. See L{I{parseString}<parseString>} for more information
- on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
- consistent view of the parsed string, the parse location, and line and column
- positions within the parsed string.
-
- Example::
- integer = Word(nums)
- date_str = integer + '/' + integer + '/' + integer
-
- date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
-
- # use parse action to convert to ints at parse time
- integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
- date_str = integer + '/' + integer + '/' + integer
-
- # note that integer fields are now ints, not strings
- date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31]
- """
- self.parseAction = list(map(_trim_arity, list(fns)))
- self.callDuringTry = kwargs.get("callDuringTry", False)
- return self
-
- def addParseAction( self, *fns, **kwargs ):
- """
- Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
-
- See examples in L{I{copy}<copy>}.
- """
- self.parseAction += list(map(_trim_arity, list(fns)))
- self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
- return self
-
- def addCondition(self, *fns, **kwargs):
- """Add a boolean predicate function to expression's list of parse actions. See
- L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
- functions passed to C{addCondition} need to return boolean success/fail of the condition.
-
- Optional keyword arguments:
- - message = define a custom message to be used in the raised exception
- - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
-
- Example::
- integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
- year_int = integer.copy()
- year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
- date_str = year_int + '/' + integer + '/' + integer
-
- result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
- """
- msg = kwargs.get("message", "failed user-defined condition")
- exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
- for fn in fns:
- def pa(s,l,t):
- if not bool(_trim_arity(fn)(s,l,t)):
- raise exc_type(s,l,msg)
- self.parseAction.append(pa)
- self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
- return self
-
- def setFailAction( self, fn ):
- """Define action to perform if parsing fails at this expression.
- Fail acton fn is a callable function that takes the arguments
- C{fn(s,loc,expr,err)} where:
- - s = string being parsed
- - loc = location where expression match was attempted and failed
- - expr = the parse expression that failed
- - err = the exception thrown
- The function returns no value.
It may throw C{L{ParseFatalException}} - if it is desired to stop parsing immediately.""" - self.failAction = fn - return self - - def _skipIgnorables( self, instring, loc ): - exprsFound = True - while exprsFound: - exprsFound = False - for e in self.ignoreExprs: - try: - while 1: - loc,dummy = e._parse( instring, loc ) - exprsFound = True - except ParseException: - pass - return loc - - def preParse( self, instring, loc ): - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - - if self.skipWhitespace: - wt = self.whiteChars - instrlen = len(instring) - while loc < instrlen and instring[loc] in wt: - loc += 1 - - return loc - - def parseImpl( self, instring, loc, doActions=True ): - return loc, [] - - def postParse( self, instring, loc, tokenlist ): - return tokenlist - - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) - - if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) - if self.failAction: - self.failAction( instring, tokensStart, self, err ) - raise - else: - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - if self.mayIndexError or preloc >= len(instring): - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - - tokens = self.postParse( instring, loc, tokens ) - - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) - if self.parseAction and (doActions or self.callDuringTry): - if debugging: - try: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) - raise - else: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) - - return loc, retTokens - - def tryParse( self, instring, loc ): - try: - return self._parse( instring, loc, doActions=False )[0] - except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) - - def canParseNext(self, instring, loc): - try: - self.tryParse(instring, loc) - except (ParseException, 
IndexError): - return False - else: - return True - - class _UnboundedCache(object): - def __init__(self): - cache = {} - self.not_in_cache = not_in_cache = object() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - if _OrderedDict is not None: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = _OrderedDict() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(cache) > size: - try: - cache.popitem(False) - except KeyError: - pass - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - else: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = {} - key_fifo = collections.deque([], size) - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(key_fifo) > size: - cache.pop(key_fifo.popleft(), None) - key_fifo.append(key) - - def clear(self): - cache.clear() - key_fifo.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - # argument cache for optimizing repeated calls when backtracking through recursive expressions - packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail - packrat_cache_lock = RLock() - packrat_cache_stats = [0, 0] - - # this method gets repeatedly called during backtracking with the same arguments - - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - HIT, MISS = 0, 1 - lookup = (self, instring, loc, callPreParse, doActions) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return (value[0], value[1].copy()) - - _parse = _parseNoCache - - @staticmethod - def resetCache(): - ParserElement.packrat_cache.clear() - ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) - - _packratEnabled = False - @staticmethod - def enablePackrat(cache_size_limit=128): - """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
- - Repeated parse attempts at the same string location (which happens - often in many complex grammars) can immediately return a cached value, - instead of re-executing parsing/validating code. Memoizing is done for - both valid results and parsing exceptions. - - Parameters: - - cache_size_limit - (default=C{128}) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - - This speedup may break existing programs that use parse actions that - have side-effects. For this reason, packrat parsing is disabled when - you first import pyparsing. To activate the packrat feature, your - program must call the class method C{ParserElement.enablePackrat()}. If - your program uses C{psyco} to "compile as you go", you must call - C{enablePackrat} before calling C{psyco.full()}. If you do not do this, - Python will crash. For best results, call C{enablePackrat()} immediately - after importing pyparsing. - - Example:: - import pyparsing - pyparsing.ParserElement.enablePackrat() - """ - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = ParserElement._UnboundedCache() - else: - ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) - ParserElement._parse = ParserElement._parseCache - - def parseString( self, instring, parseAll=False ): - """ - Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. - - If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). - - Note: C{parseString} implicitly calls C{expandtabs()} on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explicitly expand the tabs in your input string before calling - C{parseString} - - Example:: - Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] - Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text - """ - ParserElement.resetCache() - if not self.streamlined: - self.streamline() - #~ self.saveAsList = True - for e in self.ignoreExprs: - e.streamline() - if not self.keepTabs: - instring = instring.expandtabs() - try: - loc, tokens = self._parse( instring, 0 ) - if parseAll: - loc = self.preParse( instring, loc ) - se = Empty() + StringEnd() - se._parse( instring, loc ) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - else: - return tokens - - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """ - Scan the input string for expression matches. Each match will return the - matching tokens, start location, and end location.
May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. - - Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}} for more information on parsing - strings with embedded tabs. - - Example:: - source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" - print(source) - for tokens,start,end in Word(alphas).scanString(source): - print(' '*start + '^'*(end-start)) - print(' '*start + tokens[0]) - - prints:: - - sldjf123lsdjjkf345sldkjf879lkjsfd987 - ^^^^^ - sldjf - ^^^^^^^ - lsdjjkf - ^^^^^^ - sldkjf - ^^^^^^ - lkjsfd - """ - if not self.streamlined: - self.streamline() - for e in self.ignoreExprs: - e.streamline() - - if not self.keepTabs: - instring = _ustr(instring).expandtabs() - instrlen = len(instring) - loc = 0 - preparseFn = self.preParse - parseFn = self._parse - ParserElement.resetCache() - matches = 0 - try: - while loc <= instrlen and matches < maxMatches: - try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) - except ParseException: - loc = preloc+1 - else: - if nextLoc > loc: - matches += 1 - yield tokens, preloc, nextLoc - if overlap: - nextloc = preparseFn( instring, loc ) - if nextloc > loc: - loc = nextLoc - else: - loc += 1 - else: - loc = nextLoc - else: - loc = preloc+1 - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def transformString( self, instring ): - """ - Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. To use C{transformString}, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string. - - Example:: - wd = Word(alphas) - wd.setParseAction(lambda toks: toks[0].title()) - - print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - Prints:: - Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. - """ - out = [] - lastE = 0 - # force preservation of s, to minimize unwanted transformation of string, and to - # keep string locs straight between transformString and scanString - self.keepTabs = True - try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) - if t: - if isinstance(t,ParseResults): - out += t.asList() - elif isinstance(t,list): - out += t - else: - out.append(t) - lastE = e - out.append(instring[lastE:]) - out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def searchString( self, instring, maxMatches=_MAX_INT ): - """ - Another extension to C{L{scanString}}, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. 
- - Example:: - # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters - cap_word = Word(alphas.upper(), alphas.lower()) - - print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) - - # the sum() builtin can be used to merge results into a single ParseResults object - print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) - prints:: - [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] - ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] - """ - try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): - """ - Generator method to split a string using the given expression as a separator. - May be called with optional C{maxsplit} argument, to limit the number of splits; - and the optional C{includeSeparators} argument (default=C{False}), if the separating - matching text should be included in the split results. - - Example:: - punc = oneOf(list(".,;:/-!?")) - print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) - prints:: - ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] - """ - splits = 0 - last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): - yield instring[last:s] - if includeSeparators: - yield t[0] - last = e - yield instring[last:] - - def __add__(self, other ): - """ - Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement - converts them to L{Literal}s by default. - - Example:: - greet = Word(alphas) + "," + Word(alphas) + "!" - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - Prints:: - Hello, World! 
-> ['Hello', ',', 'World', '!'] - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return And( [ self, other ] ) - - def __radd__(self, other ): - """ - Implementation of + operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other + self - - def __sub__(self, other): - """ - Implementation of - operator, returns C{L{And}} with error stop - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return self + And._ErrorStop() + other - - def __rsub__(self, other ): - """ - Implementation of - operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other - self - - def __mul__(self,other): - """ - Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent - to C{expr*n + L{ZeroOrMore}(expr)} - (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} - (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} - - Note that C{expr*(None,n)} does not raise an exception if - more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr - occurrences.
If this behavior is desired, then write - C{expr*(None,n) + ~expr} - """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): - other = (other + (None, None))[:2] - if other[0] is None: - other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: - if other[0] == 0: - return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): - minElements, optElements = other - optElements -= minElements - else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) - else: - raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) - - if minElements < 0: - raise ValueError("cannot multiply ParserElement by negative value") - if optElements < 0: - raise ValueError("second tuple value must be greater or equal to first tuple value") - if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") - - if (optElements): - def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) - else: - return Optional(self) - if minElements: - if minElements == 1: - ret = self + makeOptionalList(optElements) - else: - ret = And([self]*minElements) + makeOptionalList(optElements) - else: - ret = makeOptionalList(optElements) - else: - if minElements == 1: - ret = self - else: - ret = And([self]*minElements) - return ret - - def __rmul__(self, other): - return self.__mul__(other) - - def __or__(self, other ): - """ - Implementation of | operator - returns C{L{MatchFirst}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return MatchFirst( [ self, other ] ) - - def __ror__(self, other ): - """ - Implementation of | operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other | self - - def __xor__(self, other ): - """ - Implementation of ^ operator - returns C{L{Or}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return Or( [ self, other ] ) - - def __rxor__(self, other ): - """ - Implementation of ^ operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other ^ self - - def __and__(self, other ): - """ - Implementation of & operator - returns C{L{Each}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - 
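- # the operand could not be promoted to a ParserElement, so no combined - # expression can be built; after the warning, fall through and return None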
return None - return Each( [ self, other ] ) - - def __rand__(self, other ): - """ - Implementation of & operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other & self - - def __invert__( self ): - """ - Implementation of ~ operator - returns C{L{NotAny}} - """ - return NotAny( self ) - - def __call__(self, name=None): - """ - Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. - - If C{name} is omitted, same as calling C{L{copy}}. - - Example:: - # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - """ - if name is not None: - return self.setResultsName(name) - else: - return self.copy() - - def suppress( self ): - """ - Suppresses the output of this C{ParserElement}; useful to keep punctuation from - cluttering up returned output. - """ - return Suppress( self ) - - def leaveWhitespace( self ): - """ - Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. - """ - self.skipWhitespace = False - return self - - def setWhitespaceChars( self, chars ): - """ - Overrides the default whitespace chars - """ - self.skipWhitespace = True - self.whiteChars = chars - self.copyDefaultWhiteChars = False - return self - - def parseWithTabs( self ): - """ - Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{<TAB>} characters. - """ - self.keepTabs = True - return self - - def ignore( self, other ): - """ - Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. - - Example:: - patt = OneOrMore(Word(alphas)) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - - patt.ignore(cStyleComment) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] - """ - if isinstance(other, basestring): - other = Suppress(other) - - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - self.ignoreExprs.append(other) - else: - self.ignoreExprs.append( Suppress( other.copy() ) ) - return self - - def setDebugActions( self, startAction, successAction, exceptionAction ): - """ - Enable display of debugging messages while doing pattern matching. - """ - self.debugActions = (startAction or _defaultStartDebugAction, - successAction or _defaultSuccessDebugAction, - exceptionAction or _defaultExceptionDebugAction) - self.debug = True - return self - - def setDebug( self, flag=True ): - """ - Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable.
- - Example:: - wd = Word(alphas).setName("alphaword") - integer = Word(nums).setName("numword") - term = wd | integer - - # turn on debugging for wd - wd.setDebug() - - OneOrMore(term).parseString("abc 123 xyz 890") - - prints:: - Match alphaword at loc 0(1,1) - Matched alphaword -> ['abc'] - Match alphaword at loc 3(1,4) - Exception raised:Expected alphaword (at char 4), (line:1, col:5) - Match alphaword at loc 7(1,8) - Matched alphaword -> ['xyz'] - Match alphaword at loc 11(1,12) - Exception raised:Expected alphaword (at char 12), (line:1, col:13) - Match alphaword at loc 15(1,16) - Exception raised:Expected alphaword (at char 15), (line:1, col:16) - - The output shown is that produced by the default debug actions - custom debug actions can be - specified using L{setDebugActions}. Prior to attempting - to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} - is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} - message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, - which makes debugging and exception messages easier to understand - for instance, the default - name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. - """ - if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) - else: - self.debug = False - return self - - def __str__( self ): - return self.name - - def __repr__( self ): - return _ustr(self) - - def streamline( self ): - self.streamlined = True - self.strRepr = None - return self - - def checkRecursion( self, parseElementList ): - pass - - def validate( self, validateTrace=[] ): - """ - Check defined expressions for valid structure, check for infinite recursive definitions. - """ - self.checkRecursion( [] ) - - def parseFile( self, file_or_filename, parseAll=False ): - """ - Execute the parse expression on the given file or filename. - If a filename is specified (instead of a file object), - the entire file is opened, read, and closed before parsing. - """ - try: - file_contents = file_or_filename.read() - except AttributeError: - with open(file_or_filename, "r") as f: - file_contents = f.read() - try: - return self.parseString(file_contents, parseAll) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) - elif isinstance(other, basestring): - return self.matches(other) - else: - return super(ParserElement,self)==other - - def __ne__(self,other): - return not (self == other) - - def __hash__(self): - return hash(id(self)) - - def __req__(self,other): - return self == other - - def __rne__(self,other): - return not (self == other) - - def matches(self, testString, parseAll=True): - """ - Method for quick testing of a parser against a test string. Good for simple - inline microtests of sub expressions while building up larger parser.
- - Parameters: - - testString - to test against this expression for a match - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - Example:: - expr = Word(nums) - assert expr.matches("100") - """ - try: - self.parseString(_ustr(testString), parseAll=parseAll) - return True - except ParseBaseException: - return False - - def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): - """ - Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. - - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - comment - (default=C{'#'}) - expression for indicating embedded comments in the test - string; pass None to disable comment filtering - - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; - if False, only dump nested list - - printResults - (default=C{True}) prints test output to stdout - - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing - - Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if C{failureTests} is True), and the results contain a list of lines of each - test's output - - Example:: - number_expr = pyparsing_common.number.copy() - - result = number_expr.runTests(''' - # unsigned integer - 100 - # negative integer - -100 - # float with scientific notation - 6.02e23 - # integer with scientific notation - 1e-12 - ''') - print("Success" if result[0] else "Failed!") - - result = number_expr.runTests(''' - # stray character - 100Z - # missing leading digit before '.' - -.100 - # too many '.' - 3.14.159 - ''', failureTests=True) - print("Success" if result[0] else "Failed!") - prints:: - # unsigned integer - 100 - [100] - - # negative integer - -100 - [-100] - - # float with scientific notation - 6.02e23 - [6.02e+23] - - # integer with scientific notation - 1e-12 - [1e-12] - - Success - - # stray character - 100Z - ^ - FAIL: Expected end of text (at char 3), (line:1, col:4) - - # missing leading digit before '.' - -.100 - ^ - FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) - - # too many '.' - 3.14.159 - ^ - FAIL: Expected end of text (at char 4), (line:1, col:5) - - Success - - Each test string must be on a single line. If you want to test a string that spans multiple - lines, create a test like this:: - - expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - - (Note that this is a raw string literal, you must include the leading 'r'.) 
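- - As a usage sketch (assuming the C{number_expr} parser from the example above), the returned tuple can be unpacked directly:: - - success, results = number_expr.runTests(''' - 100 - -100 - ''') - for test_string, result in results: - print(test_string, "->", result) - - Each entry in C{results} pairs a test string with either its C{ParseResults} or the exception it raised.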
- """ - if isinstance(tests, basestring): - tests = list(map(str.strip, tests.rstrip().splitlines())) - if isinstance(comment, basestring): - comment = Literal(comment) - allResults = [] - comments = [] - success = True - for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: - comments.append(t) - continue - if not t: - continue - out = ['\n'.join(comments), t] - comments = [] - try: - t = t.replace(r'\n','\n') - result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" - if '\n' in t: - out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) - else: - out.append(' '*pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) - success = success and failureTests - result = pe - except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) - success = success and failureTests - result = exc - - if printResults: - if fullDump: - out.append('') - print('\n'.join(out)) - - allResults.append((t, result)) - - return success, allResults - - -class Token(ParserElement): - """ - Abstract C{ParserElement} subclass, for defining atomic matching patterns. - """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) - - -class Empty(Token): - """ - An empty token, will always match. - """ - def __init__( self ): - super(Empty,self).__init__() - self.name = "Empty" - self.mayReturnEmpty = True - self.mayIndexError = False - - -class NoMatch(Token): - """ - A token that will never match. - """ - def __init__( self ): - super(NoMatch,self).__init__() - self.name = "NoMatch" - self.mayReturnEmpty = True - self.mayIndexError = False - self.errmsg = "Unmatchable token" - - def parseImpl( self, instring, loc, doActions=True ): - raise ParseException(instring, loc, self.errmsg, self) - - -class Literal(Token): - """ - Token to exactly match a specified string. - - Example:: - Literal('blah').parseString('blah') # -> ['blah'] - Literal('blah').parseString('blahfooblah') # -> ['blah'] - Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use L{CaselessLiteral}. - - For keyword matching (force word break before and after the matched string), - use L{Keyword} or L{CaselessKeyword}. 
- """ - def __init__( self, matchString ): - super(Literal,self).__init__() - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Literal; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) -_L = Literal -ParserElement._literalStringClass = Literal - -class Keyword(Token): - """ - Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}: - - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$" - - C{caseless} allows case-insensitive matching, default is C{False}. - - Example:: - Keyword("start").parseString("start") # -> ['start'] - Keyword("start").parseString("starting") # -> Exception - - For case-insensitive matching, use L{CaselessKeyword}. 
- """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" - - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() - if identChars is None: - identChars = Keyword.DEFAULT_KEYWORD_CHARS - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - self.caseless = caseless - if caseless: - self.caselessmatch = matchString.upper() - identChars = identChars.upper() - self.identChars = set(identChars) - - def parseImpl( self, instring, loc, doActions=True ): - if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - - def copy(self): - c = super(Keyword,self).copy() - c.identChars = Keyword.DEFAULT_KEYWORD_CHARS - return c - - @staticmethod - def setDefaultKeywordChars( chars ): - """Overrides the default Keyword chars - """ - Keyword.DEFAULT_KEYWORD_CHARS = chars - -class CaselessLiteral(Literal): - """ - Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. - - Example:: - OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for L{CaselessKeyword}.) - """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) - # Preserve the defining literal. - self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name - - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString - raise ParseException(instring, loc, self.errmsg, self) - -class CaselessKeyword(Keyword): - """ - Caseless version of L{Keyword}. - - Example:: - OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for L{CaselessLiteral}.) - """ - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - - def parseImpl( self, instring, loc, doActions=True ): - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - -class CloseMatch(Token): - """ - A variation on L{Literal} which matches "close" matches, that is, - strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: - - C{match_string} - string to be matched - - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match - - The results from a successful parse will contain the matched text from the input string and the following named results: - - C{mismatches} - a list of the positions within the match_string where mismatches were found - - C{original} - the original match_string used to compare against the input string - - If C{mismatches} is an empty list, then the match was an exact match. - - Example:: - patt = CloseMatch("ATCATCGAATGGA") - patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) - patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) - - # exact match - patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) - - # close match allowing up to 2 mismatches - patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) - patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) - """ - def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() - self.name = match_string - self.match_string = match_string - self.maxMismatches = maxMismatches - self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) - self.mayIndexError = False - self.mayReturnEmpty = False - - def parseImpl( self, instring, loc, doActions=True ): - start = loc - instrlen = len(instring) - maxloc = start + len(self.match_string) - - if maxloc <= instrlen: - match_string = self.match_string - match_stringloc = 0 - mismatches = [] - maxMismatches = self.maxMismatches - - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m - if src != mat: - mismatches.append(match_stringloc) - if len(mismatches) > maxMismatches: - break - else: - loc = match_stringloc + 1 - results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string - results['mismatches'] = mismatches - return loc, results - - raise ParseException(instring, loc, self.errmsg, self) - - -class Word(Token): - """ - Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{excludeChars} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. - - L{srange} is useful for defining custom character set strings for defining - C{Word} expressions, using range notation from regular expression character sets. - - A common mistake is to use C{Word} to match a specific literal string, as in - C{Word("Address")}. Remember that C{Word} uses the string argument to define - I{sets} of matchable characters. This expression would match "Add", "AAA", - "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. - To match an exact literal string, use L{Literal} or L{Keyword}. 
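- - The C{min}, C{max}, and C{exact} arguments constrain the matched length; a minimal sketch of their effect:: - Word(nums, exact=4).parseString("20231225") # -> ['2023'] (stops after exactly 4 characters) - Word(nums, min=2).parseString("7 42") # -> Exception (first token is too short) - Word(nums, max=3).parseString("12345") # -> Exception (more body characters follow the maximum)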
- - pyparsing includes helper strings for building Words: - - L{alphas} - - L{nums} - - L{alphanums} - - L{hexnums} - - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) - - L{printables} (any non-whitespace character) - - Example:: - # a word composed of digits - integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) - - # a word with a leading capital, and zero or more lowercase - capital_word = Word(alphas.upper(), alphas.lower()) - - # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') - - # roman numeral (not a strict parser, accepts invalid mix of characters) - roman = Word("IVXLCDM") - - # any string of non-whitespace characters, except for ',' - csv_value = Word(printables, excludeChars=",") - """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() - if excludeChars: - initChars = ''.join(c for c in initChars if c not in excludeChars) - if bodyChars: - bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) - self.initCharsOrig = initChars - self.initChars = set(initChars) - if bodyChars : - self.bodyCharsOrig = bodyChars - self.bodyChars = set(bodyChars) - else: - self.bodyCharsOrig = initChars - self.bodyChars = set(initChars) - - self.maxSpecified = max > 0 - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.asKeyword = asKeyword - - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): - if self.bodyCharsOrig == self.initCharsOrig: - self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) - elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" - try: - self.re = re.compile( self.reString ) - except Exception: - self.re = None - - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if not(instring[ loc ] in self.initChars): - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - instrlen = len(instring) - bodychars = self.bodyChars - maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) - while loc < maxloc and instring[loc] in bodychars: - loc += 1 - - throwException = False - if loc - start < self.minLen: - throwException = True - if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: - throwException = True - if self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): - throwException = True - - if throwException: - raise ParseException( instring, loc, self.errmsg, self ) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(Word,self).__str__() - except Exception: - pass - - if self.strRepr is None: - - def charsAsStr(s): - if len(s)>4: - return s[:4]+"..."
- else: - return s - - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) - else: - self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) - - return self.strRepr - - -class Regex(Token): - r""" - Token for matching strings that match a given regular expression. - Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. - If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as - named parse results. - - Example:: - realnum = Regex(r"[+-]?\d+\.\d*") - date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') - # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression - roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") - """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0): - """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" - super(Regex,self).__init__() - - if isinstance(pattern, basestring): - if not pattern: - warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) - - self.pattern = pattern - self.flags = flags - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) - raise - - elif isinstance(pattern, Regex.compiledREtype): - self.re = pattern - self.pattern = \ - self.reString = str(pattern) - self.flags = flags - - else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - d = result.groupdict() - ret = ParseResults(result.group()) - if d: - for k in d: - ret[k] = d[k] - return loc,ret - - def __str__( self ): - try: - return super(Regex,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "Re:(%s)" % repr(self.pattern) - - return self.strRepr - - -class QuotedString(Token): - r""" - Token for matching strings that are delimited by quoting characters. - - Defined with the following parameters: - - quoteChar - string of one or more characters defining the quote delimiting string - - escChar - character to escape quotes, typically backslash (default=C{None}) - - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.)
to actual whitespace (default=C{True}) - - Example:: - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - prints:: - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] - - if isinstance(ret,basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', - } - for wslit,wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace 
escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__( self ): - try: - return super(QuotedString,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """ - Token for matching words composed of characters I{not} in a given set (will - include whitespace in matched characters if not listed in the provided exclusion set - see example). - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. - - Example:: - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - prints:: - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) - self.mayIndexError = False - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """ - Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class. 
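- - An illustrative sketch (note the call to C{parseWithTabs}, since C{parseString} would otherwise expand the tab to spaces):: - tab_sep = Word(alphas) + White("\t").suppress() + Word(alphas) - tab_sep.parseWithTabs() - tab_sep.parseString("abc\tdef") # -> ['abc', 'def']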
- """ - whiteStrs = { - " " : "", - "\t": "", - "\n": "", - "\r": "", - "\f": "", - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() - self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """ - Token to advance to a specific column of input text; useful for tabular report scraping. - """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() - self.col = colno - - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : - loc += 1 - return loc - - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) - if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) - newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] - return newloc, ret - - -class LineStart(_PositionToken): - """ - Matches if current position is at the beginning of a line within the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - Prints:: - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__( self ): - super(LineStart,self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl( self, instring, loc, doActions=True ): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """ - Matches if current position is at the end of a line within the parse string - """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) - self.errmsg = "Expected end of line" - - def parseImpl( self, instring, loc, doActions=True ): - if loc len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """ - Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. 
-class WordStart(_PositionToken):
-    """
-    Matches if the current position is at the beginning of a Word, and
-    is not preceded by any character in a given set of C{wordChars}
-    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
-    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
-    the string being parsed, or at the beginning of a line.
-    """
-    def __init__(self, wordChars = printables):
-        super(WordStart,self).__init__()
-        self.wordChars = set(wordChars)
-        self.errmsg = "Not at the start of a word"
-
-    def parseImpl(self, instring, loc, doActions=True ):
-        if loc != 0:
-            if (instring[loc-1] in self.wordChars or
-                    instring[loc] not in self.wordChars):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-class WordEnd(_PositionToken):
-    """
-    Matches if the current position is at the end of a Word, and
-    is not followed by any character in a given set of C{wordChars}
-    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
-    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
-    the string being parsed, or at the end of a line.
-    """
-    def __init__(self, wordChars = printables):
-        super(WordEnd,self).__init__()
-        self.wordChars = set(wordChars)
-        self.skipWhitespace = False
-        self.errmsg = "Not at the end of a word"
-
-    def parseImpl(self, instring, loc, doActions=True ):
-        instrlen = len(instring)
-        if instrlen>0 and loc<instrlen:
-            if (instring[loc] in self.wordChars or
-                    instring[loc-1] not in self.wordChars):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-
-class Or(ParseExpression):
-    """
-    Requires that at least one C{ParseExpression} is found.
-    If two expressions match, the expression that matches the longest string will be used.
-    May be constructed using the C{'^'} operator.
-
-    Example::
-        # construct Or using '^' operator
-
-        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
-        print(number.searchString("123 3.1416 789"))
-    prints::
-        [['123'], ['3.1416'], ['789']]
-    """
-    def __init__( self, exprs, savelist = False ):
-        super(Or,self).__init__(exprs, savelist)
-        if self.exprs:
-            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
-        else:
-            self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        maxExcLoc = -1
-        maxException = None
-        matches = []
-        for e in self.exprs:
-            try:
-                loc2 = e.tryParse( instring, loc )
-            except ParseException as err:
-                err.__traceback__ = None
-                if err.loc > maxExcLoc:
-                    maxException = err
-                    maxExcLoc = err.loc
-            except IndexError:
-                if len(instring) > maxExcLoc:
-                    maxException = ParseException(instring,len(instring),e.errmsg,self)
-                    maxExcLoc = len(instring)
-            else:
-                # save match among all matches, to retry longest to shortest
-                matches.append((loc2, e))
-
-        if matches:
-            matches.sort(key=lambda x: -x[0])
-            for _,e in matches:
-                try:
-                    return e._parse( instring, loc, doActions )
-                except ParseException as err:
-                    err.__traceback__ = None
-                    if err.loc > maxExcLoc:
-                        maxException = err
-                        maxExcLoc = err.loc
-
-        if maxException is not None:
-            maxException.msg = self.errmsg
-            raise maxException
-        else:
-            raise ParseException(instring, loc, "no defined alternatives to match", self)
-
-
-    def __ixor__(self, other ):
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        return self.append( other ) #Or( [ self, other ] )
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
-
-        return self.strRepr
-
-    def checkRecursion( self, parseElementList ):
-        subRecCheckList = parseElementList[:] + [ self ]
-        for e in self.exprs:
-            e.checkRecursion( subRecCheckList )
-
-
-class MatchFirst(ParseExpression):
-    """
-    Requires that at least one C{ParseExpression} is found.
-    If two expressions match, the first one listed is the one that will match.
-    May be constructed using the C{'|'} operator.
-
-    Example::
-        # construct MatchFirst using '|' operator
-
-        # watch the order of expressions to match
-        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
-        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
-
-        # put more selective expression first
-        number = Combine(Word(nums) + '.'
+ Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse( instring, loc, doActions ) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class Each(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. 
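(Editor's illustration, ahead of the C{Each} example below: the two alternation classes above differ only in tie-breaking. C{Or} ('^') commits to the longest alternative, C{MatchFirst} ('|') to the first that succeeds. A minimal sketch, assuming pyparsing 2.x; the names are made up.)

    from pyparsing import Word, Combine, nums

    numword = Word(nums)
    floatword = Combine(Word(nums) + '.' + Word(nums))
    print((numword | floatword).parseString("3.1416").asList())  # first match wins: ['3']
    print((numword ^ floatword).parseString("3.1416").asList())  # longest match wins: ['3.1416']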
- - Example:: - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - prints:: - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - - def parseImpl( self, instring, loc, doActions=True ): - if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] - self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse( instring, tmpLoc ) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e),e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) - resultlist.append(results) - - finalResults = sum(resultlist, ParseResults([])) - return loc, finalResults - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class ParseElementEnhance(ParserElement): - """ - Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. - """ - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) - else: - expr = ParserElement._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) - else: - raise ParseException("",loc,self.errmsg,self) - - def leaveWhitespace( self ): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - else: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - return self - - def streamline( self ): - super(ParseElementEnhance,self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion( self, parseElementList ): - if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] - if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion( [] ) - - def __str__( self ): - try: - return super(ParseElementEnhance,self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """ - Lookahead matching of the given parse expression. C{FollowedBy} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list. 
- - Example:: - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - prints:: - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - self.expr.tryParse( instring, loc ) - return loc, [] - - -class NotAny(ParseElementEnhance): - """ - Lookahead to disallow matching with the given parse expression. C{NotAny} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression does I{not} match at the current - position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator. - - Example:: - - """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - - def parseImpl( self, instring, loc, doActions=True ): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens - -class OneOrMore(_MultipleMatch): - """ - Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """ - Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to L{OneOrMore} - """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -_optionalNotMatched = _NullToken() -class Optional(ParseElementEnhance): - """ - Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - prints:: - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [ self.defaultValue ] - else: - tokens = [] - return loc, tokens - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """ - Token for skipping over all undefined text until the matched expression is found. 
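(Editor's illustration before the parameter list: the most common C{SkipTo} case is a delimited field whose contents need no grammar of their own. A minimal sketch, assuming pyparsing 2.x.)

    from pyparsing import SkipTo, Suppress

    # capture everything up to a '|' delimiter without describing it
    field = SkipTo("|")("body") + Suppress("|")
    print(field.parseString("anything at all|")["body"])  # expected: 'anything at all'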
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default=C{False}) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default=C{None}) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default=C{None}) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - prints:: - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.asList = False - if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # build up return 
values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """ - Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. - - Note: take care when assigning to C{Forward} not to overlook precedence of operators. - Specifically, '|' has a lower precedence than '<<', so that:: - fwdExpr << a | b | c - will actually be evaluated as:: - (fwdExpr << a) | b | c - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the C{Forward}:: - fwdExpr << (a | b | c) - Converting to use the '<<=' operator instead will avoid this problem. - - See L{ParseResults.pprint} for an example of a recursive parser created using - C{Forward}. - """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) - - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace( self ): - self.skipWhitespace = False - return self - - def streamline( self ): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate( self, validateTrace=[] ): - if self not in validateTrace: - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - return self.__class__.__name__ + ": ..." - - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse - try: - if self.expr is not None: - retString = _ustr(self.expr) - else: - retString = "None" - finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString - - def copy(self): - if self.expr is not None: - return super(Forward,self).copy() - else: - ret = Forward() - ret <<= self - return ret - -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of C{ParseExpression}, for converting parsed results. - """ - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) - self.saveAsList = False - -class Combine(TokenConverter): - """ - Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. - - Example:: - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' + Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore( self, other ): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super( Combine, self).ignore( other ) - return self - - def postParse( self, instring, loc, tokenlist ): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [ retToks ] - else: - return retToks - -class Group(TokenConverter): - """ - Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. - - Example:: - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] - -class Dict(TokenConverter): - """ - Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - prints:: - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - See more examples at L{ParseResults} of accessing fields by results name. 
- """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey,int): - ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) - else: - dictvalue = tok.copy() #ParseResults(i) - del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) - - if self.resultsName: - return [ tokenlist ] - else: - return tokenlist - - -class Suppress(TokenConverter): - """ - Converter for ignoring the results of a parsed expression. - - Example:: - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - prints:: - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - (See also L{delimitedList}.) - """ - def postParse( self, instring, loc, tokenlist ): - return [] - - def suppress( self ): - return self - - -class OnlyOnce(object): - """ - Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self,s,l,t): - if not self.called: - results = self.callable(s,l,t) - self.called = True - return results - raise ParseException(s,l,"") - def reset(self): - self.called = False - -def traceParseAction(f): - """ - Decorator for debugging parse actions. - - When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} - When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. - - Example:: - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - prints:: - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write( "< ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." - if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) - else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) - -def countedArray( expr, intExpr=None ): - """ - Helper to define a counted list of expressions. - This helper defines a pattern of the form:: - integer expr expr expr... - where the leading integer tells how many expr expressions follow. 
- The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. - - If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. - - Example:: - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s,l,t): - n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i,list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - def copyTokenToRepeater(s,l,t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches by - expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s,l,t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") - return _ustr(s) - -def oneOf( strs, caseless=False, useRegex=True ): - """ - Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. 
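(Editor's illustration: the longest-first reordering described above is what makes multi-character operators safe to mix with their one-character prefixes. A minimal sketch, assuming pyparsing 2.x.)

    from pyparsing import oneOf

    # '<=' is tried before '<' regardless of the order given here
    op = oneOf("< <= > >= = !=")
    print(op.parseString("<=").asList())  # expected: ['<=']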
- - Parameters: - - strs - a string of space-delimited literals, or a collection of string literals - - caseless - (default=C{False}) - treat all literals as caseless - - useRegex - (default=C{True}) - as an optimization, will generate a Regex - object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or - if creating a C{Regex} raises an exception) - - Example:: - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - prints:: - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral - else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal - - symbols = [] - if isinstance(strs,basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 - - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) - try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) - else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf( key, value ): - """ - Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. 
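(Editor's illustration: a smaller self-contained C{dictOf} sketch than the docstring example below; the key/value shapes are made up, assuming pyparsing 2.x.)

    from pyparsing import Word, Suppress, alphas, nums, dictOf

    # hypothetical "name: number" settings text
    key = Word(alphas) + Suppress(":")
    value = Word(nums)
    result = dictOf(key, value).parseString("width: 80 height: 24")
    print(result["width"], result["height"])  # expected: 80 24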
-
-    Example::
-        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
-        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
-        print(OneOrMore(attr_expr).parseString(text).dump())
-
-        attr_label = label
-        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
-
-        # similar to Dict, but simpler call format
-        result = dictOf(attr_label, attr_value).parseString(text)
-        print(result.dump())
-        print(result['shape'])
-        print(result.shape)  # object attribute access works too
-        print(result.asDict())
-    prints::
-        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
-        - color: light blue
-        - posn: upper left
-        - shape: SQUARE
-        - texture: burlap
-        SQUARE
-        SQUARE
-        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
-    """
-    return Dict( ZeroOrMore( Group ( key + value ) ) )
-
-def originalTextFor(expr, asString=True):
-    """
-    Helper to return the original, untokenized text for a given expression. Useful to
-    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
-    revert separate tokens with intervening whitespace back to the original matching
-    input text. By default, returns a string containing the original parsed text.
-
-    If the optional C{asString} argument is passed as C{False}, then the return value is a
-    C{L{ParseResults}} containing any results names that were originally matched, and a
-    single token containing the original matched text from the input string. So if
-    the expression passed to C{L{originalTextFor}} contains expressions with defined
-    results names, you must set C{asString} to C{False} if you want to preserve those
-    results name values.
-
-    Example::
-        src = "this is test <b> bold <i>text</i> </b> normal text "
-        for tag in ("b","i"):
-            opener,closer = makeHTMLTags(tag)
-            patt = originalTextFor(opener + SkipTo(closer) + closer)
-            print(patt.searchString(src)[0])
-    prints::
-        ['<b> bold <i>text</i> </b>']
-        ['<i>text</i>']
-    """
-    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
-    endlocMarker = locMarker.copy()
-    endlocMarker.callPreparse = False
-    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
-    if asString:
-        extractText = lambda s,l,t: s[t._original_start:t._original_end]
-    else:
-        def extractText(s,l,t):
-            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
-    matchExpr.setParseAction(extractText)
-    matchExpr.ignoreExprs = expr.ignoreExprs
-    return matchExpr
-
-def ungroup(expr):
-    """
-    Helper to undo pyparsing's default grouping of And expressions, even
-    if all but one are non-empty.
-    """
-    return TokenConverter(expr).setParseAction(lambda t:t[0])
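(Editor's illustration: C{originalTextFor} above is the tool of choice whenever the raw input slice matters more than the token list. A minimal sketch, assuming pyparsing 2.x; the grammar is made up.)

    from pyparsing import Word, alphas, nums, originalTextFor

    # recover the exact slice of input matched by a composite expression
    assignment = originalTextFor(Word(alphas) + "=" + Word(nums))
    print(assignment.parseString("answer = 42").asList())  # expected: ['answer = 42']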
-
-def locatedExpr(expr):
-    """
-    Helper to decorate a returned token with its starting and ending locations in the input string.
-    This helper adds the following results names:
-     - locn_start = location where matched expression begins
-     - locn_end = location where matched expression ends
-     - value = the actual parsed results
-
-    Be careful if the input text contains C{<TAB>} characters, you may want to call
-    C{L{ParserElement.parseWithTabs}}
-
-    Example::
-        wd = Word(alphas)
-        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
-            print(match)
-    prints::
-        [[0, 'ljsdf', 5]]
-        [[8, 'lksdjjf', 15]]
-        [[18, 'lkkjj', 23]]
-    """
-    locator = Empty().setParseAction(lambda s,l,t: l)
-    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
-
-
-# convenience constants for positional expressions
-empty       = Empty().setName("empty")
-lineStart   = LineStart().setName("lineStart")
-lineEnd     = LineEnd().setName("lineEnd")
-stringStart = StringStart().setName("stringStart")
-stringEnd   = StringEnd().setName("stringEnd")
-
-_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
-_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
-_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
-_charRange = Group(_singleChar + Suppress("-") + _singleChar)
-_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
-
-def srange(s):
-    r"""
-    Helper to easily define string ranges for use in Word construction. Borrows
-    syntax from regexp '[]' string range definitions::
-        srange("[0-9]")   -> "0123456789"
-        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
-        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
-    The input string must be enclosed in []'s, and the returned string is the expanded
-    character set joined into a single string.
-    The values enclosed in the []'s may be:
-     - a single character
-     - an escaped character with a leading backslash (such as C{\-} or C{\]})
-     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)
-       (C{\0x##} is also supported for backwards compatibility)
-     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
-     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
-     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
-    """
-    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
-    try:
-        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
-    except Exception:
-        return ""
-
-def matchOnlyAtCol(n):
-    """
-    Helper method for defining parse actions that require matching at a specific
-    column in the input text.
-    """
-    def verifyCol(strg,locn,toks):
-        if col(locn,strg) != n:
-            raise ParseException(strg,locn,"matched token not at column %d" % n)
-    return verifyCol
-
-def replaceWith(replStr):
-    """
-    Helper method for common parse actions that simply return a literal value. Especially
-    useful when used with C{L{transformString}()}.
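(Editor's illustration: C{replaceWith} pairs naturally with C{transformString}, as in this minimal sketch; the spellings are arbitrary, assuming pyparsing 2.x.)

    from pyparsing import oneOf, replaceWith

    # normalize several spellings to one canonical token during transformString
    yes = oneOf("y yes true").setParseAction(replaceWith("1"))
    print(yes.transformString("yes and y"))  # expected: '1 and 1'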
- - Example:: - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s,l,t: [replStr] - -def removeQuotes(s,l,t): - """ - Helper parse action for removing quotation marks from parsed quoted strings. - - Example:: - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """ - Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional - args are passed, they are forwarded to the given function as additional arguments after - the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the - parsed data to an integer using base 16. - - Example (compare the last to example in L{ParserElement.transformString}:: - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - prints:: - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s,l,t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
-
-def _makeTags(tagStr, xml):
-    """Internal helper to construct opening and closing tag expressions, given a tag name"""
-    if isinstance(tagStr,basestring):
-        resname = tagStr
-        tagStr = Keyword(tagStr, caseless=not xml)
-    else:
-        resname = tagStr.name
-
-    tagAttrName = Word(alphas,alphanums+"_-:")
-    if (xml):
-        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
-        openTag = Suppress("<") + tagStr("tag") + \
-                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
-                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
-    else:
-        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
-        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
-        openTag = Suppress("<") + tagStr("tag") + \
-                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
-                Optional( Suppress("=") + tagAttrValue ) ))) + \
-                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
-    closeTag = Combine(_L("</") + tagStr + ">")
-
-    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
-    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
-    openTag.tag = resname
-    closeTag.tag = resname
-    return openTag, closeTag
-
-def makeHTMLTags(tagStr):
-    """
-    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
-    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
-
-    Example::
-        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
-        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
-        a,a_end = makeHTMLTags("A")
-        link_expr = a + SkipTo(a_end)("link_text") + a_end
-
-        for link in link_expr.searchString(text):
-            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
-            print(link.link_text, '->', link.href)
-    prints::
-        pyparsing -> http://pyparsing.wikispaces.com
-    """
-    return _makeTags( tagStr, False )
-
-def makeXMLTags(tagStr):
-    """
-    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
-    tags only in the given upper/lower case.
-
-    Example: similar to L{makeHTMLTags}
-    """
-    return _makeTags( tagStr, True )
-
-def withAttribute(*args,**attrDict):
-    """
-    Helper to create a validating parse action to be used with start tags created
-    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
-    with a required attribute value, to avoid false matches on common tags such as
-    C{<TD>} or C{<DIV>}.
-
-    Call C{withAttribute} with a series of attribute names and values. Specify the list
-    of filter attributes names and values as:
-     - keyword arguments, as in C{(align="right")}, or
-     - as an explicit dict with C{**} operator, when an attribute name is also a Python
-       reserved word, as in C{**{"class":"Customer", "align":"right"}}
-     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
-    For attribute names with a namespace prefix, you must use the second form. Attribute
-    names are matched insensitive to upper/lower case.
-
-    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
-
-    To verify that the attribute exists, but without specifying a value, pass
-    C{withAttribute.ANY_VALUE} as the value.
-
-    Example::
-        html = '''
-            <div>
-            Some text
-            <div type="grid">1 4 0 1 0</div>
-            <div type="graph">1,3 2,3 1,1</div>
-            <div>this has no type</div>
-            </div>
-
-        '''
-        div,div_end = makeHTMLTags("div")
-
-        # only match div tag having a type attribute with value "grid"
-        div_grid = div().setParseAction(withAttribute(type="grid"))
-        grid_expr = div_grid + SkipTo(div | div_end)("body")
-        for grid_header in grid_expr.searchString(html):
-            print(grid_header.body)
-
-        # construct a match with any div tag having a type attribute, regardless of the value
-        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
-        div_expr = div_any_type + SkipTo(div | div_end)("body")
-        for div_header in div_expr.searchString(html):
-            print(div_header.body)
-    prints::
-        1 4 0 1 0
-
-        1 4 0 1 0
-        1,3 2,3 1,1
-    """
-    if args:
-        attrs = args[:]
-    else:
-        attrs = attrDict.items()
-    attrs = [(k,v) for k,v in attrs]
-    def pa(s,l,tokens):
-        for attrName,attrValue in attrs:
-            if attrName not in tokens:
-                raise ParseException(s,l,"no matching attribute " + attrName)
-            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
-                raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
-                                        (attrName, tokens[attrName], attrValue))
-    return pa
-withAttribute.ANY_VALUE = object()
-
-def withClass(classname, namespace=''):
-    """
-    Simplified version of C{L{withAttribute}} when matching on a div class - made
-    difficult because C{class} is a reserved word in Python.
-
-    Example::
-        html = '''
-            <div>
-            Some text
-            <div class="grid">1 4 0 1 0</div>
-            <div class="graph">1,3 2,3 1,1</div>
-            <div>this &lt;div&gt; has no class</div>
-            </div>
-
-        '''
-        div,div_end = makeHTMLTags("div")
-        div_grid = div().setParseAction(withClass("grid"))
-
-        grid_expr = div_grid + SkipTo(div | div_end)("body")
-        for grid_header in grid_expr.searchString(html):
-            print(grid_header.body)
-
-        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
-        div_expr = div_any_type + SkipTo(div | div_end)("body")
-        for div_header in div_expr.searchString(html):
-            print(div_header.body)
-    prints::
-        1 4 0 1 0
-
-        1 4 0 1 0
-        1,3 2,3 1,1
-    """
-    classattr = "%s:class" % namespace if namespace else "class"
-    return withAttribute(**{classattr : classname})
-
-opAssoc = _Constants()
-opAssoc.LEFT = object()
-opAssoc.RIGHT = object()
-
-def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
-    """
-    Helper method for constructing grammars of expressions made up of
-    operators working in a precedence hierarchy. Operators may be unary or
-    binary, left- or right-associative. Parse actions can also be attached
-    to operator expressions. The generated parser will also recognize the use
-    of parentheses to override operator precedences (see example below).
-
-    Note: if you define a deep operator list, you may see performance issues
-    when using infixNotation. See L{ParserElement.enablePackrat} for a
-    mechanism to potentially improve your parser performance.
-
-    Parameters:
-     - baseExpr - expression representing the most basic element for the nested
-     - opList - list of tuples, one for each operator precedence level in the
-       expression grammar; each tuple is of the form
-       (opExpr, numTerms, rightLeftAssoc, parseAction), where:
-        - opExpr is the pyparsing expression for the operator;
-          may also be a string, which will be converted to a Literal;
-          if numTerms is 3, opExpr is a tuple of two expressions, for the
-          two operators separating the 3 terms
-        - numTerms is the number of terms for this operator (must
-          be 1, 2, or 3)
-        - rightLeftAssoc is the indicator whether the operator is
-          right or left associative, using the pyparsing-defined
-          constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) - - Example:: - # simple example of four-function arithmetic with ints and variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - prints:: - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") 
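(Editor's illustration, closing out C{infixNotation} above: a minimal precedence-grammar sketch, assuming pyparsing 2.x; everything here is illustrative.)

    from pyparsing import Word, nums, oneOf, infixNotation, opAssoc

    # smallest useful precedence grammar: '*' and '/' bind tighter than '+' and '-'
    integer = Word(nums).setParseAction(lambda t: int(t[0]))
    arith = infixNotation(integer, [
        (oneOf('* /'), 2, opAssoc.LEFT),
        (oneOf('+ -'), 2, opAssoc.LEFT),
    ])
    print(arith.parseString("1+2*3").asList())  # expected: [[1, '+', [2, '*', 3]]]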
-quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. - - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. - - Example:: - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - prints:: - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = 
Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. - - Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - - A valid block must contain at least one C{blockStatement}. - - Example:: - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - prints:: - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = 
srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
-punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
-
-anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
-_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
-commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
-def replaceHTMLEntity(t):
-    """Helper parser action to replace common HTML entities with their special characters"""
-    return _htmlEntityMap.get(t.entity)
-
-# it's easy to get these comment structures wrong - they're very common, so may as well make them available
-cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
-"Comment of the form C{/* ... */}"
-
-htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
-"Comment of the form C{<!-- ... -->}"
-
-restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
-dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
-"Comment of the form C{// ... (to end of line)}"
-
-cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")
-"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"
-
-javaStyleComment = cppStyleComment
-"Same as C{L{cppStyleComment}}"
-
-pythonStyleComment = Regex(r"#.*").setName("Python style comment")
-"Comment of the form C{# ... (to end of line)}"
-
-_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
-                                  Optional( Word(" \t") +
-                                            ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
-commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
-"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.
-   This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""
-
-# some other useful expressions - using lower-case class name since we are really using this as a namespace
-class pyparsing_common:
-    """
-    Here are some common low-level expressions that may be useful in jump-starting parser development:
-     - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
-     - common L{programming identifiers<identifier>}
-     - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
-     - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
-     - L{UUID<uuid>}
-     - L{comma-separated list<comma_separated_list>}
-    Parse actions:
-     - C{L{convertToInteger}}
-     - C{L{convertToFloat}}
-     - C{L{convertToDate}}
-     - C{L{convertToDatetime}}
-     - C{L{stripHTMLTags}}
-     - C{L{upcaseTokens}}
-     - C{L{downcaseTokens}}
-
-    Example::
-        pyparsing_common.number.runTests('''
-            # any int or real number, returned as the appropriate type
-            100
-            -100
-            +100
-            3.14159
-            6.02e23
-            1e-12
-            ''')
-
-        pyparsing_common.fnumber.runTests('''
-            # any int or real number, returned as float
-            100
-            -100
-            +100
-            3.14159
-            6.02e23
-            1e-12
-            ''')
-
-        pyparsing_common.hex_integer.runTests('''
-            # hex numbers
-            100
-            FF
-            ''')
-
-        pyparsing_common.fraction.runTests('''
-            # fractions
-            1/2
-            -3/4
-            ''')
-
-        pyparsing_common.mixed_integer.runTests('''
-            # mixed fractions
-            1
-            1/2
-            -3/4
-            1-3/4
-            ''')
-
-        import uuid
-        pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
-        pyparsing_common.uuid.runTests('''
-            # uuid
-            12345678-1234-5678-1234-567812345678
-            ''')
-    prints::
-        # any int or real number, returned as the appropriate type
-        100
-        [100]
-
-        -100
-        [-100]
-
-        +100
-        [100]
-
-        3.14159
-        [3.14159]
-
-        6.02e23
-        [6.02e+23]
-
-        1e-12
-        [1e-12]
-
-        # any int or real number, returned as float
-        100
-        [100.0]
-
-        -100
-        [-100.0]
-
-        +100
-        [100.0]
-
-        3.14159
-        [3.14159]
-
-        6.02e23
-        [6.02e+23]
-
-        1e-12
-        [1e-12]
-
-        # hex numbers
-        100
-        [256]
-
-        FF
-        [255]
-
-        # fractions
-        1/2
-        [0.5]
-
-        -3/4
-        [-0.75]
-
-        # mixed fractions
-        1
-        [1]
-
-        1/2
-        [0.5]
-
-        -3/4
-        [-0.75]
-
-        1-3/4
-        [1.75]
-
-        # uuid
-        12345678-1234-5678-1234-567812345678
-        [UUID('12345678-1234-5678-1234-567812345678')]
-    """
-
-    convertToInteger = tokenMap(int)
-    """
-    Parse action for converting parsed integers to Python int
-    """
-
-    convertToFloat = tokenMap(float)
-    """
-    Parse action for converting parsed numbers to Python float
-    """
-
-    integer = Word(nums).setName("integer").setParseAction(convertToInteger)
-    """expression that parses an unsigned integer, returns an int"""
-
-    hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
-    """expression that parses a hexadecimal integer, returns an int"""
-
-    signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
-    """expression that parses an integer with optional leading sign, returns an int"""
-
-    fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
-    """fractional expression of an integer divided by an integer, returns a float"""
-    fraction.addParseAction(lambda t: t[0]/t[-1])
-
-    mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
-    """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
-    mixed_integer.addParseAction(sum)
-
-    real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
-    """expression that parses a floating point number and returns a
float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)"
-
-    @staticmethod
-    def convertToDate(fmt="%Y-%m-%d"):
-        """
-        Helper to create a parse action for converting parsed date string to Python datetime.date
-
-        Params -
-         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})
-
-        Example::
-            date_expr = pyparsing_common.iso8601_date.copy()
-            date_expr.setParseAction(pyparsing_common.convertToDate())
-            print(date_expr.parseString("1999-12-31"))
-        prints::
-            [datetime.date(1999, 12, 31)]
-        """
-        def cvt_fn(s,l,t):
-            try:
-                return datetime.strptime(t[0], fmt).date()
-            except ValueError as ve:
-                raise ParseException(s, l, str(ve))
-        return cvt_fn
-
-    @staticmethod
-    def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
-        """
-        Helper to create a parse action for converting parsed datetime string to Python datetime.datetime
-
-        Params -
-         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})
-
-        Example::
-            dt_expr = pyparsing_common.iso8601_datetime.copy()
-            dt_expr.setParseAction(pyparsing_common.convertToDatetime())
-            print(dt_expr.parseString("1999-12-31T23:59:59.999"))
-        prints::
-            [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
-        """
-        def cvt_fn(s,l,t):
-            try:
-                return datetime.strptime(t[0], fmt)
-            except ValueError as ve:
-                raise ParseException(s, l, str(ve))
-        return cvt_fn
-
-    iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
-    "ISO8601 date (C{yyyy-mm-dd})"
-
-    iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
-    "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"
-
-    uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
-    "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"
-
-    _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
-    @staticmethod
-    def stripHTMLTags(s, l, tokens):
-        """
-        Parse action to remove HTML tags from web page HTML source
-
-        Example::
-            # strip HTML links from normal text
-            text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
-            td,td_end = makeHTMLTags("TD")
-            table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
-
-            print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
-        """
-        return pyparsing_common._html_stripper.transformString(tokens[0])
-
-    _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')
-                                      + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
-    comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
-    """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
-
-    upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
-    """Parse action to convert tokens to upper case."""
-
-    downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
-    """Parse action to convert tokens to lower case."""
-
-
-if __name__ == "__main__":
-
-    selectToken    = CaselessLiteral("select")
-    fromToken      = CaselessLiteral("from")
-
-    ident          = Word(alphas, alphanums + "_$")
-
-    columnName     = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
-    columnNameList = Group(delimitedList(columnName)).setName("columns")
-    columnSpec     = ('*' | columnNameList)
-
-    tableName      = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
-    tableNameList  = Group(delimitedList(tableName)).setName("tables")
-
-    simpleSQL      = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")
-
-    # demo runTests method, including embedded comments in test string
-    simpleSQL.runTests("""
-        # '*' as column list and dotted table name
-        select * from SYS.XYZZY
-
-        # caseless match on "SELECT", and casts back to "select"
-        SELECT * from XYZZY, ABC
-
-        # list of column names, and mixed case SELECT keyword
-        Select AA,BB,CC from Sys.dual
-
-        # multiple tables
-        Select A, B, C from Sys.dual, Table2
-
-        # invalid SELECT keyword - should fail
-        Xelect A, B, C from Sys.dual
-
-        # incomplete command - should fail
-        Select
-
-        # invalid column name - should fail
-        Select ^^^ frox Sys.dual
-
-        """)
-
-    pyparsing_common.number.runTests("""
-        100
-        -100
-        +100
-        3.14159
-        6.02e23
-        1e-12
-        """)
-
-    # any int or real number, returned as float
-    pyparsing_common.fnumber.runTests("""
-        100
-        -100
-        +100
-        3.14159
-        6.02e23
-        1e-12
-        """)
-
-    pyparsing_common.hex_integer.runTests("""
-        100
-        FF
-        """)
-
-    import uuid
-    pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
-    pyparsing_common.uuid.runTests("""
-        12345678-1234-5678-1234-567812345678
-        """)
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/INSTALLER b/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/pyparsing-2.2.1.dist-info/INSTALLER
rename to setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER
diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE b/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE
new file mode 100644
index 0000000000..e859590f88
--- /dev/null
+++ b/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Taneli Hukkinen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to
permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA b/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA new file mode 100644 index 0000000000..efd87ecc16 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA @@ -0,0 +1,206 @@ +Metadata-Version: 2.1 +Name: tomli +Version: 2.0.1 +Summary: A lil' TOML parser +Keywords: toml +Author-email: Taneli Hukkinen +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md +Project-URL: Homepage, https://github.com/hukkin/tomli + +[![Build Status](https://github.com/hukkin/tomli/workflows/Tests/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) +[![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli) +[![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli) + +# Tomli + +> A lil' TOML parser + +**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)* + + + +- [Intro](#intro) +- [Installation](#installation) +- [Usage](#usage) + - [Parse a TOML string](#parse-a-toml-string) + - [Parse a TOML file](#parse-a-toml-file) + - [Handle invalid TOML](#handle-invalid-toml) + - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats) +- [FAQ](#faq) + - [Why this parser?](#why-this-parser) + - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported) + - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function) + - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types) +- [Performance](#performance) + + + +## Intro + +Tomli is a Python library for parsing [TOML](https://toml.io). +Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0). 
+ +## Installation + +```bash +pip install tomli +``` + +## Usage + +### Parse a TOML string + +```python +import tomli + +toml_str = """ + gretzky = 99 + + [kurri] + jari = 17 + """ + +toml_dict = tomli.loads(toml_str) +assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}} +``` + +### Parse a TOML file + +```python +import tomli + +with open("path_to_file/conf.toml", "rb") as f: + toml_dict = tomli.load(f) +``` + +The file must be opened in binary mode (with the `"rb"` flag). +Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled, +both of which are required to correctly parse TOML. + +### Handle invalid TOML + +```python +import tomli + +try: + toml_dict = tomli.loads("]] this is invalid TOML [[") +except tomli.TOMLDecodeError: + print("Yep, definitely not valid.") +``` + +Note that error messages are considered informational only. +They should not be assumed to stay constant across Tomli versions. + +### Construct `decimal.Decimal`s from TOML floats + +```python +from decimal import Decimal +import tomli + +toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal) +assert toml_dict["precision-matters"] == Decimal("0.982492") +``` + +Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type. +The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated. + +Illegal types are `dict` and `list`, and their subtypes. +A `ValueError` will be raised if `parse_float` produces illegal types. + +## FAQ + +### Why this parser? + +- it's lil' +- pure Python with zero dependencies +- the fastest pure Python parser [\*](#performance): + 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/), + 2.4x as fast as [toml](https://pypi.org/project/toml/) +- outputs [basic data types](#how-do-toml-types-map-into-python-types) only +- 100% spec compliant: passes all tests in + [a test set](https://github.com/toml-lang/compliance/pull/8) + soon to be merged to the official + [compliance tests for TOML](https://github.com/toml-lang/compliance) + repository +- thoroughly tested: 100% branch coverage + +### Is comment preserving round-trip parsing supported? + +No. + +The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only. +Preserving comments requires a custom type to be returned so will not be supported, +at least not by the `tomli.loads` and `tomli.load` functions. + +Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need. + +### Is there a `dumps`, `write` or `encode` function? + +[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions. + +The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal. + +### How do TOML types map into Python types? 
+ +| TOML type | Python type | Details | +| ---------------- | ------------------- | ------------------------------------------------------------ | +| Document Root | `dict` | | +| Key | `str` | | +| String | `str` | | +| Integer | `int` | | +| Float | `float` | | +| Boolean | `bool` | | +| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` | +| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` | +| Local Date | `datetime.date` | | +| Local Time | `datetime.time` | | +| Array | `list` | | +| Table | `dict` | | +| Inline Table | `dict` | | + +## Performance + +The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers. +The benchmark can be run with `tox -e benchmark-pypi`. +Running the benchmark on my personal computer output the following: + +```console +foo@bar:~/dev/tomli$ tox -e benchmark-pypi +benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2 +benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909' +benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())' +2021-07-23 +benchmark-pypi run-test: commands[1] | python --version +Python 3.8.10 +benchmark-pypi run-test: commands[2] | python benchmark/run.py +Parsing data.toml 5000 times: +------------------------------------------------------ + parser | exec time | performance (more is better) +-----------+------------+----------------------------- + rtoml | 0.901 s | baseline (100%) + pytomlpp | 1.08 s | 83.15% + tomli | 3.89 s | 23.15% + toml | 9.36 s | 9.63% + qtoml | 11.5 s | 7.82% + tomlkit | 56.8 s | 1.59% +``` + +The parsers are ordered from fastest to slowest, using the fastest parser as baseline. +Tomli performed the best out of all pure Python TOML parsers, +losing only to pytomlpp (wraps C++) and rtoml (wraps Rust). 
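
A small self-check of the date/time rows in the mapping table above (illustrative only — the TOML document and key names are made up; the expected behavior follows `match_to_datetime` in `tomli/_re.py` later in this diff):

```python
# Offset date-times carry a datetime.timezone in tzinfo; local ones do not.
import datetime

import tomli

doc = """
utc = 1979-05-27T07:32:00Z
local = 1979-05-27T07:32:00
day = 1979-05-27
"""

data = tomli.loads(doc)
assert data["utc"].tzinfo == datetime.timezone.utc   # Offset Date-Time
assert data["local"].tzinfo is None                  # Local Date-Time
assert data["day"] == datetime.date(1979, 5, 27)     # Local Date
```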
+ diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD new file mode 100644 index 0000000000..5f7a6b06b3 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD @@ -0,0 +1,15 @@ +tomli-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tomli-2.0.1.dist-info/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072 +tomli-2.0.1.dist-info/METADATA,sha256=zPDceKmPwJGLWtZykrHixL7WVXWmJGzZ1jyRT5lCoPI,8875 +tomli-2.0.1.dist-info/RECORD,, +tomli-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +tomli-2.0.1.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81 +tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396 +tomli/__pycache__/__init__.cpython-311.pyc,, +tomli/__pycache__/_parser.cpython-311.pyc,, +tomli/__pycache__/_re.cpython-311.pyc,, +tomli/__pycache__/_types.cpython-311.pyc,, +tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633 +tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943 +tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 +tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED b/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL b/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL new file mode 100644 index 0000000000..c727d14823 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.6.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/setuptools/_vendor/tomli/__init__.py b/setuptools/_vendor/tomli/__init__.py new file mode 100644 index 0000000000..4c6ec97ec6 --- /dev/null +++ b/setuptools/_vendor/tomli/__init__.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from ._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. +TOMLDecodeError.__module__ = __name__ diff --git a/setuptools/_vendor/tomli/_parser.py b/setuptools/_vendor/tomli/_parser.py new file mode 100644 index 0000000000..f1bb0aa19a --- /dev/null +++ b/setuptools/_vendor/tomli/_parser.py @@ -0,0 +1,691 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from collections.abc import Iterable +import string +from types import MappingProxyType +from typing import Any, BinaryIO, NamedTuple + +from ._re import ( + RE_DATETIME, + RE_LOCALTIME, + RE_NUMBER, + match_to_datetime, + match_to_localtime, + match_to_number, +) +from ._types import Key, ParseFloat, Pos + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. They are +# currently handled as separate cases in the parser functions. 
+ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]: + """Parse TOML from a binary file object.""" + b = __fp.read() + try: + s = b.decode() + except AttributeError: + raise TypeError( + "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`" + ) from None + return loads(s, parse_float=parse_float) + + +def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = __s.replace("\r\n", "\n") + pos = 0 + out = Output(NestedDict(), Flags()) + header: Key = () + parse_float = make_safe_parse_float(parse_float) + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. + try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, out, header, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: str | None = src[pos + 1] + except IndexError: + second_char = None + out.flags.finalize_pending() + if second_char == "[": + pos, header = create_list_rule(src, pos, out) + else: + pos, header = create_dict_rule(src, pos, out) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return out.data.dict + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. 
+ EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: dict[str, dict] = {} + self._pending_flags: set[tuple[Key, int]] = set() + + def add_pending(self, key: Key, flag: int) -> None: + self._pending_flags.add((key, flag)) + + def finalize_pending(self) -> None: + for key, flag in self._pending_flags: + self.set(key, flag, recursive=False) + self._pending_flags.clear() + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + if not isinstance(list_, list): + raise KeyError("An object other than list found behind this key") + list_.append({}) + else: + cont[last_key] = [{}] + + +class Output(NamedTuple): + data: NestedDict + flags: Flags + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: + while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: frozenset[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 1 # Skip "[" + pos = 
skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.get_or_create_nest(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]", pos): + raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + return pos + 1, key + + +def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... + out.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + return pos + 2, key + + +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = header + key_parent + + relative_path_cont_keys = (header + key[:i] for i in range(1, len(key))) + for cont_key in relative_path_cont_keys: + # Check that dotted key syntax does not redefine an existing table + if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): + raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + # Containers in the relative path can't be opened with the table syntax or + # dotted key/value syntax in following table sections. 
+ out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) + + if out.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" + ) + + try: + nest = out.data.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, "Cannot overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + out.flags.set(header + key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key: Key = (key_part,) + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != ".": + return pos, key + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key += (key_part,) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos + 1, 
nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( + src: str, pos: Pos, *, multiline: bool = False +) -> tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + try: + char = src[pos] + except IndexError: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, "Unescaped '\\' in a string") + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]: + pos += 3 + if src.startswith("\n", pos): + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. 
+ if not src.startswith(delim, pos): + return pos, result + pos += 1 + if not src.startswith(delim, pos): + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") from None + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src.startswith('"""', pos): + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f"Illegal character {char!r}") + pos += 1 + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Any]: + try: + char: str | None = src[pos] + except IndexError: + char = None + + # IMPORTANT: order conditions based on speed of checking and likelihood + + # Basic strings + if char == '"': + if src.startswith('"""', pos): + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src.startswith("'''", pos): + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src.startswith("true", pos): + return pos + 4, True + if char == "f": + if src.startswith("false", pos): + return pos + 5, False + + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of dates and times. 
+ number_match = RE_NUMBER.match(src, pos) + if number_match: + return number_match.end(), match_to_number(number_match, parse_float) + + # Special floats + first_three = src[pos : pos + 3] + if first_three in {"inf", "nan"}: + return pos + 3, parse_float(first_three) + first_four = src[pos : pos + 4] + if first_four in {"-inf", "+inf", "-nan", "+nan"}: + return pos + 4, parse_float(first_four) + + raise suffixed_err(src, pos, "Invalid value") + + +def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: + """Return a `TOMLDecodeError` where error message is suffixed with + coordinates in source.""" + + def coord_repr(src: str, pos: Pos) -> str: + if pos >= len(src): + return "end of document" + line = src.count("\n", 0, pos) + 1 + if line == 1: + column = pos + 1 + else: + column = pos - src.rindex("\n", 0, pos) + return f"line {line}, column {column}" + + return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + + +def is_unicode_scalar_value(codepoint: int) -> bool: + return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) + + +def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat: + """A decorator to make `parse_float` safe. + + `parse_float` must not return dicts or lists, because these types + would be mixed with parsed TOML tables and arrays, thus confusing + the parser. The returned decorated callable raises `ValueError` + instead of returning illegal types. + """ + # The default `float` callable never returns illegal types. Optimize it. + if parse_float is float: # type: ignore[comparison-overlap] + return float + + def safe_parse_float(float_str: str) -> Any: + float_value = parse_float(float_str) + if isinstance(float_value, (dict, list)): + raise ValueError("parse_float must not return dicts or lists") + return float_value + + return safe_parse_float diff --git a/setuptools/_vendor/tomli/_re.py b/setuptools/_vendor/tomli/_re.py new file mode 100644 index 0000000000..994bb7493f --- /dev/null +++ b/setuptools/_vendor/tomli/_re.py @@ -0,0 +1,107 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from datetime import date, datetime, time, timedelta, timezone, tzinfo +from functools import lru_cache +import re +from typing import Any + +from ._types import ParseFloat + +# E.g. +# - 00:32:00.999999 +# - 00:32:00 +_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" + +RE_NUMBER = re.compile( + r""" +0 +(?: + x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex + | + b[01](?:_?[01])* # bin + | + o[0-7](?:_?[0-7])* # oct +) +| +[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part +(?P + (?:\.[0-9](?:_?[0-9])*)? # optional fractional part + (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part +) +""", + flags=re.VERBOSE, +) +RE_LOCALTIME = re.compile(_TIME_RE_STR) +RE_DATETIME = re.compile( + rf""" +([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27 +(?: + [Tt ] + {_TIME_RE_STR} + (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset +)? +""", + flags=re.VERBOSE, +) + + +def match_to_datetime(match: re.Match) -> datetime | date: + """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. + + Raises ValueError if the match does not correspond to a valid date + or datetime. 
+ """ + ( + year_str, + month_str, + day_str, + hour_str, + minute_str, + sec_str, + micros_str, + zulu_time, + offset_sign_str, + offset_hour_str, + offset_minute_str, + ) = match.groups() + year, month, day = int(year_str), int(month_str), int(day_str) + if hour_str is None: + return date(year, month, day) + hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + if offset_sign_str: + tz: tzinfo | None = cached_tz( + offset_hour_str, offset_minute_str, offset_sign_str + ) + elif zulu_time: + tz = timezone.utc + else: # local date-time + tz = None + return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) + + +@lru_cache(maxsize=None) +def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + sign = 1 if sign_str == "+" else -1 + return timezone( + timedelta( + hours=sign * int(hour_str), + minutes=sign * int(minute_str), + ) + ) + + +def match_to_localtime(match: re.Match) -> time: + hour_str, minute_str, sec_str, micros_str = match.groups() + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + return time(int(hour_str), int(minute_str), int(sec_str), micros) + + +def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any: + if match.group("floatpart"): + return parse_float(match.group()) + return int(match.group(), 0) diff --git a/setuptools/_vendor/tomli/_types.py b/setuptools/_vendor/tomli/_types.py new file mode 100644 index 0000000000..d949412e03 --- /dev/null +++ b/setuptools/_vendor/tomli/_types.py @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from typing import Any, Callable, Tuple + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] 
+Pos = int diff --git a/setuptools/_vendor/tomli/py.typed b/setuptools/_vendor/tomli/py.typed new file mode 100644 index 0000000000..7632ecf775 --- /dev/null +++ b/setuptools/_vendor/tomli/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD index 9a7f60078f..786de8542d 100644 --- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD @@ -1,4 +1,4 @@ -__pycache__/typing_extensions.cpython-310.pyc,, +__pycache__/typing_extensions.cpython-311.pyc,, typing_extensions-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 typing_extensions-4.0.1.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 typing_extensions-4.0.1.dist-info/METADATA,sha256=iZ_5HONZZBXtF4kroz-IPZYIl9M8IE1B00R82dWcBqE,1736 diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt index db24b402f5..9c9ccbe3e1 100644 --- a/setuptools/_vendor/vendored.txt +++ b/setuptools/_vendor/vendored.txt @@ -1,11 +1,11 @@ -packaging==21.3 -pyparsing==2.2.1 +packaging==23.0 ordered-set==3.1.1 more_itertools==8.8.0 jaraco.text==3.7.0 -importlib_resources==5.4.0 -importlib_metadata==4.11.1 +importlib_resources==5.10.2 +importlib_metadata==6.0.0 # required for importlib_metadata on older Pythons typing_extensions==4.0.1 # required for importlib_resources and _metadata on older Pythons zipp==3.7.0 +tomli==2.0.1 diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD index 38d0b21ad9..0a88551ce0 100644 --- a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD @@ -1,4 +1,4 @@ -__pycache__/zipp.cpython-310.pyc,, +__pycache__/zipp.cpython-311.pyc,, zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261 diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py index 73b2db752a..d8e10c13e1 100644 --- a/setuptools/archive_util.py +++ b/setuptools/archive_util.py @@ -100,29 +100,37 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): raise UnrecognizedFormat("%s is not a zip file" % (filename,)) with zipfile.ZipFile(filename) as z: - for info in z.infolist(): - name = info.filename + _unpack_zipfile_obj(z, extract_dir, progress_filter) - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name.split('/'): - continue - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - with open(target, 'wb') as f: - f.write(data) - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) +def _unpack_zipfile_obj(zipfile_obj, extract_dir, progress_filter=default_filter): + """Internal/private API used by other parts of setuptools. + Similar to ``unpack_zipfile``, but receives an already opened :obj:`zipfile.ZipFile` + object instead of a filename. + """ + for info in zipfile_obj.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. 
in them + if name.startswith('/') or '..' in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = zipfile_obj.read(info.filename) + with open(target, 'wb') as f: + f.write(data) + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) def _resolve_tar_file_or_dir(tar_obj, tar_member_obj): diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 5dc65e2d82..ee8ef13faa 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -28,17 +28,23 @@ import io import os +import shlex import sys import tokenize import shutil import contextlib import tempfile import warnings +from pathlib import Path +from typing import Dict, Iterator, List, Optional, Union import setuptools import distutils +from . import errors +from ._path import same_path from ._reqs import parse_strings -from .extern.more_itertools import always_iterable +from .warnings import SetuptoolsDeprecationWarning +from distutils.util import strtobool __all__ = ['get_requires_for_build_sdist', @@ -46,9 +52,15 @@ 'prepare_metadata_for_build_wheel', 'build_wheel', 'build_sdist', + 'get_requires_for_build_editable', + 'prepare_metadata_for_build_editable', + 'build_editable', '__legacy__', 'SetupRequirementsError'] +SETUPTOOLS_ENABLE_FEATURES = os.getenv("SETUPTOOLS_ENABLE_FEATURES", "").lower() +LEGACY_EDITABLE = "legacy-editable" in SETUPTOOLS_ENABLE_FEATURES.replace("_", "-") + class SetupRequirementsError(BaseException): def __init__(self, specifiers): @@ -127,33 +139,185 @@ def suppress_known_deprecation(): yield -class _BuildMetaBackend: +_ConfigSettings = Optional[Dict[str, Union[str, List[str], None]]] +""" +Currently the user can run:: + + pip install -e . --config-settings key=value + python -m build -C--key=value -C key=value + +- pip will pass both key and value as strings, overwriting repeated keys + (pypa/pip#11059). +- build will accumulate values associated with repeated keys in a list. + It will also accept keys with no associated value. + This means that an option passed by build can be ``str | list[str] | None``. +- PEP 517 specifies that ``config_settings`` is an optional dict. +""" + + +class _ConfigSettingsTranslator: + """Translate ``config_settings`` into distutils-style command arguments. + Only a limited number of options are currently supported. + """ + # See pypa/setuptools#1928 pypa/setuptools#2491 + + def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]: + """ + Get the value of a specific key in ``config_settings`` as a list of strings. + + >>> fn = _ConfigSettingsTranslator()._get_config + >>> fn("--global-option", None) + [] + >>> fn("--global-option", {}) + [] + >>> fn("--global-option", {'--global-option': 'foo'}) + ['foo'] + >>> fn("--global-option", {'--global-option': ['foo']}) + ['foo'] + >>> fn("--global-option", {'--global-option': 'foo'}) + ['foo'] + >>> fn("--global-option", {'--global-option': 'foo bar'}) + ['foo', 'bar'] + """ + cfg = config_settings or {} + opts = cfg.get(key) or [] + return shlex.split(opts) if isinstance(opts, str) else opts + + def _valid_global_options(self): + """Global options accepted by setuptools (e.g. 
quiet or verbose).""" + options = (opt[:2] for opt in setuptools.dist.Distribution.global_options) + return {flag for long_and_short in options for flag in long_and_short if flag} - @staticmethod - def _fix_config(config_settings): + def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ - Ensure config settings meet certain expectations. - - >>> fc = _BuildMetaBackend._fix_config - >>> fc(None) - {'--global-option': []} - >>> fc({}) - {'--global-option': []} - >>> fc({'--global-option': 'foo'}) - {'--global-option': ['foo']} - >>> fc({'--global-option': ['foo']}) - {'--global-option': ['foo']} + Let the user specify ``verbose`` or ``quiet`` + escape hatch via + ``--global-option``. + Note: ``-v``, ``-vv``, ``-vvv`` have similar effects in setuptools, + so we just have to cover the basic scenario ``-v``. + + >>> fn = _ConfigSettingsTranslator()._global_args + >>> list(fn(None)) + [] + >>> list(fn({"verbose": "False"})) + ['-q'] + >>> list(fn({"verbose": "1"})) + ['-v'] + >>> list(fn({"--verbose": None})) + ['-v'] + >>> list(fn({"verbose": "true", "--global-option": "-q --no-user-cfg"})) + ['-v', '-q', '--no-user-cfg'] + >>> list(fn({"--quiet": None})) + ['-q'] """ - config_settings = config_settings or {} - config_settings['--global-option'] = list(always_iterable( - config_settings.get('--global-option'))) - return config_settings + cfg = config_settings or {} + falsey = {"false", "no", "0", "off"} + if "verbose" in cfg or "--verbose" in cfg: + level = str(cfg.get("verbose") or cfg.get("--verbose") or "1") + yield ("-q" if level.lower() in falsey else "-v") + if "quiet" in cfg or "--quiet" in cfg: + level = str(cfg.get("quiet") or cfg.get("--quiet") or "1") + yield ("-v" if level.lower() in falsey else "-q") + + valid = self._valid_global_options() + args = self._get_config("--global-option", config_settings) + yield from (arg for arg in args if arg.strip("-") in valid) + + def __dist_info_args(self, config_settings: _ConfigSettings) -> Iterator[str]: + """ + The ``dist_info`` command accepts ``tag-date`` and ``tag-build``. + + .. warning:: + We cannot use this yet as it requires the ``sdist`` and ``bdist_wheel`` + commands run in ``build_sdist`` and ``build_wheel`` to re-use the egg-info + directory created in ``prepare_metadata_for_build_wheel``. + + >>> fn = _ConfigSettingsTranslator()._ConfigSettingsTranslator__dist_info_args + >>> list(fn(None)) + [] + >>> list(fn({"tag-date": "False"})) + ['--no-date'] + >>> list(fn({"tag-date": None})) + ['--no-date'] + >>> list(fn({"tag-date": "true", "tag-build": ".a"})) + ['--tag-date', '--tag-build', '.a'] + """ + cfg = config_settings or {} + if "tag-date" in cfg: + val = strtobool(str(cfg["tag-date"] or "false")) + yield ("--tag-date" if val else "--no-date") + if "tag-build" in cfg: + yield from ["--tag-build", str(cfg["tag-build"])] + + def _editable_args(self, config_settings: _ConfigSettings) -> Iterator[str]: + """ + The ``editable_wheel`` command accepts ``editable-mode=strict``. 
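A minimal usage sketch of the translator defined above (assuming these private names remain importable from ``setuptools.build_meta``; the settings dict is an illustrative stand-in for what a frontend such as pip might forward via ``--config-settings``):

    from setuptools.build_meta import _ConfigSettingsTranslator

    translator = _ConfigSettingsTranslator()
    cfg = {"verbose": "1", "editable-mode": "strict"}  # hypothetical frontend input
    list(translator._global_args(cfg))    # -> ['-v']
    list(translator._editable_args(cfg))  # -> ['--mode', 'strict']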
- def _get_build_requires(self, config_settings, requirements): - config_settings = self._fix_config(config_settings) + >>> fn = _ConfigSettingsTranslator()._editable_args + >>> list(fn(None)) + [] + >>> list(fn({"editable-mode": "strict"})) + ['--mode', 'strict'] + """ + cfg = config_settings or {} + mode = cfg.get("editable-mode") or cfg.get("editable_mode") + if not mode: + return + yield from ["--mode", str(mode)] - sys.argv = sys.argv[:1] + ['egg_info'] + \ - config_settings["--global-option"] + def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]: + """ + Users may expect to pass arbitrary lists of arguments to a command + via "--global-option" (example provided in PEP 517 of an "escape hatch"). + + >>> fn = _ConfigSettingsTranslator()._arbitrary_args + >>> list(fn(None)) + [] + >>> list(fn({})) + [] + >>> list(fn({'--build-option': 'foo'})) + ['foo'] + >>> list(fn({'--build-option': ['foo']})) + ['foo'] + >>> list(fn({'--build-option': 'foo'})) + ['foo'] + >>> list(fn({'--build-option': 'foo bar'})) + ['foo', 'bar'] + >>> warnings.simplefilter('error', SetuptoolsDeprecationWarning) + >>> list(fn({'--global-option': 'foo'})) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + SetuptoolsDeprecationWarning: ...arguments given via `--global-option`... + """ + args = self._get_config("--global-option", config_settings) + global_opts = self._valid_global_options() + bad_args = [] + + for arg in args: + if arg.strip("-") not in global_opts: + bad_args.append(arg) + yield arg + + yield from self._get_config("--build-option", config_settings) + + if bad_args: + SetuptoolsDeprecationWarning.emit( + "Incompatible `config_settings` passed to build backend.", + f""" + The arguments {bad_args!r} were given via `--global-option`. + Please use `--build-option` instead; + `--global-option` is reserved for flags like `--verbose` or `--quiet`. + """, + due_date=(2023, 9, 26), # Warning introduced in v64.0.1, 11/Aug/2022. + ) + + +class _BuildMetaBackend(_ConfigSettingsTranslator): + def _get_build_requires(self, config_settings, requirements): + sys.argv = [ + *sys.argv[:1], + *self._global_args(config_settings), + "egg_info", + *self._arbitrary_args(config_settings), + ] try: with Distribution.patch(): self.run_setup() @@ -165,66 +329,74 @@ def _get_build_requires(self, config_settings, requirements): def run_setup(self, setup_script='setup.py'): # Note that we can reuse our build directory between calls # Correctness comes first, then optimization later - __file__ = setup_script + __file__ = os.path.abspath(setup_script) __name__ = '__main__' with _open_setup_script(__file__) as f: code = f.read().replace(r'\r\n', r'\n') - exec(compile(code, __file__, 'exec'), locals()) + exec(code, locals()) def get_requires_for_build_wheel(self, config_settings=None): - return self._get_build_requires( - config_settings, requirements=['wheel']) + return self._get_build_requires(config_settings, requirements=['wheel']) def get_requires_for_build_sdist(self, config_settings=None): return self._get_build_requires(config_settings, requirements=[]) - def prepare_metadata_for_build_wheel(self, metadata_directory, - config_settings=None): - sys.argv = sys.argv[:1] + [ - 'dist_info', '--egg-base', metadata_directory] - with no_install_setup_requires(): - self.run_setup() + def _bubble_up_info_directory(self, metadata_directory: str, suffix: str) -> str: + """ + PEP 517 requires that the .dist-info directory be placed in the + metadata_directory. 
To comply, we MUST copy the directory to the root. - dist_info_directory = metadata_directory - while True: - dist_infos = [f for f in os.listdir(dist_info_directory) - if f.endswith('.dist-info')] + Returns the basename of the info directory, e.g. `proj-0.0.0.dist-info`. + """ + info_dir = self._find_info_directory(metadata_directory, suffix) + if not same_path(info_dir.parent, metadata_directory): + shutil.move(str(info_dir), metadata_directory) + # PEP 517 allows other files and dirs to exist in metadata_directory + return info_dir.name - if ( - len(dist_infos) == 0 and - len(_get_immediate_subdirectories(dist_info_directory)) == 1 - ): + def _find_info_directory(self, metadata_directory: str, suffix: str) -> Path: + for parent, dirs, _ in os.walk(metadata_directory): + candidates = [f for f in dirs if f.endswith(suffix)] - dist_info_directory = os.path.join( - dist_info_directory, os.listdir(dist_info_directory)[0]) - continue + if len(candidates) != 0 or len(dirs) != 1: + assert len(candidates) == 1, f"Multiple {suffix} directories found" + return Path(parent, candidates[0]) - assert len(dist_infos) == 1 - break + msg = f"No {suffix} directory found in {metadata_directory}" + raise errors.InternalError(msg) - # PEP 517 requires that the .dist-info directory be placed in the - # metadata_directory. To comply, we MUST copy the directory to the root - if dist_info_directory != metadata_directory: - shutil.move( - os.path.join(dist_info_directory, dist_infos[0]), - metadata_directory) - shutil.rmtree(dist_info_directory, ignore_errors=True) + def prepare_metadata_for_build_wheel(self, metadata_directory, + config_settings=None): + sys.argv = [ + *sys.argv[:1], + *self._global_args(config_settings), + "dist_info", + "--output-dir", metadata_directory, + "--keep-egg-info", + ] + with no_install_setup_requires(): + self.run_setup() - return dist_infos[0] + self._bubble_up_info_directory(metadata_directory, ".egg-info") + return self._bubble_up_info_directory(metadata_directory, ".dist-info") def _build_with_temp_dir(self, setup_command, result_extension, result_directory, config_settings): - config_settings = self._fix_config(config_settings) result_directory = os.path.abspath(result_directory) # Build in a temporary directory, then copy to the target. 
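            # A hedged aside: the PEP 517 build hooks funnel through this
            # helper; for instance ``build_wheel`` essentially reduces to
            #     self._build_with_temp_dir(['bdist_wheel'], '.whl',
            #                               wheel_directory, config_settings)
            # (hook signature per PEP 517; the exact command list is an
            # assumption based on the upstream ``build_wheel`` implementation,
            # which is not shown in this hunk).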
os.makedirs(result_directory, exist_ok=True) - with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir: - sys.argv = (sys.argv[:1] + setup_command + - ['--dist-dir', tmp_dist_dir] + - config_settings["--global-option"]) + temp_opts = {"prefix": ".tmp-", "dir": result_directory} + with tempfile.TemporaryDirectory(**temp_opts) as tmp_dist_dir: + sys.argv = [ + *sys.argv[:1], + *self._global_args(config_settings), + *setup_command, + "--dist-dir", tmp_dist_dir, + *self._arbitrary_args(config_settings), + ] with no_install_setup_requires(): self.run_setup() @@ -249,6 +421,40 @@ def build_sdist(self, sdist_directory, config_settings=None): '.tar.gz', sdist_directory, config_settings) + def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]: + if not metadata_directory: + return None + dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info")) + assert len(dist_info_candidates) <= 1 + return str(dist_info_candidates[0]) if dist_info_candidates else None + + if not LEGACY_EDITABLE: + + # PEP660 hooks: + # build_editable + # get_requires_for_build_editable + # prepare_metadata_for_build_editable + def build_editable( + self, wheel_directory, config_settings=None, metadata_directory=None + ): + # XXX can or should we hide our editable_wheel command normally? + info_dir = self._get_dist_info_dir(metadata_directory) + opts = ["--dist-info-dir", info_dir] if info_dir else [] + cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)] + with suppress_known_deprecation(): + return self._build_with_temp_dir( + cmd, ".whl", wheel_directory, config_settings + ) + + def get_requires_for_build_editable(self, config_settings=None): + return self.get_requires_for_build_wheel(config_settings) + + def prepare_metadata_for_build_editable(self, metadata_directory, + config_settings=None): + return self.prepare_metadata_for_build_wheel( + metadata_directory, config_settings + ) + class _BuildMetaLegacyBackend(_BuildMetaBackend): """Compatibility backend for setuptools @@ -299,6 +505,11 @@ def run_setup(self, setup_script='setup.py'): build_wheel = _BACKEND.build_wheel build_sdist = _BACKEND.build_sdist +if not LEGACY_EDITABLE: + get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable + prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable + build_editable = _BACKEND.build_editable + # The legacy backend __legacy__ = _BuildMetaLegacyBackend() diff --git a/setuptools/command/__init__.py b/setuptools/command/__init__.py index b966dcea57..5acd7687d6 100644 --- a/setuptools/command/__init__.py +++ b/setuptools/command/__init__.py @@ -2,7 +2,11 @@ import sys if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') + try: + bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file") + except TypeError: + # For backward compatibility with older distutils (stdlib) + bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") + bdist.format_commands.append('egg') del bdist, sys diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index 11a1c6be28..33f483cf50 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -11,7 +11,6 @@ import textwrap import marshal -from pkg_resources import get_build_platform, Distribution from setuptools.extension import Library from setuptools import Command from .._path import ensure_directory @@ -64,7 +63,7 @@ class 
bdist_egg(Command): ('bdist-dir=', 'b', "temporary directory for creating the distribution"), ('plat-name=', 'p', "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), + "(by default uses `pkg_resources.get_build_platform()`)"), ('exclude-source-files', None, "remove all .py files from the generated egg"), ('keep-temp', 'k', @@ -98,18 +97,18 @@ def finalize_options(self): self.bdist_dir = os.path.join(bdist_base, 'egg') if self.plat_name is None: + from pkg_resources import get_build_platform + self.plat_name = get_build_platform() self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) if self.egg_output is None: - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() + basename = ei_cmd._get_egg_basename( + py_version=get_python_version(), + platform=self.distribution.has_ext_modules() and self.plat_name, + ) self.egg_output = os.path.join(self.dist_dir, basename + '.egg') diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py index 98bf5dea84..047a6d08c2 100644 --- a/setuptools/command/bdist_rpm.py +++ b/setuptools/command/bdist_rpm.py @@ -1,7 +1,6 @@ import distutils.command.bdist_rpm as orig -import warnings -from setuptools import SetuptoolsDeprecationWarning +from ..warnings import SetuptoolsDeprecationWarning class bdist_rpm(orig.bdist_rpm): @@ -14,10 +13,14 @@ class bdist_rpm(orig.bdist_rpm): """ def run(self): - warnings.warn( - "bdist_rpm is deprecated and will be removed in a future " - "version. Use bdist_wheel (wheel packages) instead.", - SetuptoolsDeprecationWarning, + SetuptoolsDeprecationWarning.emit( + "Deprecated command", + """ + bdist_rpm is deprecated and will be removed in a future version. + Use bdist_wheel (wheel packages) instead. + """, + see_url="https://github.com/pypa/setuptools/issues/1988", + due_date=(2023, 10, 30) # Deprecation introduced in 22 Oct 2021. ) # ensure distro name is up-to-date diff --git a/setuptools/command/build.py b/setuptools/command/build.py new file mode 100644 index 0000000000..0f1d688e17 --- /dev/null +++ b/setuptools/command/build.py @@ -0,0 +1,149 @@ +import sys +from typing import TYPE_CHECKING, List, Dict +from distutils.command.build import build as _build + +from ..warnings import SetuptoolsDeprecationWarning + +if sys.version_info >= (3, 8): + from typing import Protocol +elif TYPE_CHECKING: + from typing_extensions import Protocol +else: + from abc import ABC as Protocol + + +_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"} + + +class build(_build): + # copy to avoid sharing the object with parent class + sub_commands = _build.sub_commands[:] + + def get_sub_commands(self): + subcommands = {cmd[0] for cmd in _build.sub_commands} + if subcommands - _ORIGINAL_SUBCOMMANDS: + SetuptoolsDeprecationWarning.emit( + "Direct usage of `distutils` commands", + """ + It seems that you are using `distutils.command.build` to add + new subcommands. Using `distutils` directly is considered deprecated, + please use `setuptools.command.build`. + """, + due_date=(2023, 12, 13), # Warning introduced in 13 Jun 2022. 
+ see_url="https://peps.python.org/pep-0632/", + ) + self.sub_commands = _build.sub_commands + return super().get_sub_commands() + + +class SubCommand(Protocol): + """In order to support editable installations (see :pep:`660`) all + build subcommands **SHOULD** implement this protocol. They also **MUST** inherit + from ``setuptools.Command``. + + When creating an :pep:`editable wheel <660>`, ``setuptools`` will try to evaluate + custom ``build`` subcommands using the following procedure: + + 1. ``setuptools`` will set the ``editable_mode`` attribute to ``True`` + 2. ``setuptools`` will execute the ``run()`` command. + + .. important:: + Subcommands **SHOULD** take advantage of ``editable_mode=True`` to adapt + their behaviour or perform optimisations. + + For example, if a subcommand doesn't need to generate an extra file and + all it does is copy a source file into the build directory, + ``run()`` **SHOULD** simply "early return". + + Similarly, if the subcommand creates files that would be placed alongside + Python files in the final distribution, during an editable install + the command **SHOULD** generate these files "in place" (i.e. write them to + the original source directory, instead of using the build directory). + Note that ``get_output_mapping()`` should reflect that and include mappings + for "in place" builds accordingly. + + 3. ``setuptools`` uses any knowledge it can derive from the return values of + ``get_outputs()`` and ``get_output_mapping()`` to create an editable wheel. + When relevant, ``setuptools`` **MAY** attempt to use file links based on the value + of ``get_output_mapping()``. Alternatively, ``setuptools`` **MAY** attempt to use + :doc:`import hooks ` to redirect any attempt to import + to the directory with the original source code and other files built in place. + + Please note that custom sub-commands **SHOULD NOT** rely on ``run()`` being + executed (or not) to provide correct return values for ``get_outputs()``, + ``get_output_mapping()`` or ``get_source_files()``. The ``get_*`` methods should + work independently of ``run()``. + """ + + editable_mode: bool = False + """Boolean flag that will be set to ``True`` when setuptools is used for an + editable installation (see :pep:`660`). + Implementations **SHOULD** explicitly set the default value of this attribute to + ``False``. + When subcommands run, they can use this flag to perform optimizations or change + their behaviour accordingly. + """ + + build_lib: str + """String representing the directory where the build artifacts should be stored, + e.g. ``build/lib``. + For example, if a distribution wants to provide a Python module named ``pkg.mod``, + then a corresponding file should be written to ``{build_lib}/pkg/mod.py``. + A way of thinking about this is that the files saved under ``build_lib`` + would be eventually copied to one of the directories in :obj:`site.PREFIXES` + upon installation. + + A command that produces platform-independent files (e.g. compiling text templates + into Python functions) **CAN** initialize ``build_lib`` by copying its value from + the ``build_py`` command. On the other hand, a command that produces + platform-specific files **CAN** initialize ``build_lib`` by copying its value from + the ``build_ext`` command. In general this is done inside the ``finalize_options`` + method with the help of the ``set_undefined_options`` command:: + + def finalize_options(self): + self.set_undefined_options("build_py", ("build_lib", "build_lib")) + ... 
+ """ + + def initialize_options(self): + """(Required by the original :class:`setuptools.Command` interface)""" + + def finalize_options(self): + """(Required by the original :class:`setuptools.Command` interface)""" + + def run(self): + """(Required by the original :class:`setuptools.Command` interface)""" + + def get_source_files(self) -> List[str]: + """ + Return a list of all files that are used by the command to create the expected + outputs. + For example, if your build command transpiles Java files into Python, you should + list here all the Java files. + The primary purpose of this function is to help populating the ``sdist`` + with all the files necessary to build the distribution. + All files should be strings relative to the project root directory. + """ + + def get_outputs(self) -> List[str]: + """ + Return a list of files intended for distribution as they would have been + produced by the build. + These files should be strings in the form of + ``"{build_lib}/destination/file/path"``. + + .. note:: + The return value of ``get_output()`` should include all files used as keys + in ``get_output_mapping()`` plus files that are generated during the build + and don't correspond to any source file already present in the project. + """ + + def get_output_mapping(self) -> Dict[str, str]: + """ + Return a mapping between destination files as they would be produced by the + build (dict keys) into the respective existing (source) files (dict values). + Existing (source) files should be represented as strings relative to the project + root directory. + Destination files should be strings in the form of + ``"{build_lib}/destination/file/path"``. + """ diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py index 67ce2444ea..09483e69e5 100644 --- a/setuptools/command/build_clib.py +++ b/setuptools/command/build_clib.py @@ -28,7 +28,7 @@ def build_libraries(self, libraries): "in 'libraries' option (library '%s'), " "'sources' must be present and must be " "a list of source filenames" % lib_name) - sources = list(sources) + sources = sorted(list(sources)) log.info("building '%s' library", lib_name) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index c59eff8bbf..cbfe3ec1c2 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -2,14 +2,16 @@ import sys import itertools from importlib.machinery import EXTENSION_SUFFIXES +from importlib.util import cache_from_source as _compiled_file_name +from typing import Dict, Iterator, List, Tuple + from distutils.command.build_ext import build_ext as _du_build_ext -from distutils.file_util import copy_file from distutils.ccompiler import new_compiler from distutils.sysconfig import customize_compiler, get_config_var -from distutils.errors import DistutilsError from distutils import log -from setuptools.extension import Library +from setuptools.errors import BaseError +from setuptools.extension import Extension, Library try: # Attempt to use Cython for building extensions, if available @@ -73,6 +75,9 @@ def get_abi3_suffix(): class build_ext(_build_ext): + editable_mode: bool = False + inplace: bool = False + def run(self): """Build extensions in build directory, then copy if --inplace""" old_inplace, self.inplace = self.inplace, 0 @@ -81,27 +86,62 @@ def run(self): if old_inplace: self.copy_extensions_to_source() + def _get_inplace_equivalent(self, build_py, ext: Extension) -> Tuple[str, str]: + fullname = self.get_ext_fullname(ext.name) + filename = 
self.get_ext_filename(fullname) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + inplace_file = os.path.join(package_dir, os.path.basename(filename)) + regular_file = os.path.join(self.build_lib, filename) + return (inplace_file, regular_file) + def copy_extensions_to_source(self): build_py = self.get_finalized_command('build_py') for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir, - os.path.basename(filename)) - src_filename = os.path.join(self.build_lib, filename) + inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext) # Always copy, even if source is older than destination, to ensure # that the right extensions for the current Python/platform are # used. - copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) + if os.path.exists(regular_file) or not ext.optional: + self.copy_file(regular_file, inplace_file, level=self.verbose) + if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) + inplace_stub = self._get_equivalent_stub(ext, inplace_file) + self._write_stub_file(inplace_stub, ext, compile=True) + # Always compile stub and remove the original (leave the cache behind) + # (this behaviour was observed in previous iterations of the code) + + def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str: + dir_ = os.path.dirname(output_file) + _, _, name = ext.name.rpartition(".") + return f"{os.path.join(dir_, name)}.py" + + def _get_output_mapping(self) -> Iterator[Tuple[str, str]]: + if not self.inplace: + return + + build_py = self.get_finalized_command('build_py') + opt = self.get_finalized_command('install_lib').optimize or "" + + for ext in self.extensions: + inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext) + yield (regular_file, inplace_file) + + if ext._needs_stub: + # This version of `build_ext` always builds artifacts in another dir; + # when "inplace=True" is given it just copies them back. + # This is done in the `copy_extensions_to_source` function, which + # always compiles stub files via `_compile_and_remove_stub`. + # At the end of the process, a `.pyc` stub file is created without the + # corresponding `.py`. 
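            # A hedged illustration of the cache naming involved here
            # (assuming CPython 3.11 and the default cache layout):
            #     _compiled_file_name("pkg/mod.py", optimization="")
            #     -> "pkg/__pycache__/mod.cpython-311.pyc"
            # where ``_compiled_file_name`` is the alias for
            # ``importlib.util.cache_from_source`` imported at the top of this file.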
+ + inplace_stub = self._get_equivalent_stub(ext, inplace_file) + regular_stub = self._get_equivalent_stub(ext, regular_file) + inplace_cache = _compiled_file_name(inplace_stub, optimization=opt) + output_cache = _compiled_file_name(regular_stub, optimization=opt) + yield (output_cache, inplace_cache) def get_ext_filename(self, fullname): so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX') @@ -131,6 +171,7 @@ def initialize_options(self): self.shlib_compiler = None self.shlibs = [] self.ext_map = {} + self.editable_mode = False def finalize_options(self): _build_ext.finalize_options(self) @@ -161,6 +202,9 @@ def finalize_options(self): if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: ext.runtime_library_dirs.append(os.curdir) + if self.editable_mode: + self.inplace = True + def setup_shlib_compiler(self): compiler = self.shlib_compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force @@ -201,8 +245,8 @@ def build_extension(self, ext): self.compiler = self.shlib_compiler _build_ext.build_extension(self, ext) if ext._needs_stub: - cmd = self.get_finalized_command('build_py').build_lib - self.write_stub(cmd, ext) + build_lib = self.get_finalized_command('build_py').build_lib + self.write_stub(build_lib, ext) finally: self.compiler = _compiler @@ -215,8 +259,15 @@ def links_to_dynamic(self, ext): pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) return any(pkg + libname in libnames for libname in ext.libraries) - def get_outputs(self): - return _build_ext.get_outputs(self) + self.__get_stubs_outputs() + def get_outputs(self) -> List[str]: + if self.inplace: + return list(self.get_output_mapping().keys()) + return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs()) + + def get_output_mapping(self) -> Dict[str, str]: + """See :class:`setuptools.commands.build.SubCommand`""" + mapping = self._get_output_mapping() + return dict(sorted(mapping, key=lambda x: x[0])) def __get_stubs_outputs(self): # assemble the base name for each extension that needs a stub @@ -236,12 +287,13 @@ def __get_output_extensions(self): yield '.pyo' def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s", ext._full_name, - output_dir) - stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + - '.py') + stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py' + self._write_stub_file(stub_file, ext, compile) + + def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): + log.info("writing stub loader for %s to %s", ext._full_name, stub_file) if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file + " already exists! Please delete.") + raise BaseError(stub_file + " already exists! 
Please delete.") if not self.dry_run: f = open(stub_file, 'w') f.write( @@ -274,16 +326,19 @@ def write_stub(self, output_dir, ext, compile=False): ) f.close() if compile: - from distutils.util import byte_compile + self._compile_and_remove_stub(stub_file) + + def _compile_and_remove_stub(self, stub_file: str): + from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) if use_stubs or os.name == 'nt': diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index c3fdc0927c..f094496e11 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -1,3 +1,4 @@ +from functools import partial from glob import glob from distutils.util import convert_path import distutils.command.build_py as orig @@ -8,7 +9,11 @@ import distutils.errors import itertools import stat -from setuptools.extern.more_itertools import unique_everseen +from pathlib import Path +from typing import Dict, Iterable, Iterator, List, Optional, Tuple + +from ..extern.more_itertools import unique_everseen +from ..warnings import SetuptoolsDeprecationWarning def make_writable(target): @@ -24,6 +29,8 @@ class build_py(orig.build_py): Also, this version of the 'build_py' command allows you to specify both 'py_modules' and 'packages' in the same setup operation. """ + editable_mode: bool = False + existing_egg_info_dir: Optional[str] = None #: Private API, internal use only. 
def finalize_options(self): orig.build_py.finalize_options(self) @@ -33,9 +40,18 @@ def finalize_options(self): del self.__dict__['data_files'] self.__updated_files = [] + def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, + link=None, level=1): + # Overwrite base class to allow using links + if link: + infile = str(Path(infile).resolve()) + outfile = str(Path(outfile).resolve()) + return super().copy_file(infile, outfile, preserve_mode, preserve_times, + link, level) + def run(self): """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: + if not (self.py_modules or self.packages) or self.editable_mode: return if self.py_modules: @@ -98,7 +114,7 @@ def find_data_files(self, package, src_dir): package, src_dir, ) - globs_expanded = map(glob, patterns) + globs_expanded = map(partial(glob, recursive=True), patterns) # flatten the expanded globs into an iterable of matches globs_matches = itertools.chain.from_iterable(globs_expanded) glob_files = filter(os.path.isfile, globs_matches) @@ -108,16 +124,41 @@ def find_data_files(self, package, src_dir): ) return self.exclude_data_files(package, src_dir, files) - def build_package_data(self): - """Copy data files into build directory""" + def get_outputs(self, include_bytecode=1) -> List[str]: + """See :class:`setuptools.commands.build.SubCommand`""" + if self.editable_mode: + return list(self.get_output_mapping().keys()) + return super().get_outputs(include_bytecode) + + def get_output_mapping(self) -> Dict[str, str]: + """See :class:`setuptools.commands.build.SubCommand`""" + mapping = itertools.chain( + self._get_package_data_output_mapping(), + self._get_module_mapping(), + ) + return dict(sorted(mapping, key=lambda x: x[0])) + + def _get_module_mapping(self) -> Iterator[Tuple[str, str]]: + """Iterate over all modules producing (dest, src) pairs.""" + for (package, module, module_file) in self.find_all_modules(): + package = package.split('.') + filename = self.get_module_outfile(self.build_lib, package, module) + yield (filename, module_file) + + def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]: + """Iterate over package data producing (dest, src) pairs.""" for package, src_dir, build_dir, filenames in self.data_files: for filename in filenames: target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - make_writable(target) - srcfile = os.path.abspath(srcfile) + yield (target, srcfile) + + def build_package_data(self): + """Copy data files into build directory""" + for target, srcfile in self._get_package_data_output_mapping(): + self.mkpath(os.path.dirname(target)) + _outf, _copied = self.copy_file(srcfile, target) + make_writable(target) def analyze_manifest(self): self.manifest_files = mf = {} @@ -128,9 +169,21 @@ def analyze_manifest(self): # Locate package source directory src_dirs[assert_relative(self.get_package_dir(package))] = package - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: + if ( + getattr(self, 'existing_egg_info_dir', None) + and Path(self.existing_egg_info_dir, "SOURCES.txt").exists() + ): + egg_info_dir = self.existing_egg_info_dir + manifest = Path(egg_info_dir, "SOURCES.txt") + files = manifest.read_text(encoding="utf-8").splitlines() + else: + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + 
egg_info_dir = ei_cmd.egg_info + files = ei_cmd.filelist.files + + check = _IncludePackageDataAbuse() + for path in self._filter_build_files(files, egg_info_dir): d, f = os.path.split(assert_relative(path)) prev = None oldf = f @@ -139,10 +192,34 @@ d, df = os.path.split(d) f = os.path.join(df, f) if d in src_dirs: - if path.endswith('.py') and f == oldf: - continue # it's a module, not data + if f == oldf: + if check.is_module(f): + continue # it's a module, not data + else: + importable = check.importable_subpackage(src_dirs[d], f) + if importable: + check.warn(importable) mf.setdefault(src_dirs[d], []).append(path) + def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[str]: + """ + ``build_meta`` may try to create egg_info outside of the project directory, + and this can be problematic for certain plugins (reported in issue #3500). + + Extensions might also include, among their sources, files created in the + ``build_lib`` and ``build_temp`` directories. + + This function filters these invalid files out. + """ + build = self.get_finalized_command("build") + build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base) + norm_dirs = [os.path.normpath(p) for p in build_dirs if p] + + for file in files: + norm_path = os.path.normpath(file) + if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs): + yield file + def get_data_files(self): pass # Lazily compute data files in _get_data_files() function. @@ -179,6 +256,8 @@ def check_package(self, package, package_dir): def initialize_options(self): self.packages_checked = {} orig.build_py.initialize_options(self) + self.editable_mode = False + self.existing_egg_info_dir = None def get_package_dir(self, package): res = orig.build_py.get_package_dir(self, package) @@ -240,3 +319,68 @@ def assert_relative(path): % path ) raise DistutilsSetupError(msg) + + +class _IncludePackageDataAbuse: + """Inform users that package or module is included as 'data file'""" + + class _Warning(SetuptoolsDeprecationWarning): + _SUMMARY = """ + Package {importable!r} is absent from the `packages` configuration. + """ + + _DETAILS = """ + ############################ + # Package would be ignored # + ############################ + Python recognizes {importable!r} as an importable package[^1], + but it is absent from setuptools' `packages` configuration. + + This leads to an ambiguous overall configuration. If you want to distribute this + package, please make sure that {importable!r} is explicitly added + to the `packages` configuration field. + + Alternatively, you can also rely on setuptools' discovery methods + (for example by using `find_namespace_packages(...)`/`find_namespace:` + instead of `find_packages(...)`/`find:`). + + You can read more about "package discovery" on setuptools documentation page: + + - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html + + If you don't want {importable!r} to be distributed and are + already explicitly excluding {importable!r} via + `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`, + you can try to use `exclude_package_data`, or `include-package-data=False` in + combination with a more fine-grained `package-data` configuration. 
+ + You can read more about "package data files" on setuptools documentation page: + + - https://setuptools.pypa.io/en/latest/userguide/datafiles.html + + + [^1]: For Python, any directory (with suitable naming) can be imported, + even if it does not contain any `.py` files. + On the other hand, currently there is no concept of package data + directory, all directories are treated like packages. + """ + # _DUE_DATE: still not defined as this is particularly controversial. + # Warning initially introduced in May 2022. See issue #3340 for discussion. + + def __init__(self): + self._already_warned = set() + + def is_module(self, file): + return file.endswith(".py") and file[:-len(".py")].isidentifier() + + def importable_subpackage(self, parent, file): + pkg = Path(file).parent + parts = list(itertools.takewhile(str.isidentifier, pkg.parts)) + if parts: + return ".".join([parent, *parts]) + return None + + def warn(self, importable): + if importable not in self._already_warned: + self._Warning.emit(importable=importable) + self._already_warned.add(importable) diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index 24fb0a7c81..5630ca4cdc 100644 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -5,8 +5,8 @@ import glob import io -import pkg_resources from setuptools.command.easy_install import easy_install +from setuptools import _path from setuptools import namespaces import setuptools @@ -42,6 +42,8 @@ def initialize_options(self): self.always_copy_from = '.' # always copy eggs installed in curdir def finalize_options(self): + import pkg_resources + ei = self.get_finalized_command("egg_info") if ei.broken_egg_info: template = "Please rename %r to %r before using 'develop'" @@ -61,10 +63,8 @@ def finalize_options(self): if self.egg_path is None: self.egg_path = os.path.abspath(ei.egg_base) - target = pkg_resources.normalize_path(self.egg_base) - egg_path = pkg_resources.normalize_path( - os.path.join(self.install_dir, self.egg_path) - ) + target = _path.normpath(self.egg_base) + egg_path = _path.normpath(os.path.join(self.install_dir, self.egg_path)) if egg_path != target: raise DistutilsOptionError( "--egg-path must be a relative path from the install" @@ -94,15 +94,16 @@ def _resolve_setup_path(egg_base, install_dir, egg_path): path_to_setup = egg_base.replace(os.sep, '/').rstrip('/') if path_to_setup != os.curdir: path_to_setup = '../' * (path_to_setup.count('/') + 1) - resolved = pkg_resources.normalize_path( + resolved = _path.normpath( os.path.join(install_dir, egg_path, path_to_setup) ) - if resolved != pkg_resources.normalize_path(os.curdir): + curdir = _path.normpath(os.curdir) + if resolved != curdir: raise DistutilsOptionError( "Can't get a consistent path to setup script from" " installation directory", resolved, - pkg_resources.normalize_path(os.curdir), + curdir, ) return path_to_setup diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index c45258fa03..99d3976dc5 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -4,33 +4,116 @@ """ import os - -from distutils.core import Command +import shutil +import sys +from contextlib import contextmanager from distutils import log +from distutils.core import Command +from pathlib import Path + +from .. 
import _normalization +from ..warnings import SetuptoolsDeprecationWarning class dist_info(Command): + """ + This command is private and reserved for internal use of setuptools, + users should rely on ``setuptools.build_meta`` APIs. + """ - description = 'create a .dist-info directory' + description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create .dist-info directory" user_options = [ ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), + " (default: top of the source tree)" + " DEPRECATED: use --output-dir."), + ('output-dir=', 'o', "directory inside of which the .dist-info will be" + " created (default: top of the source tree)"), + ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-date', 'D', "Don't include date stamp [default]"), + ('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"), ] + boolean_options = ['tag-date', 'keep-egg-info'] + negative_opt = {'no-date': 'tag-date'} + def initialize_options(self): self.egg_base = None + self.output_dir = None + self.name = None + self.dist_info_dir = None + self.tag_date = None + self.tag_build = None + self.keep_egg_info = False def finalize_options(self): - pass + if self.egg_base: + msg = "--egg-base is deprecated for dist_info command. Use --output-dir." + SetuptoolsDeprecationWarning.emit(msg, due_date=(2023, 9, 26)) + # This command is internal to setuptools, therefore it should be safe + # to remove the deprecated support soon. + self.output_dir = self.egg_base or self.output_dir + + dist = self.distribution + project_dir = dist.src_root or os.curdir + self.output_dir = Path(self.output_dir or project_dir) + + egg_info = self.reinitialize_command("egg_info") + egg_info.egg_base = str(self.output_dir) + + if self.tag_date: + egg_info.tag_date = self.tag_date + else: + self.tag_date = egg_info.tag_date + + if self.tag_build: + egg_info.tag_build = self.tag_build + else: + self.tag_build = egg_info.tag_build - def run(self): - egg_info = self.get_finalized_command('egg_info') - egg_info.egg_base = self.egg_base egg_info.finalize_options() - egg_info.run() - dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info' - log.info("creating '{}'".format(os.path.abspath(dist_info_dir))) + self.egg_info = egg_info + name = _normalization.safer_name(dist.get_name()) + version = _normalization.safer_best_effort_version(dist.get_version()) + self.name = f"{name}-{version}" + self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info") + + @contextmanager + def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool): + if requires_bkp: + bkp_name = f"{dir_path}.__bkp__" + _rm(bkp_name, ignore_errors=True) + _copy(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True) + try: + yield + finally: + _rm(dir_path, ignore_errors=True) + shutil.move(bkp_name, dir_path) + else: + yield + + def run(self): + self.output_dir.mkdir(parents=True, exist_ok=True) + self.egg_info.run() + egg_info_dir = self.egg_info.egg_info + assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created" + + log.info("creating '{}'".format(os.path.abspath(self.dist_info_dir))) bdist_wheel = self.get_finalized_command('bdist_wheel') + + # TODO: if bdist_wheel is merged into setuptools, just add "keep_egg_info" there + with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info): + bdist_wheel.egg2dist(egg_info_dir, 
self.dist_info_dir) + + +def _rm(dir_name, **opts): + if os.path.isdir(dir_name): + shutil.rmtree(dir_name, **opts) + + +def _copy(src, dst, **opts): + if sys.version_info < (3, 8): + opts.pop("dirs_exist_ok", None) + shutil.copytree(src, dst, **opts) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 21785e4cb1..16cf264fab 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -44,8 +44,6 @@ from sysconfig import get_path -from setuptools import SetuptoolsDeprecationWarning - from setuptools import Command from setuptools.sandbox import run_setup from setuptools.command import setopt @@ -54,6 +52,7 @@ PackageIndex, parse_requirement_arg, URL_SCHEME, ) from setuptools.command import bdist_egg, egg_info +from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning from setuptools.wheel import Wheel from pkg_resources import ( normalize_path, resource_string, @@ -62,6 +61,7 @@ VersionConflict, DEVELOP_DIST, ) import pkg_resources +from .. import py312compat from .._path import ensure_directory from ..extern.jaraco.text import yield_lines @@ -70,7 +70,7 @@ warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) __all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'easy_install', 'PthDistributions', 'extract_wininst_cfg', 'get_exe_prefixes', ] @@ -79,22 +79,6 @@ def is_64bit(): return struct.calcsize("P") == 8 -def samefile(p1, p2): - """ - Determine if two paths reference the same file. - - Augments os.path.samefile to work on Windows and - suppresses errors if the path doesn't exist. - """ - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - - def _to_bytes(s): return s.encode('utf8') @@ -157,11 +141,7 @@ class easy_install(Command): create_index = PackageIndex def initialize_options(self): - warnings.warn( - "easy_install command is deprecated. " - "Use build and pip and other standards-based tools.", - EasyInstallDeprecationWarning, - ) + EasyInstallDeprecationWarning.emit() # the --user option seems to be an opt-in one, # so the default should be False. 
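A hedged sketch of the ``.emit()`` API that these hunks migrate to (keyword arguments mirror the call sites elsewhere in this diff; the summary/details strings below are illustrative only):

    from setuptools.warnings import SetuptoolsDeprecationWarning

    SetuptoolsDeprecationWarning.emit(
        "Deprecated command",            # short summary
        "Use bdist_wheel instead.",      # longer details block
        see_url="https://github.com/pypa/setuptools/issues/1988",
        due_date=(2023, 10, 30),         # when the deprecation is due
    )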
@@ -185,12 +165,8 @@ def initialize_options(self): self.install_data = None self.install_base = None self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE self.no_find_links = None # Options not specifiable via command line @@ -222,7 +198,7 @@ def _delete_path(self, path): return is_tree = os.path.isdir(path) and not os.path.islink(path) - remover = rmtree if is_tree else os.unlink + remover = _rmtree if is_tree else os.unlink remover(path) @staticmethod @@ -269,11 +245,9 @@ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME getattr(sys, 'windir', '').replace('.', ''), ) - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - elif self.user: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + if self.user and not site.ENABLE_USER_SITE: log.warn("WARNING: The user site-packages directory is disabled.") self._fix_install_dir_for_user_site() @@ -309,27 +283,14 @@ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME self.script_dir = self.install_scripts # default --record from the install command self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? It's not used - # elsewhere - normpath = map(normalize_path, sys.path) self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in - self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d + " (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) + self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs)) + if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "https://pypi.org/simple/" + default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/") + # ^ Private API for testing purposes only + self.index_url = self.index_url or default_index self.shadow_path = self.all_site_dirs[:] for path_item in self.install_dir, normalize_path(self.script_dir): if path_item not in self.shadow_path: @@ -355,15 +316,7 @@ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME if not self.no_find_links: self.package_index.add_find_links(self.find_links) self.set_undefined_options('install_lib', ('optimize', 'optimize')) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): - raise ValueError - except ValueError as e: - raise DistutilsOptionError( - "--optimize must be 0, 1, or 2" - ) from e + self.optimize = self._validate_optimize(self.optimize) if self.editable and not self.build_directory: raise DistutilsArgError( @@ -375,11 +328,44 @@ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME self.outputs = [] + @staticmethod + def _process_site_dirs(site_dirs): + if site_dirs is None: + return + + normpath = map(normalize_path, sys.path) + site_dirs = [ + os.path.expanduser(s.strip()) for s in + site_dirs.split(',') + ] + for d in site_dirs: + if not os.path.isdir(d): + log.warn("%s (in 
--site-dirs) does not exist", d) + elif normalize_path(d) not in normpath: + raise DistutilsOptionError( + d + " (in --site-dirs) is not on sys.path" + ) + else: + yield normalize_path(d) + + @staticmethod + def _validate_optimize(value): + try: + value = int(value) + if value not in range(3): + raise ValueError + except ValueError as e: + raise DistutilsOptionError( + "--optimize must be 0, 1, or 2" + ) from e + + return value + def _fix_install_dir_for_user_site(self): """ Fix the install_dir if "--user" was used. """ - if not self.user or not site.ENABLE_USER_SITE: + if not self.user: return self.create_home_path() @@ -655,7 +641,7 @@ def _tmpdir(self): # cast to str as workaround for #709 and #710 and #712 yield str(tmpdir) finally: - os.path.exists(tmpdir) and rmtree(tmpdir) + os.path.exists(tmpdir) and _rmtree(tmpdir) def easy_install(self, spec, deps=False): with self._tmpdir() as tmpdir: @@ -928,7 +914,9 @@ def install_egg(self, egg_path, tmpdir): # noqa: C901 ensure_directory(destination) dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): + if not ( + os.path.exists(destination) and os.path.samefile(egg_path, destination) + ): if os.path.isdir(destination) and not os.path.islink(destination): dir_util.remove_tree(destination, dry_run=self.dry_run) elif os.path.exists(destination): @@ -1190,7 +1178,7 @@ def build_and_install(self, setup_script, setup_base): dist_dir) return eggs finally: - rmtree(dist_dir) + _rmtree(dist_dir) log.set_verbosity(self.verbose) # restore our log verbosity def _set_fetcher_options(self, base): @@ -1687,14 +1675,14 @@ def add(self, dist): if new_path: self.paths.append(dist.location) self.dirty = True - Environment.add(self, dist) + super().add(dist) def remove(self, dist): """Remove `dist` from the distribution map""" while dist.location in self.paths: self.paths.remove(dist.location) self.dirty = True - Environment.remove(self, dist) + super().remove(dist) def make_relative(self, path): npath, last = os.path.split(normalize_path(path)) @@ -1885,7 +1873,7 @@ def _update_zipimporter_cache(normalized_path, cache, updater=None): # get/del patterns instead. For more detailed information see the # following links: # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 - # http://bit.ly/2h9itJX + # https://foss.heptapod.net/pypy/pypy/-/blob/144c4e65cb6accb8e592f3a7584ea38265d1873c/pypy/module/zipimport/interp_zipimport.py old_entry = cache[p] del cache[p] new_entry = updater and updater(p, old_entry) @@ -2130,7 +2118,8 @@ def importlib_load_entry_point(spec, group, name): @classmethod def get_script_args(cls, dist, executable=None, wininst=False): # for backward compatibility - warnings.warn("Use get_args", EasyInstallDeprecationWarning) + EasyInstallDeprecationWarning.emit("Use get_args", due_date=(2023, 6, 1)) + # This is a direct API call, it should be safe to remove soon. writer = (WindowsScriptWriter if wininst else ScriptWriter).best() header = cls.get_script_header("", executable, wininst) return writer.get_args(dist, header) @@ -2138,8 +2127,8 @@ def get_script_args(cls, dist, executable=None, wininst=False): @classmethod def get_script_header(cls, script_text, executable=None, wininst=False): # for backward compatibility - warnings.warn( - "Use get_header", EasyInstallDeprecationWarning, stacklevel=2) + EasyInstallDeprecationWarning.emit("Use get_header", due_date=(2023, 6, 1)) + # This is a direct API call, it should be safe to remove soon. 
if wininst: executable = "python.exe" return cls.get_header(script_text, executable) @@ -2174,7 +2163,8 @@ def _ensure_safe_name(name): @classmethod def get_writer(cls, force_windows): # for backward compatibility - warnings.warn("Use best", EasyInstallDeprecationWarning) + EasyInstallDeprecationWarning.emit("Use best", due_date=(2023, 6, 1)) + # This is a direct API call, it should be safe to remove soon. return WindowsScriptWriter.best() if force_windows else cls.best() @classmethod @@ -2206,7 +2196,8 @@ class WindowsScriptWriter(ScriptWriter): @classmethod def get_writer(cls): # for backward compatibility - warnings.warn("Use best", EasyInstallDeprecationWarning) + EasyInstallDeprecationWarning.emit("Use best", due_date=(2023, 6, 1)) + # This is a direct API call, it should be safe to remove soon. return cls.best() @classmethod @@ -2231,7 +2222,7 @@ def _get_script_args(cls, type_, name, header, script_text): "{ext} not listed in PATHEXT; scripts will not be " "recognized as executables." ).format(**locals()) - warnings.warn(msg, UserWarning) + SetuptoolsWarning.emit(msg) old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] old.remove(ext) header = cls._adjust_header(type_, header) @@ -2325,8 +2316,8 @@ def load_launcher_manifest(name): return manifest.decode('utf-8') % vars() -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - return shutil.rmtree(path, ignore_errors, onerror) +def _rmtree(path, ignore_errors=False, onexc=auto_chmod): - return py312compat.shutil_rmtree(path, ignore_errors, onexc) def current_umask(): @@ -2343,6 +2334,11 @@ def only_strs(values): class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): + _SUMMARY = "easy_install command is deprecated." + _DETAILS = """ + Please avoid running ``setup.py`` and ``easy_install``. + Instead, use pypa/build, pypa/installer or + other standards-based tools. """ - Warning for EasyInstall deprecations, bypassing suppression. - """ + _SEE_URL = "https://github.com/pypa/setuptools/issues/917" + # _DUE_DATE not defined yet diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py new file mode 100644 index 0000000000..ffcc2cc0e6 --- /dev/null +++ b/setuptools/command/editable_wheel.py @@ -0,0 +1,857 @@ +""" +Create a wheel that, when installed, will make the source package 'editable' +(add it to the interpreter's path, including metadata) per PEP 660. Replaces +'setup.py develop'. + +.. note:: + One of the mechanisms briefly mentioned in PEP 660 to implement editable installs is + to create a separate directory inside ``build`` and use a .pth file to point to that + directory. In the context of this file, such a directory is referred to as an + *auxiliary build directory* or ``auxiliary_dir``. +""" + +import logging +import os +import shutil +import sys +import traceback +from contextlib import suppress +from enum import Enum +from inspect import cleandoc +from itertools import chain +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import ( + TYPE_CHECKING, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Tuple, + TypeVar, + Union, +) + +from .. 
import ( + Command, + _normalization, + _path, + errors, + namespaces, +) +from ..discovery import find_package_path +from ..dist import Distribution +from ..warnings import ( + InformationOnly, + SetuptoolsDeprecationWarning, + SetuptoolsWarning, +) +from .build_py import build_py as build_py_cls + +if TYPE_CHECKING: + from wheel.wheelfile import WheelFile # noqa + +if sys.version_info >= (3, 8): + from typing import Protocol +elif TYPE_CHECKING: + from typing_extensions import Protocol +else: + from abc import ABC as Protocol + +_Path = Union[str, Path] +_P = TypeVar("_P", bound=_Path) +_logger = logging.getLogger(__name__) + + +class _EditableMode(Enum): + """ + Possible editable installation modes: + `lenient` (new files automatically added to the package - DEFAULT); + `strict` (requires a new installation when files are added/removed); or + `compat` (attempts to emulate `python setup.py develop` - DEPRECATED). + """ + + STRICT = "strict" + LENIENT = "lenient" + COMPAT = "compat" # TODO: Remove `compat` after Dec/2022. + + @classmethod + def convert(cls, mode: Optional[str]) -> "_EditableMode": + if not mode: + return _EditableMode.LENIENT # default + + _mode = mode.upper() + if _mode not in _EditableMode.__members__: + raise errors.OptionError(f"Invalid editable mode: {mode!r}. Try: 'strict'.") + + if _mode == "COMPAT": + SetuptoolsDeprecationWarning.emit( + "Compat editable installs", + """ + The 'compat' editable mode is transitional and will be removed + in future versions of `setuptools`. + Please adapt your code accordingly to use either the 'strict' or the + 'lenient' modes. + """, + see_docs="userguide/development_mode.html", + # TODO: define due_date + # There is a series of shortcomings with the available editable install + # methods, and they are very controversial. This is something that still + # needs work. + # Moreover, `pip` is still hiding this warning, so users are not aware. + ) + + return _EditableMode[_mode] + + +_STRICT_WARNING = """ +New or renamed files may not be automatically picked up without a new installation. +""" + +_LENIENT_WARNING = """ +Options like `package-data`, `include/exclude-package-data` or +`packages.find.exclude/include` may have no effect. +""" + + +class editable_wheel(Command): + """Build 'editable' wheel for development. + This command is private and reserved for internal use of setuptools, + users should rely on ``setuptools.build_meta`` APIs. 
+ """ + + description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create PEP 660 editable wheel" + + user_options = [ + ("dist-dir=", "d", "directory to put final built distributions in"), + ("dist-info-dir=", "I", "path to a pre-build .dist-info directory"), + ("mode=", None, cleandoc(_EditableMode.__doc__ or "")), + ] + + def initialize_options(self): + self.dist_dir = None + self.dist_info_dir = None + self.project_dir = None + self.mode = None + + def finalize_options(self): + dist = self.distribution + self.project_dir = dist.src_root or os.curdir + self.package_dir = dist.package_dir or {} + self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist")) + + def run(self): + try: + self.dist_dir.mkdir(exist_ok=True) + self._ensure_dist_info() + + # Add missing dist_info files + self.reinitialize_command("bdist_wheel") + bdist_wheel = self.get_finalized_command("bdist_wheel") + bdist_wheel.write_wheelfile(self.dist_info_dir) + + self._create_wheel_file(bdist_wheel) + except Exception: + traceback.print_exc() + project = self.distribution.name or self.distribution.get_name() + _DebuggingTips.emit(project=project) + raise + + def _ensure_dist_info(self): + if self.dist_info_dir is None: + dist_info = self.reinitialize_command("dist_info") + dist_info.output_dir = self.dist_dir + dist_info.ensure_finalized() + dist_info.run() + self.dist_info_dir = dist_info.dist_info_dir + else: + assert str(self.dist_info_dir).endswith(".dist-info") + assert Path(self.dist_info_dir, "METADATA").exists() + + def _install_namespaces(self, installation_dir, pth_prefix): + # XXX: Only required to support the deprecated namespace practice + dist = self.distribution + if not dist.namespace_packages: + return + + src_root = Path(self.project_dir, self.package_dir.get("", ".")).resolve() + installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root) + installer.install_namespaces() + + def _find_egg_info_dir(self) -> Optional[str]: + parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path() + candidates = map(str, parent_dir.glob("*.egg-info")) + return next(candidates, None) + + def _configure_build( + self, name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path + ): + """Configure commands to behave in the following ways: + + - Build commands can write to ``build_lib`` if they really want to... + (but this folder is expected to be ignored and modules are expected to live + in the project directory...) + - Binary extensions should be built in-place (editable_mode = True) + - Data/header/script files are not part of the "editable" specification + so they are written directly to the unpacked_wheel directory. 
+ """ + # Non-editable files (data, headers, scripts) are written directly to the + # unpacked_wheel + + dist = self.distribution + wheel = str(unpacked_wheel) + build_lib = str(build_lib) + data = str(Path(unpacked_wheel, f"{name}.data", "data")) + headers = str(Path(unpacked_wheel, f"{name}.data", "headers")) + scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts")) + + # egg-info may be generated again to create a manifest (used for package data) + egg_info = dist.reinitialize_command("egg_info", reinit_subcommands=True) + egg_info.egg_base = str(tmp_dir) + egg_info.ignore_egg_info_in_manifest = True + + build = dist.reinitialize_command("build", reinit_subcommands=True) + install = dist.reinitialize_command("install", reinit_subcommands=True) + + build.build_platlib = build.build_purelib = build.build_lib = build_lib + install.install_purelib = install.install_platlib = install.install_lib = wheel + install.install_scripts = build.build_scripts = scripts + install.install_headers = headers + install.install_data = data + + install_scripts = dist.get_command_obj("install_scripts") + install_scripts.no_ep = True + + build.build_temp = str(tmp_dir) + + build_py = dist.get_command_obj("build_py") + build_py.compile = False + build_py.existing_egg_info_dir = self._find_egg_info_dir() + + self._set_editable_mode() + + build.ensure_finalized() + install.ensure_finalized() + + def _set_editable_mode(self): + """Set the ``editable_mode`` flag in the build sub-commands""" + dist = self.distribution + build = dist.get_command_obj("build") + for cmd_name in build.get_sub_commands(): + cmd = dist.get_command_obj(cmd_name) + if hasattr(cmd, "editable_mode"): + cmd.editable_mode = True + elif hasattr(cmd, "inplace"): + cmd.inplace = True # backward compatibility with distutils + + def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]: + files: List[str] = [] + mapping: Dict[str, str] = {} + build = self.get_finalized_command("build") + + for cmd_name in build.get_sub_commands(): + cmd = self.get_finalized_command(cmd_name) + if hasattr(cmd, "get_outputs"): + files.extend(cmd.get_outputs() or []) + if hasattr(cmd, "get_output_mapping"): + mapping.update(cmd.get_output_mapping() or {}) + + return files, mapping + + def _run_build_commands( + self, dist_name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path + ) -> Tuple[List[str], Dict[str, str]]: + self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir) + self._run_build_subcommands() + files, mapping = self._collect_build_outputs() + self._run_install("headers") + self._run_install("scripts") + self._run_install("data") + return files, mapping + + def _run_build_subcommands(self): + """ + Issue #3501 indicates that some plugins/customizations might rely on: + + 1. ``build_py`` not running + 2. ``build_py`` always copying files to ``build_lib`` + + However both these assumptions may be false in editable_wheel. + This method implements a temporary workaround to support the ecosystem + while the implementations catch up. + """ + # TODO: Once plugins/customisations had the chance to catch up, replace + # `self._run_build_subcommands()` with `self.run_command("build")`. + # Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023. 
+ build: Command = self.get_finalized_command("build") + for name in build.get_sub_commands(): + cmd = self.get_finalized_command(name) + if name == "build_py" and type(cmd) != build_py_cls: + self._safely_run(name) + else: + self.run_command(name) + + def _safely_run(self, cmd_name: str): + try: + return self.run_command(cmd_name) + except Exception: + SetuptoolsDeprecationWarning.emit( + "Customization incompatible with editable install", + f""" + {traceback.format_exc()} + + If you are seeing this warning it is very likely that a setuptools + plugin or customization overrides the `{cmd_name}` command, without + taking into consideration how editable installs run build steps + starting from setuptools v64.0.0. + + Plugin authors and developers relying on custom build steps are + encouraged to update their `{cmd_name}` implementation considering the + information about editable installs in + https://setuptools.pypa.io/en/latest/userguide/extension.html. + + For the time being `setuptools` will silence this error and ignore + the faulty command, but this behaviour will change in future versions. + """, + # TODO: define due_date + # There is a series of shortcomings with the available editable install + # methods, and they are very controversial. This is something that still + # needs work. + ) + + def _create_wheel_file(self, bdist_wheel): + from wheel.wheelfile import WheelFile + + dist_info = self.get_finalized_command("dist_info") + dist_name = dist_info.name + tag = "-".join(bdist_wheel.get_tag()) + build_tag = "0.editable" # According to PEP 427 needs to start with digit + archive_name = f"{dist_name}-{build_tag}-{tag}.whl" + wheel_path = Path(self.dist_dir, archive_name) + if wheel_path.exists(): + wheel_path.unlink() + + unpacked_wheel = TemporaryDirectory(suffix=archive_name) + build_lib = TemporaryDirectory(suffix=".build-lib") + build_tmp = TemporaryDirectory(suffix=".build-temp") + + with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp: + unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name) + shutil.copytree(self.dist_info_dir, unpacked_dist_info) + self._install_namespaces(unpacked, dist_info.name) + files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp) + strategy = self._select_strategy(dist_name, tag, lib) + with strategy, WheelFile(wheel_path, "w") as wheel_obj: + strategy(wheel_obj, files, mapping) + wheel_obj.write_files(unpacked) + + return wheel_path + + def _run_install(self, category: str): + has_category = getattr(self.distribution, f"has_{category}", None) + if has_category and has_category(): + _logger.info(f"Installing {category} as non editable") + self.run_command(f"install_{category}") + + def _select_strategy( + self, + name: str, + tag: str, + build_lib: _Path, + ) -> "EditableStrategy": + """Decides which strategy to use to implement an editable installation.""" + build_name = f"__editable__.{name}-{tag}" + project_dir = Path(self.project_dir) + mode = _EditableMode.convert(self.mode) + + if mode is _EditableMode.STRICT: + auxiliary_dir = _empty_dir(Path(self.project_dir, "build", build_name)) + return _LinkTree(self.distribution, name, auxiliary_dir, build_lib) + + packages = _find_packages(self.distribution) + has_simple_layout = _simple_layout(packages, self.package_dir, project_dir) + is_compat_mode = mode is _EditableMode.COMPAT + if set(self.package_dir) == {""} and has_simple_layout or is_compat_mode: + # src-layout(ish) is relatively safe for a simple pth file + src_dir = self.package_dir.get("", ".") + 
return _StaticPth(self.distribution, name, [Path(project_dir, src_dir)]) + + # Use a MetaPathFinder to avoid adding accidental top-level packages/modules + return _TopLevelFinder(self.distribution, name) + + +class EditableStrategy(Protocol): + def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): + ... + + def __enter__(self): + ... + + def __exit__(self, _exc_type, _exc_value, _traceback): + ... + + +class _StaticPth: + def __init__(self, dist: Distribution, name: str, path_entries: List[Path]): + self.dist = dist + self.name = name + self.path_entries = path_entries + + def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): + entries = "\n".join((str(p.resolve()) for p in self.path_entries)) + contents = bytes(f"{entries}\n", "utf-8") + wheel.writestr(f"__editable__.{self.name}.pth", contents) + + def __enter__(self): + msg = f""" + Editable install will be performed using .pth file to extend `sys.path` with: + {list(map(os.fspath, self.path_entries))!r} + """ + _logger.warning(msg + _LENIENT_WARNING) + return self + + def __exit__(self, _exc_type, _exc_value, _traceback): + ... + + +class _LinkTree(_StaticPth): + """ + Creates a ``.pth`` file that points to a link tree in the ``auxiliary_dir``. + + This strategy will only link files (not dirs), so it can be implemented in + any OS, even if that means using hardlinks instead of symlinks. + + By collocating ``auxiliary_dir`` and the original source code, limitations + with hardlinks should be avoided. + """ + def __init__( + self, dist: Distribution, + name: str, + auxiliary_dir: _Path, + build_lib: _Path, + ): + self.auxiliary_dir = Path(auxiliary_dir) + self.build_lib = Path(build_lib).resolve() + self._file = dist.get_command_obj("build_py").copy_file + super().__init__(dist, name, [self.auxiliary_dir]) + + def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): + self._create_links(files, mapping) + super().__call__(wheel, files, mapping) + + def _normalize_output(self, file: str) -> Optional[str]: + # Files relative to build_lib will be normalized to None + with suppress(ValueError): + path = Path(file).resolve().relative_to(self.build_lib) + return str(path).replace(os.sep, '/') + return None + + def _create_file(self, relative_output: str, src_file: str, link=None): + dest = self.auxiliary_dir / relative_output + if not dest.parent.is_dir(): + dest.parent.mkdir(parents=True) + self._file(src_file, dest, link=link) + + def _create_links(self, outputs, output_mapping): + self.auxiliary_dir.mkdir(parents=True, exist_ok=True) + link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard" + mappings = { + self._normalize_output(k): v + for k, v in output_mapping.items() + } + mappings.pop(None, None) # remove files that are not relative to build_lib + + for output in outputs: + relative = self._normalize_output(output) + if relative and relative not in mappings: + self._create_file(relative, output) + + for relative, src in mappings.items(): + self._create_file(relative, src, link=link_type) + + def __enter__(self): + msg = "Strict editable install will be performed using a link tree.\n" + _logger.warning(msg + _STRICT_WARNING) + return self + + def __exit__(self, _exc_type, _exc_value, _traceback): + msg = f"""\n + Strict editable installation performed using the auxiliary directory: + {self.auxiliary_dir} + + Please be careful to not remove this directory, otherwise you might not be able + to import/use your package. 
+ """ + InformationOnly.emit("Editable installation.", msg) + + +class _TopLevelFinder: + def __init__(self, dist: Distribution, name: str): + self.dist = dist + self.name = name + + def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): + src_root = self.dist.src_root or os.curdir + top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist)) + package_dir = self.dist.package_dir or {} + roots = _find_package_roots(top_level, package_dir, src_root) + + namespaces_: Dict[str, List[str]] = dict(chain( + _find_namespaces(self.dist.packages or [], roots), + ((ns, []) for ns in _find_virtual_namespaces(roots)), + )) + + name = f"__editable__.{self.name}.finder" + finder = _normalization.safe_identifier(name) + content = bytes(_finder_template(name, roots, namespaces_), "utf-8") + wheel.writestr(f"{finder}.py", content) + + content = bytes(f"import {finder}; {finder}.install()", "utf-8") + wheel.writestr(f"__editable__.{self.name}.pth", content) + + def __enter__(self): + msg = "Editable install will be performed using a meta path finder.\n" + _logger.warning(msg + _LENIENT_WARNING) + return self + + def __exit__(self, _exc_type, _exc_value, _traceback): + msg = """\n + Please be careful with folders in your working directory with the same + name as your package as they may take precedence during imports. + """ + InformationOnly.emit("Editable installation.", msg) + + +def _can_symlink_files(base_dir: Path) -> bool: + with TemporaryDirectory(dir=str(base_dir.resolve())) as tmp: + path1, path2 = Path(tmp, "file1.txt"), Path(tmp, "file2.txt") + path1.write_text("file1", encoding="utf-8") + with suppress(AttributeError, NotImplementedError, OSError): + os.symlink(path1, path2) + if path2.is_symlink() and path2.read_text(encoding="utf-8") == "file1": + return True + + try: + os.link(path1, path2) # Ensure hard links can be created + except Exception as ex: + msg = ( + "File system does not seem to support either symlinks or hard links. " + "Strict editable installs require one of them to be supported." + ) + raise LinksNotSupported(msg) from ex + return False + + +def _simple_layout( + packages: Iterable[str], package_dir: Dict[str, str], project_dir: Path +) -> bool: + """Return ``True`` if: + - all packages are contained by the same parent directory, **and** + - all packages become importable if the parent directory is added to ``sys.path``. 
+ + >>> _simple_layout(['a'], {"": "src"}, "/tmp/myproj") + True + >>> _simple_layout(['a', 'a.b'], {"": "src"}, "/tmp/myproj") + True + >>> _simple_layout(['a', 'a.b'], {}, "/tmp/myproj") + True + >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"": "src"}, "/tmp/myproj") + True + >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "a", "b": "b"}, ".") + True + >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a", "b": "_b"}, ".") + False + >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a"}, "/tmp/myproj") + False + >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a.a1.a2": "_a2"}, ".") + False + >>> _simple_layout(['a', 'a.b'], {"": "src", "a.b": "_ab"}, "/tmp/myproj") + False + >>> # Special cases, no packages yet: + >>> _simple_layout([], {"": "src"}, "/tmp/myproj") + True + >>> _simple_layout([], {"a": "_a", "": "src"}, "/tmp/myproj") + False + """ + layout = { + pkg: find_package_path(pkg, package_dir, project_dir) + for pkg in packages + } + if not layout: + return set(package_dir) in ({}, {""}) + parent = os.path.commonpath([_parent_path(k, v) for k, v in layout.items()]) + return all( + _path.same_path(Path(parent, *key.split('.')), value) + for key, value in layout.items() + ) + + +def _parent_path(pkg, pkg_path): + """Infer the parent path containing a package, that, if added to ``sys.path``, would + allow importing that package. + When ``pkg`` is directly mapped into a directory with a different name, return its + own path. + >>> _parent_path("a", "src/a") + 'src' + >>> _parent_path("b", "src/c") + 'src/c' + """ + parent = pkg_path[:-len(pkg)] if pkg_path.endswith(pkg) else pkg_path + return parent.rstrip("/" + os.sep) + + +def _find_packages(dist: Distribution) -> Iterator[str]: + yield from iter(dist.packages or []) + + py_modules = dist.py_modules or [] + nested_modules = [mod for mod in py_modules if "." in mod] + if dist.ext_package: + yield dist.ext_package + else: + ext_modules = dist.ext_modules or [] + nested_modules += [x.name for x in ext_modules if "." in x.name] + + for module in nested_modules: + package, _, _ = module.rpartition(".") + yield package + + +def _find_top_level_modules(dist: Distribution) -> Iterator[str]: + py_modules = dist.py_modules or [] + yield from (mod for mod in py_modules if "." not in mod) + + if not dist.ext_package: + ext_modules = dist.ext_modules or [] + yield from (x.name for x in ext_modules if "." not in x.name) + + +def _find_package_roots( + packages: Iterable[str], + package_dir: Mapping[str, str], + src_root: _Path, +) -> Dict[str, str]: + pkg_roots: Dict[str, str] = { + pkg: _absolute_root(find_package_path(pkg, package_dir, src_root)) + for pkg in sorted(packages) + } + + return _remove_nested(pkg_roots) + + +def _absolute_root(path: _Path) -> str: + """Works for packages and top-level modules""" + path_ = Path(path) + parent = path_.parent + + if path_.exists(): + return str(path_.resolve()) + else: + return str(parent.resolve() / path_.name) + + +def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]: + """By carefully designing ``package_dir``, it is possible to implement the logical + structure of PEP 420 in a package without the corresponding directories. + + Moreover, a parent package can be purposefully/accidentally skipped in the discovery + phase (e.g. ``find_packages(include=["mypkg.*"])``, when ``mypkg.foo`` is included + but ``mypkg`` itself is not).
+ We consider this case to also be a virtual namespace (ignoring the original + directory) to emulate a non-editable installation. + + This function will try to find these kinds of namespaces. + """ + for pkg in pkg_roots: + if "." not in pkg: + continue + parts = pkg.split(".") + for i in range(len(parts) - 1, 0, -1): + partial_name = ".".join(parts[:i]) + path = Path(find_package_path(partial_name, pkg_roots, "")) + if not path.exists() or partial_name not in pkg_roots: + # partial_name not in pkg_roots ==> purposefully/accidentally skipped + yield partial_name + + +def _find_namespaces( + packages: List[str], pkg_roots: Dict[str, str] +) -> Iterator[Tuple[str, List[str]]]: + for pkg in packages: + path = find_package_path(pkg, pkg_roots, "") + if Path(path).exists() and not Path(path, "__init__.py").exists(): + yield (pkg, [path]) + + +def _remove_nested(pkg_roots: Dict[str, str]) -> Dict[str, str]: + output = dict(pkg_roots.copy()) + + for pkg, path in reversed(list(pkg_roots.items())): + if any( + pkg != other and _is_nested(pkg, path, other, other_path) + for other, other_path in pkg_roots.items() + ): + output.pop(pkg) + + return output + + +def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool: + """ + Return ``True`` if ``pkg`` is nested inside ``parent`` both logically and in the + file system. + >>> _is_nested("a.b", "path/a/b", "a", "path/a") + True + >>> _is_nested("a.b", "path/a/b", "a", "otherpath/a") + False + >>> _is_nested("a.b", "path/a/b", "c", "path/c") + False + >>> _is_nested("a.a", "path/a/a", "a", "path/a") + True + >>> _is_nested("b.a", "path/b/a", "a", "path/a") + False + """ + norm_pkg_path = _path.normpath(pkg_path) + rest = pkg.replace(parent, "", 1).strip(".").split(".") + return ( + pkg.startswith(parent) + and norm_pkg_path == _path.normpath(Path(parent_path, *rest)) + ) + + +def _empty_dir(dir_: _P) -> _P: + """Create a directory ensured to be empty. 
Existing files may be removed.""" + shutil.rmtree(dir_, ignore_errors=True) + os.makedirs(dir_) + return dir_ + + +class _NamespaceInstaller(namespaces.Installer): + def __init__(self, distribution, installation_dir, editable_name, src_root): + self.distribution = distribution + self.src_root = src_root + self.installation_dir = installation_dir + self.editable_name = editable_name + self.outputs = [] + self.dry_run = False + + def _get_target(self): + """Installation target.""" + return os.path.join(self.installation_dir, self.editable_name) + + def _get_root(self): + """Where the modules/packages should be loaded from.""" + return repr(str(self.src_root)) + + +_FINDER_TEMPLATE = """\ +import sys +from importlib.machinery import ModuleSpec +from importlib.machinery import all_suffixes as module_suffixes +from importlib.util import spec_from_file_location +from itertools import chain +from pathlib import Path + +MAPPING = {mapping!r} +NAMESPACES = {namespaces!r} +PATH_PLACEHOLDER = {name!r} + ".__path_hook__" + + +class _EditableFinder: # MetaPathFinder + @classmethod + def find_spec(cls, fullname, path=None, target=None): + for pkg, pkg_path in reversed(list(MAPPING.items())): + if fullname == pkg or fullname.startswith(f"{{pkg}}."): + rest = fullname.replace(pkg, "", 1).strip(".").split(".") + return cls._find_spec(fullname, Path(pkg_path, *rest)) + + return None + + @classmethod + def _find_spec(cls, fullname, candidate_path): + init = candidate_path / "__init__.py" + candidates = (candidate_path.with_suffix(x) for x in module_suffixes()) + for candidate in chain([init], candidates): + if candidate.exists(): + return spec_from_file_location(fullname, candidate) + + +class _EditableNamespaceFinder: # PathEntryFinder + @classmethod + def _path_hook(cls, path): + if path == PATH_PLACEHOLDER: + return cls + raise ImportError + + @classmethod + def _paths(cls, fullname): + # Ensure __path__ is not empty for the spec to be considered a namespace. + return NAMESPACES[fullname] or MAPPING.get(fullname) or [PATH_PLACEHOLDER] + + @classmethod + def find_spec(cls, fullname, target=None): + if fullname in NAMESPACES: + spec = ModuleSpec(fullname, None, is_package=True) + spec.submodule_search_locations = cls._paths(fullname) + return spec + return None + + @classmethod + def find_module(cls, fullname): + return None + + +def install(): + if not any(finder == _EditableFinder for finder in sys.meta_path): + sys.meta_path.append(_EditableFinder) + + if not NAMESPACES: + return + + if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks): + # PathEntryFinder is needed to create NamespaceSpec without private APIs + sys.path_hooks.append(_EditableNamespaceFinder._path_hook) + if PATH_PLACEHOLDER not in sys.path: + sys.path.append(PATH_PLACEHOLDER) # Used just to trigger the path hook +""" + + +def _finder_template( + name: str, mapping: Mapping[str, str], namespaces: Dict[str, List[str]] +) -> str: + """Create a string containing the code for the ``MetaPathFinder`` and + ``PathEntryFinder``. + """ + mapping = dict(sorted(mapping.items(), key=lambda p: p[0])) + return _FINDER_TEMPLATE.format(name=name, mapping=mapping, namespaces=namespaces) + + +class LinksNotSupported(errors.FileError): + """File system does not seem to support either symlinks or hard links.""" + + +class _DebuggingTips(SetuptoolsWarning): + _SUMMARY = "Problem in editable installation." + _DETAILS = """ + An error happened while installing `{project}` in editable mode.
+ + The following steps are recommended to help debug this problem: + + - Try to install the project normally, without using the editable mode. + Does the error still persist? + (If it does, try fixing the problem before attempting the editable mode). + - If you are using binary extensions, make sure you have all OS-level + dependencies installed (e.g. compilers, toolchains, binary libraries, ...). + - Try the latest version of setuptools (maybe the error was already fixed). + - If you (or your project dependencies) are using any setuptools extension + or customization, make sure they support the editable mode. + + After following the steps above, if the problem still persists and + you think this is related to how setuptools handles editable installations, + please submit a reproducible example + (see https://stackoverflow.com/help/minimal-reproducible-example) to: + + https://github.com/pypa/setuptools/issues + """ + _SEE_DOCS = "userguide/development_mode.html" diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index 63389654af..f5163ae7b5 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -13,27 +13,26 @@ import re import sys import io -import warnings import time import collections from .._importlib import metadata -from .. import _entry_points +from .. import _entry_points, _normalization from setuptools import Command from setuptools.command.sdist import sdist from setuptools.command.sdist import walk_revctrl from setuptools.command.setopt import edit_config from setuptools.command import bdist_egg -from pkg_resources import ( - Requirement, safe_name, parse_version, - safe_version, to_filename) import setuptools.unicode_utils as unicode_utils from setuptools.glob import glob from setuptools.extern import packaging from setuptools.extern.jaraco.text import yield_lines -from setuptools import SetuptoolsDeprecationWarning +from ..warnings import SetuptoolsDeprecationWarning + + +PY_MAJOR = '{}.{}'.format(*sys.version_info) def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME @@ -125,10 +124,11 @@ class InfoCommon: @property def name(self): - return safe_name(self.distribution.get_name()) + return _normalization.safe_name(self.distribution.get_name()) def tagged_version(self): - return safe_version(self._maybe_tag(self.distribution.get_version())) + tagged = self._maybe_tag(self.distribution.get_version()) + return _normalization.best_effort_version(tagged) def _maybe_tag(self, version): """ @@ -136,16 +136,26 @@ def _maybe_tag(self, version): in which case the version string already contains all tags. """ return ( - version if self.vtags and version.endswith(self.vtags) + version if self.vtags and self._already_tagged(version) else version + self.vtags ) - def tags(self): + def _already_tagged(self, version: str) -> bool: + # Depending on their format, tags may change with version normalization. + # So in addition to the regular tags, we have to search for the normalized ones. + return version.endswith(self.vtags) or version.endswith(self._safe_tags()) + + def _safe_tags(self) -> str: + # To implement this we can rely on `safe_version` pretending to be version 0 + # followed by tags.
Then we simply discard the starting 0 (fake version number) + return _normalization.best_effort_version(f"0{self.vtags}")[1:] + + def tags(self) -> str: version = '' if self.tag_build: version += self.tag_build if self.tag_date: - version += time.strftime("-%Y%m%d") + version += time.strftime("%Y%m%d") return version vtags = property(tags) @@ -172,6 +182,7 @@ def initialize_options(self): self.egg_info = None self.egg_version = None self.broken_egg_info = False + self.ignore_egg_info_in_manifest = False #################################### # allow the 'tag_svn_revision' to be detected and @@ -205,12 +216,12 @@ def finalize_options(self): # repercussions. self.egg_name = self.name self.egg_version = self.tagged_version() - parsed_version = parse_version(self.egg_version) + parsed_version = packaging.version.Version(self.egg_version) try: is_version = isinstance(parsed_version, packaging.version.Version) spec = "%s==%s" if is_version else "%s===%s" - Requirement(spec % (self.egg_name, self.egg_version)) + packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version)) except ValueError as e: raise distutils.errors.DistutilsOptionError( "Invalid distribution name or version syntax: %s-%s" % @@ -222,7 +233,7 @@ def finalize_options(self): self.egg_base = (dirs or {}).get('', os.curdir) self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name) + '.egg-info' + self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info' if self.egg_base != os.curdir: self.egg_info = os.path.join(self.egg_base, self.egg_info) if '-' in self.egg_name: @@ -238,11 +249,16 @@ def finalize_options(self): # to the version info # pd = self.distribution._patched_dist - if pd is not None and pd.key == self.egg_name.lower(): + key = getattr(pd, "key", None) or getattr(pd, "name", None) + if pd is not None and key == self.egg_name.lower(): pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) + pd._parsed_version = packaging.version.Version(self.egg_version) self.distribution._patched_dist = None + def _get_egg_basename(self, py_version=PY_MAJOR, platform=None): + """Compute filename of the output egg. 
Private API.""" + return _egg_basename(self.egg_name, self.egg_version, py_version, platform) + def write_or_delete_file(self, what, filename, data, force=False): """Write `data` to `filename` or delete if empty @@ -284,9 +300,12 @@ def delete_file(self, filename): def run(self): self.mkpath(self.egg_info) - os.utime(self.egg_info, None) + try: + os.utime(self.egg_info, None) + except OSError as e: + msg = f"Cannot update time stamp of directory '{self.egg_info}'" + raise distutils.errors.DistutilsFileError(msg) from e for ep in metadata.entry_points(group='egg_info.writers'): - self.distribution._install_dependencies(ep) writer = ep.load() writer(self, ep.name, os.path.join(self.egg_info, ep.name)) @@ -301,6 +320,7 @@ def find_sources(self): """Generate SOURCES.txt manifest file""" manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") mm = manifest_maker(self.distribution) + mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest mm.manifest = manifest_filename mm.run() self.filelist = mm.filelist @@ -310,12 +330,16 @@ def check_broken_egg_info(self): if self.egg_base != os.curdir: bei = os.path.join(self.egg_base, bei) if os.path.exists(bei): - log.warn( - "-" * 78 + '\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n' + '-' * 78, - bei, self.egg_info + EggInfoDeprecationWarning.emit( + "Invalid egg-info directory name.", + f""" + Your current .egg-info directory has a '-' in its name; + this will not work correctly with setuptools commands. + + Please rename {bei!r} to {self.egg_info!r} to correct this problem. + """, + due_date=(2023, 6, 1), + # Old warning, introduced in 2005, might be safe to remove soon ) self.broken_egg_info = self.egg_info self.egg_info = bei # make it work for now @@ -324,6 +348,10 @@ def check_broken_egg_info(self): class FileList(_FileList): # Implementations of the various MANIFEST.in commands + def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False): + super().__init__(warn, debug_print) + self.ignore_egg_info_dir = ignore_egg_info_dir + def process_template_line(self, line): # Parse the line: split it up, make sure the right number of words # is there, and return the relevant words. 
'action' is always @@ -513,6 +541,10 @@ def _safe_path(self, path): return False try: + # ignore egg-info paths + is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path + if self.ignore_egg_info_dir and is_egg_info: + return False # accept is either way checks out if os.path.exists(u_path) or os.path.exists(utf8_path): return True @@ -529,18 +561,20 @@ def initialize_options(self): self.prune = 1 self.manifest_only = 1 self.force_manifest = 1 + self.ignore_egg_info_dir = False def finalize_options(self): pass def run(self): - self.filelist = FileList() + self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir) if not os.path.exists(self.manifest): self.write_manifest() # it must exist so it'll get in the list self.add_defaults() if os.path.exists(self.template): self.read_template() self.add_license_files() + self._add_referenced_files() self.prune_file_list() self.filelist.sort() self.filelist.remove_duplicates() @@ -595,9 +629,16 @@ def add_license_files(self): license_files = self.distribution.metadata.license_files or [] for lf in license_files: log.info("adding license file '%s'", lf) - pass self.filelist.extend(license_files) + def _add_referenced_files(self): + """Add files referenced by the config (e.g. `file:` directive) to filelist""" + referenced = getattr(self.distribution, '_referenced_files', []) + # ^-- fallback if dist comes from distutils or is a custom class + for rf in referenced: + log.debug("adding file referenced by config '%s'", rf) + self.filelist.extend(referenced) + def prune_file_list(self): build = self.get_finalized_command('build') base_dir = self.distribution.get_fullname() @@ -620,11 +661,14 @@ def _safe_data_files(self, build_py): if hasattr(build_py, 'get_data_files_without_manifest'): return build_py.get_data_files_without_manifest() - warnings.warn( - "Custom 'build_py' does not implement " - "'get_data_files_without_manifest'.\nPlease extend command classes" - " from setuptools instead of distutils.", - SetuptoolsDeprecationWarning + SetuptoolsDeprecationWarning.emit( + "`build_py` command does not inherit from setuptools' `build_py`.", + """ + Custom 'build_py' does not implement 'get_data_files_without_manifest'. + Please extend command classes from setuptools instead of distutils. + """, + see_url="https://peps.python.org/pep-0632/", + # due_date not defined yet, old projects might still do it? ) return build_py.get_data_files() @@ -663,9 +707,15 @@ def write_pkg_info(cmd, basename, filename): def warn_depends_obsolete(cmd, basename, filename): if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." + EggInfoDeprecationWarning.emit( + "Deprecated config.", + """ + 'depends.txt' is not used by setuptools >= 0.6! + Configure your dependencies via `setup.cfg` or `pyproject.toml` instead. + """, + see_docs="userguide/declarative_config.html", + due_date=(2023, 6, 1), + # Old warning, introduced in 2005, it might be safe to remove soon. ) @@ -728,8 +778,12 @@ def get_pkg_info_revision(): Get a -r### off of PKG-INFO Version in case this is an sdist of a subversion revision. 
""" - warnings.warn( - "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning) + EggInfoDeprecationWarning.emit( + "Deprecated API call", + "get_pkg_info_revision is deprecated.", + due_date=(2023, 6, 1), + # Warning introduced in 11 Dec 2015, should be safe to remove + ) if os.path.exists('PKG-INFO'): with io.open('PKG-INFO') as f: for line in f: @@ -739,5 +793,15 @@ def get_pkg_info_revision(): return 0 +def _egg_basename(egg_name, egg_version, py_version=None, platform=None): + """Compute filename of the output egg. Private API.""" + name = _normalization.filename_component(egg_name) + version = _normalization.filename_component(egg_version) + egg = f"{name}-{version}-py{py_version or PY_MAJOR}" + if platform: + egg += f"-{platform}" + return egg + + class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): """Deprecated behavior warning for EggInfo, bypassing suppression.""" diff --git a/setuptools/command/install.py b/setuptools/command/install.py index 35e54d2043..dec4e32018 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -1,11 +1,11 @@ from distutils.errors import DistutilsArgError import inspect import glob -import warnings import platform import distutils.command.install as orig import setuptools +from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for # now. See https://github.com/pypa/setuptools/issues/199/ @@ -30,11 +30,17 @@ class install(orig.install): _nc = dict(new_commands) def initialize_options(self): - - warnings.warn( - "setup.py install is deprecated. " - "Use build and pip and other standards-based tools.", - setuptools.SetuptoolsDeprecationWarning, + SetuptoolsDeprecationWarning.emit( + "setup.py install is deprecated.", + """ + Please avoid running ``setup.py`` directly. + Instead, use pypa/build, pypa/installer, pypa/build or + other standards-based tools. + """, + see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html", + # TODO: Document how to bootstrap setuptools without install + # (e.g. by unziping the wheel file) + # and then add a due_date to this warning. ) orig.install.initialize_options(self) @@ -86,19 +92,26 @@ def _called_from_setup(run_frame): """ if run_frame is None: msg = "Call stack not available. bdist_* commands may fail." - warnings.warn(msg) + SetuptoolsWarning.emit(msg) if platform.python_implementation() == 'IronPython': msg = "For best results, pass -X:Frames to enable call stack." 
- warnings.warn(msg) + SetuptoolsWarning.emit(msg) return True - res = inspect.getouterframes(run_frame)[2] - caller, = res[:1] - info = inspect.getframeinfo(caller) - caller_module = caller.f_globals.get('__name__', '') - return ( - caller_module == 'distutils.dist' - and info.function == 'run_commands' - ) + + frames = inspect.getouterframes(run_frame) + for frame in frames[2:4]: + caller, = frame[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + + if caller_module == "setuptools.dist" and info.function == "run_command": + # Starting from v61.0.0 setuptools overwrites dist.run_command + continue + + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) def do_egg_install(self): diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py index 65ede406bf..1c549c98ea 100644 --- a/setuptools/command/install_egg_info.py +++ b/setuptools/command/install_egg_info.py @@ -5,7 +5,6 @@ from setuptools import namespaces from setuptools.archive_util import unpack_archive from .._path import ensure_directory -import pkg_resources class install_egg_info(namespaces.Installer, Command): @@ -24,9 +23,7 @@ def finalize_options(self): self.set_undefined_options('install_lib', ('install_dir', 'install_dir')) ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name() + '.egg-info' + basename = f"{ei_cmd._get_egg_basename()}.egg-info" self.source = ei_cmd.egg_info self.target = os.path.join(self.install_dir, basename) self.outputs = [] diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py index aeb0e4240c..8b3133f1fd 100644 --- a/setuptools/command/install_scripts.py +++ b/setuptools/command/install_scripts.py @@ -4,7 +4,6 @@ import os import sys -from pkg_resources import Distribution, PathMetadata from .._path import ensure_directory @@ -16,8 +15,6 @@ def initialize_options(self): self.no_ep = False def run(self): - import setuptools.command.easy_install as ei - self.run_command("egg_info") if self.distribution.scripts: orig.install_scripts.run(self) # run first to set up self.outfiles @@ -26,6 +23,12 @@ def run(self): if self.no_ep: # don't install entry point scripts into .egg file! return + self._install_ep_scripts() + + def _install_ep_scripts(self): + # Delay import side-effects + from pkg_resources import Distribution, PathMetadata + from . 
import easy_install as ei ei_cmd = self.get_finalized_command("egg_info") dist = Distribution( diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 0ffeacf319..4a8cde7e16 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -4,10 +4,12 @@ import sys import io import contextlib +from itertools import chain from .py36compat import sdist_add_defaults from .._importlib import metadata +from .build import _ORIGINAL_SUBCOMMANDS _default_revctrl = list @@ -100,6 +102,10 @@ class NoValue: if orig_val is not NoValue: setattr(os, 'link', orig_val) + def add_defaults(self): + super().add_defaults() + self._add_defaults_build_sub_commands() + def _add_defaults_optional(self): super()._add_defaults_optional() if os.path.isfile('pyproject.toml'): @@ -112,6 +118,14 @@ def _add_defaults_python(self): self.filelist.extend(build_py.get_source_files()) self._add_data_files(self._safe_data_files(build_py)) + def _add_defaults_build_sub_commands(self): + build = self.get_finalized_command("build") + missing_cmds = set(build.get_sub_commands()) - _ORIGINAL_SUBCOMMANDS + # ^-- the original built-in sub-commands are already handled by default. + cmds = (self.get_finalized_command(c) for c in missing_cmds) + files = (c.get_source_files() for c in cmds if hasattr(c, "get_source_files")) + self.filelist.extend(chain.from_iterable(files)) + def _safe_data_files(self, build_py): """ Since the ``sdist`` class is also used to compute the MANIFEST diff --git a/setuptools/command/test.py b/setuptools/command/test.py index 652f3e4a0f..8dde513c95 100644 --- a/setuptools/command/test.py +++ b/setuptools/command/test.py @@ -118,7 +118,7 @@ def test_args(self): return list(self._test_args()) def _test_args(self): - if not self.test_suite and sys.version_info >= (2, 7): + if not self.test_suite: yield 'discover' if self.verbose: yield '--verbose' diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index a5480005c7..077c9d2fcd 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """upload_docs Implements a Distutils 'upload_docs' subcommand (upload documentation to @@ -17,10 +16,9 @@ import functools import http.client import urllib.parse -import warnings from .._importlib import metadata -from .. import SetuptoolsDeprecationWarning +from ..warnings import SetuptoolsDeprecationWarning from .upload import upload @@ -59,6 +57,9 @@ def initialize_options(self): self.target_dir = None def finalize_options(self): + log.warn( + "Upload_docs command is deprecated. Use Read the Docs " + "(https://readthedocs.org) instead.") upload.finalize_options(self) if self.upload_dir is None: if self.has_sphinx(): @@ -70,8 +71,6 @@ def finalize_options(self): else: self.ensure_dirname('upload_dir') self.target_dir = self.upload_dir - if 'pypi.python.org' in self.repository: - log.warn("Upload_docs command is deprecated for PyPi. Use RTD instead.") self.announce('Using upload directory %s' % self.target_dir) def create_zipfile(self, filename): @@ -91,10 +90,14 @@ def create_zipfile(self, filename): zip_file.close() def run(self): - warnings.warn( - "upload_docs is deprecated and will be removed in a future " - "version. Use tools like httpie or curl instead.", - SetuptoolsDeprecationWarning, + SetuptoolsDeprecationWarning.emit( + "Deprecated command", + """ + upload_docs is deprecated and will be removed in a future version. 
+ Instead, use tools like devpi and Read the Docs; or lower level tools like + httpie and curl to interact directly with your hosting service API. + """, + due_date=(2023, 9, 26), # warning introduced in 27 Jul 2022 ) # Run sub commands diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py new file mode 100644 index 0000000000..ffea394436 --- /dev/null +++ b/setuptools/config/__init__.py @@ -0,0 +1,42 @@ +"""For backward compatibility, expose main functions from +``setuptools.config.setupcfg`` +""" +from functools import wraps +from typing import Callable, TypeVar, cast + +from ..warnings import SetuptoolsDeprecationWarning +from . import setupcfg + +Fn = TypeVar("Fn", bound=Callable) + +__all__ = ('parse_configuration', 'read_configuration') + + +def _deprecation_notice(fn: Fn) -> Fn: + @wraps(fn) + def _wrapper(*args, **kwargs): + SetuptoolsDeprecationWarning.emit( + "Deprecated API usage.", + f""" + As setuptools moves its configuration towards `pyproject.toml`, + `{__name__}.{fn.__name__}` became deprecated. + + For the time being, you can use the `{setupcfg.__name__}` module + to access a backward compatible API, but this module is provisional + and might be removed in the future. + + To read project metadata, consider using + ``build.util.project_wheel_metadata`` (https://pypi.org/project/build/). + For simple scenarios, you can also try parsing the file directly + with the help of ``configparser``. + """, + # due_date not defined yet, because the community still heavily relies on it + # Warning introduced in 24 Mar 2022 + ) + return fn(*args, **kwargs) + + return cast(Fn, _wrapper) + + +read_configuration = _deprecation_notice(setupcfg.read_configuration) +parse_configuration = _deprecation_notice(setupcfg.parse_configuration) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py new file mode 100644 index 0000000000..3091e3b5e3 --- /dev/null +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -0,0 +1,386 @@ +"""Translation layer between pyproject config and setuptools distribution and +metadata objects. + +The distribution and metadata objects are modeled after (an old version of) +core metadata, therefore configs in the format specified for ``pyproject.toml`` +need to be processed before being applied. + +**PRIVATE MODULE**: API reserved for setuptools internal usage only. +""" +import logging +import os +from collections.abc import Mapping +from email.headerregistry import Address +from functools import partial, reduce +from itertools import chain +from types import MappingProxyType +from typing import (TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, + Type, Union, cast) + +from ..warnings import SetuptoolsWarning, SetuptoolsDeprecationWarning + +if TYPE_CHECKING: + from setuptools._importlib import metadata # noqa + from setuptools.dist import Distribution # noqa + +EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like +_Path = Union[os.PathLike, str] +_DictOrStr = Union[dict, str] +_CorrespFn = Callable[["Distribution", Any, _Path], None] +_Correspondence = Union[str, _CorrespFn] + +_logger = logging.getLogger(__name__) + + +def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution": + """Apply configuration dict read with :func:`read_configuration`""" + + if not config: + return dist # short-circuit unrelated pyproject.toml file + + root_dir = os.path.dirname(filename) or "." 
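For orientation, the ``config`` dict handled by ``apply`` (whose body continues below) is the already-parsed ``pyproject.toml``. A minimal, hypothetical example of the structure that ``_apply_project_table``, ``_apply_tool_table`` and ``_copy_command_options`` expect:

    # Hypothetical parsed pyproject.toml, as passed to apply(dist, config, filename).
    config = {
        "project": {
            "name": "mypkg",
            "version": "0.1.0",
            "readme": "README.md",              # handled by _long_description
            "dependencies": ["packaging>=21"],  # handled by _dependencies
        },
        "tool": {
            "setuptools": {"zip-safe": False},  # key normalized to zip_safe
            "distutils": {"sdist": {"formats": "gztar"}},  # per-command options
        },
    }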
+ + _apply_project_table(dist, config, root_dir) + _apply_tool_table(dist, config, filename) + + current_directory = os.getcwd() + os.chdir(root_dir) + try: + dist._finalize_requires() + dist._finalize_license_files() + finally: + os.chdir(current_directory) + + return dist + + +def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path): + project_table = config.get("project", {}).copy() + if not project_table: + return # short-circuit + + _handle_missing_dynamic(dist, project_table) + _unify_entry_points(project_table) + + for field, value in project_table.items(): + norm_key = json_compatible_key(field) + corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key) + if callable(corresp): + corresp(dist, value, root_dir) + else: + _set_config(dist, corresp, value) + + +def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path): + tool_table = config.get("tool", {}).get("setuptools", {}) + if not tool_table: + return # short-circuit + + for field, value in tool_table.items(): + norm_key = json_compatible_key(field) + + if norm_key in TOOL_TABLE_DEPRECATIONS: + suggestion, kwargs = TOOL_TABLE_DEPRECATIONS[norm_key] + msg = f"The parameter `{norm_key}` is deprecated, {suggestion}" + SetuptoolsDeprecationWarning.emit( + "Deprecated config", msg, **kwargs # type: ignore + ) + + norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key) + _set_config(dist, norm_key, value) + + _copy_command_options(config, dist, filename) + + +def _handle_missing_dynamic(dist: "Distribution", project_table: dict): + """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``""" + # TODO: Set fields back to `None` once the feature stabilizes + dynamic = set(project_table.get("dynamic", [])) + for field, getter in _PREVIOUSLY_DEFINED.items(): + if not (field in project_table or field in dynamic): + value = getter(dist) + if value: + _WouldIgnoreField.emit(field=field, value=value) + + +def json_compatible_key(key: str) -> str: + """As defined in :pep:`566#json-compatible-metadata`""" + return key.lower().replace("-", "_") + + +def _set_config(dist: "Distribution", field: str, value: Any): + setter = getattr(dist.metadata, f"set_{field}", None) + if setter: + setter(value) + elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES: + setattr(dist.metadata, field, value) + else: + setattr(dist, field, value) + + +_CONTENT_TYPES = { + ".md": "text/markdown", + ".rst": "text/x-rst", + ".txt": "text/plain", +} + + +def _guess_content_type(file: str) -> Optional[str]: + _, ext = os.path.splitext(file.lower()) + if not ext: + return None + + if ext in _CONTENT_TYPES: + return _CONTENT_TYPES[ext] + + valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items()) + msg = f"only the following file extensions are recognized: {valid}." 
+ raise ValueError(f"Undefined content type for {file}, {msg}") + + +def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path): + from setuptools.config import expand + + if isinstance(val, str): + file: Union[str, list] = val + text = expand.read_files(file, root_dir) + ctype = _guess_content_type(val) + else: + file = val.get("file") or [] + text = val.get("text") or expand.read_files(file, root_dir) + ctype = val["content-type"] + + _set_config(dist, "long_description", text) + + if ctype: + _set_config(dist, "long_description_content_type", ctype) + + if file: + dist._referenced_files.add(cast(str, file)) + + +def _license(dist: "Distribution", val: dict, root_dir: _Path): + from setuptools.config import expand + + if "file" in val: + _set_config(dist, "license", expand.read_files([val["file"]], root_dir)) + dist._referenced_files.add(val["file"]) + else: + _set_config(dist, "license", val["text"]) + + +def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str): + field = [] + email_field = [] + for person in val: + if "name" not in person: + email_field.append(person["email"]) + elif "email" not in person: + field.append(person["name"]) + else: + addr = Address(display_name=person["name"], addr_spec=person["email"]) + email_field.append(str(addr)) + + if field: + _set_config(dist, kind, ", ".join(field)) + if email_field: + _set_config(dist, f"{kind}_email", ", ".join(email_field)) + + +def _project_urls(dist: "Distribution", val: dict, _root_dir): + _set_config(dist, "project_urls", val) + + +def _python_requires(dist: "Distribution", val: dict, _root_dir): + from setuptools.extern.packaging.specifiers import SpecifierSet + + _set_config(dist, "python_requires", SpecifierSet(val)) + + +def _dependencies(dist: "Distribution", val: list, _root_dir): + if getattr(dist, "install_requires", []): + msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)" + SetuptoolsWarning.emit(msg) + _set_config(dist, "install_requires", val) + + +def _optional_dependencies(dist: "Distribution", val: dict, _root_dir): + existing = getattr(dist, "extras_require", {}) + _set_config(dist, "extras_require", {**existing, **val}) + + +def _unify_entry_points(project_table: dict): + project = project_table + entry_points = project.pop("entry-points", project.pop("entry_points", {})) + renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"} + for key, value in list(project.items()): # eager to allow modifications + norm_key = json_compatible_key(key) + if norm_key in renaming and value: + entry_points[renaming[norm_key]] = project.pop(key) + + if entry_points: + project["entry-points"] = { + name: [f"{k} = {v}" for k, v in group.items()] + for name, group in entry_points.items() + } + + +def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path): + tool_table = pyproject.get("tool", {}) + cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {}) + valid_options = _valid_command_options(cmdclass) + + cmd_opts = dist.command_options + for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items(): + cmd = json_compatible_key(cmd) + valid = valid_options.get(cmd, set()) + cmd_opts.setdefault(cmd, {}) + for key, value in config.items(): + key = json_compatible_key(key) + cmd_opts[cmd][key] = (str(filename), value) + if key not in valid: + # To avoid removing options that are specified dynamically we + # just log a warn... 
+ _logger.warning(f"Command option {cmd}.{key} is not defined") + + +def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]: + from .._importlib import metadata + from setuptools.dist import Distribution + + valid_options = {"global": _normalise_cmd_options(Distribution.global_options)} + + unloaded_entry_points = metadata.entry_points(group='distutils.commands') + loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points) + entry_points = (ep for ep in loaded_entry_points if ep) + for cmd, cmd_class in chain(entry_points, cmdclass.items()): + opts = valid_options.get(cmd, set()) + opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", [])) + valid_options[cmd] = opts + + return valid_options + + +def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]: + # Ignore all the errors + try: + return (ep.name, ep.load()) + except Exception as ex: + msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}" + _logger.warning(f"{msg}: {ex}") + return None + + +def _normalise_cmd_option_key(name: str) -> str: + return json_compatible_key(name).strip("_=") + + +def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]: + return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc} + + +def _get_previous_entrypoints(dist: "Distribution") -> Dict[str, list]: + ignore = ("console_scripts", "gui_scripts") + value = getattr(dist, "entry_points", None) or {} + return {k: v for k, v in value.items() if k not in ignore} + + +def _attrgetter(attr): + """ + Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found + >>> from types import SimpleNamespace + >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) + >>> _attrgetter("a")(obj) + 42 + >>> _attrgetter("b.c")(obj) + 13 + >>> _attrgetter("d")(obj) is None + True + """ + return partial(reduce, lambda acc, x: getattr(acc, x, None), attr.split(".")) + + +def _some_attrgetter(*items): + """ + Return the first "truth-y" attribute or None + >>> from types import SimpleNamespace + >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) + >>> _some_attrgetter("d", "a", "b.c")(obj) + 42 + >>> _some_attrgetter("d", "e", "b.c", "a")(obj) + 13 + >>> _some_attrgetter("d", "e", "f")(obj) is None + True + """ + def _acessor(obj): + values = (_attrgetter(i)(obj) for i in items) + return next((i for i in values if i is not None), None) + return _acessor + + +PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = { + "readme": _long_description, + "license": _license, + "authors": partial(_people, kind="author"), + "maintainers": partial(_people, kind="maintainer"), + "urls": _project_urls, + "dependencies": _dependencies, + "optional_dependencies": _optional_dependencies, + "requires_python": _python_requires, +} + +TOOL_TABLE_RENAMES = {"script_files": "scripts"} +TOOL_TABLE_DEPRECATIONS = { + "namespace_packages": ( + "consider using implicit namespaces instead (PEP 420).", + {"due_date": (2023, 10, 30)}, # warning introduced in May 2022 + ) +} + +SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls", + "provides_extras", "license_file", "license_files"} + +_PREVIOUSLY_DEFINED = { + "name": _attrgetter("metadata.name"), + "version": _attrgetter("metadata.version"), + "description": _attrgetter("metadata.description"), + "readme": _attrgetter("metadata.long_description"), + "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"), + "license": _attrgetter("metadata.license"), + 
"authors": _some_attrgetter("metadata.author", "metadata.author_email"), + "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"), + "keywords": _attrgetter("metadata.keywords"), + "classifiers": _attrgetter("metadata.classifiers"), + "urls": _attrgetter("metadata.project_urls"), + "entry-points": _get_previous_entrypoints, + "dependencies": _some_attrgetter("_orig_install_requires", "install_requires"), + "optional-dependencies": _some_attrgetter("_orig_extras_require", "extras_require"), +} + + +class _WouldIgnoreField(SetuptoolsDeprecationWarning): + _SUMMARY = "`{field}` defined outside of `pyproject.toml` would be ignored." + + _DETAILS = """ + ########################################################################## + # configuration would be ignored/result in error due to `pyproject.toml` # + ########################################################################## + + The following seems to be defined outside of `pyproject.toml`: + + `{field} = {value!r}` + + According to the spec (see the link below), however, setuptools CANNOT + consider this value unless `{field}` is listed as `dynamic`. + + https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ + + For the time being, `setuptools` will still consider the given value (as a + **transitional** measure), but please note that future releases of setuptools will + follow strictly the standard. + + To prevent this warning, you can list `{field}` under `dynamic` or alternatively + remove the `[project]` table from your file and rely entirely on other means of + configuration. + """ + _DUE_DATE = (2023, 10, 30) # Initially introduced in 27 May 2022 diff --git a/setuptools/config/_validate_pyproject/NOTICE b/setuptools/config/_validate_pyproject/NOTICE new file mode 100644 index 0000000000..286d29082e --- /dev/null +++ b/setuptools/config/_validate_pyproject/NOTICE @@ -0,0 +1,439 @@ +The code contained in this directory was automatically generated using the +following command: + + python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose + +Please avoid changing it manually. + + +You can report issues or suggest changes directly to `validate-pyproject` +(or to the relevant plugin repository) + +- https://github.com/abravalheri/validate-pyproject/issues + + +*** + +The following files include code from opensource projects +(either as direct copies or modified versions): + +- `fastjsonschema_exceptions.py`: + - project: `fastjsonschema` - licensed under BSD-3-Clause + (https://github.com/horejsek/python-fastjsonschema) +- `extra_validations.py` and `format.py`, `error_reporting.py`: + - project: `validate-pyproject` - licensed under MPL-2.0 + (https://github.com/abravalheri/validate-pyproject) + + +Additionally the following files are automatically generated by tools provided +by the same projects: + +- `__init__.py` +- `fastjsonschema_validations.py` + +The relevant copyright notes and licenses are included below. + + +*** + +`fastjsonschema` +================ + +Copyright (c) 2018, Michal Horejsek +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ + Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + + Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +*** + +`validate-pyproject` +==================== + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. 
"Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. 
Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. 
Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. 
Litigation
+
+  Any litigation relating to this License may be brought only in the courts
+  of a jurisdiction where the defendant maintains its principal place of
+  business and such litigation shall be governed by laws of that
+  jurisdiction, without reference to its conflict-of-law provisions. Nothing
+  in this Section shall prevent a party's ability to bring cross-claims or
+  counter-claims.
+
+9. Miscellaneous
+
+  This License represents the complete agreement concerning the subject
+  matter hereof. If any provision of this License is held to be
+  unenforceable, such provision shall be reformed only to the extent
+  necessary to make it enforceable. Any law or regulation which provides that
+  the language of a contract shall be construed against the drafter shall not
+  be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+      Mozilla Foundation is the license steward. Except as provided in Section
+      10.3, no one other than the license steward has the right to modify or
+      publish new versions of this License. Each version will be given a
+      distinguishing version number.
+
+10.2. Effect of New Versions
+
+      You may distribute the Covered Software under the terms of the version
+      of the License under which You originally received the Covered Software,
+      or under the terms of any subsequent version published by the license
+      steward.
+
+10.3. Modified Versions
+
+      If you create software not governed by this License, and you want to
+      create a new license for such software, you may create and use a
+      modified version of this License if you rename the license and remove
+      any references to the name of the license steward (except to note that
+      such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+      Licenses If You choose to distribute Source Code Form that is
+      Incompatible With Secondary Licenses under the terms of this version of
+      the License, the notice described in Exhibit B of this License must be
+      attached.
+
+Exhibit A - Source Code Form License Notice
+
+      This Source Code Form is subject to the
+      terms of the Mozilla Public License, v.
+      2.0. If a copy of the MPL was not
+      distributed with this file, You can
+      obtain one at
+      https://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+      This Source Code Form is "Incompatible
+      With Secondary Licenses", as defined by
+      the Mozilla Public License, v. 2.0.
+
diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py
new file mode 100644
index 0000000000..dbe6cb4ca4
--- /dev/null
+++ b/setuptools/config/_validate_pyproject/__init__.py
@@ -0,0 +1,34 @@
+from functools import reduce
+from typing import Any, Callable, Dict
+
+from . import formats
+from .error_reporting import detailed_errors, ValidationError
+from .extra_validations import EXTRA_VALIDATIONS
+from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException
+from .fastjsonschema_validations import validate as _validate
+
+__all__ = [
+    "validate",
+    "FORMAT_FUNCTIONS",
+    "EXTRA_VALIDATIONS",
+    "ValidationError",
+    "JsonSchemaException",
+    "JsonSchemaValueException",
+]
+
+
+FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
+    fn.__name__.replace("_", "-"): fn
+    for fn in formats.__dict__.values()
+    if callable(fn) and not fn.__name__.startswith("_")
+}
+
+
+def validate(data: Any) -> bool:
+    """Validate the given ``data`` object using JSON Schema.
+    This function raises ``ValidationError`` if ``data`` is invalid.
+    """
+    with detailed_errors():
+        _validate(data, custom_formats=FORMAT_FUNCTIONS)
+    reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
+    return True
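+
+
+# A minimal usage sketch (hypothetical, already-parsed ``pyproject.toml`` data):
+#
+#     validate({"project": {"name": "hello", "version": "1.0"}})  # -> True
+#
+# Invalid data raises ``ValidationError`` instead of returning ``False``.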
diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py
new file mode 100644
index 0000000000..f78e4838fb
--- /dev/null
+++ b/setuptools/config/_validate_pyproject/error_reporting.py
@@ -0,0 +1,318 @@
+import io
+import json
+import logging
+import os
+import re
+from contextlib import contextmanager
+from textwrap import indent, wrap
+from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast
+
+from .fastjsonschema_exceptions import JsonSchemaValueException
+
+_logger = logging.getLogger(__name__)
+
+_MESSAGE_REPLACEMENTS = {
+    "must be named by propertyName definition": "keys must be named by",
+    "one of contains definition": "at least one item that matches",
+    " same as const definition:": "",
+    "only specified items": "only items matching the definition",
+}
+
+_SKIP_DETAILS = (
+    "must not be empty",
+    "is always invalid",
+    "must not be there",
+)
+
+_NEED_DETAILS = {"anyOf", "oneOf", "contains", "propertyNames", "not", "items"}
+
+_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
+_IDENTIFIER = re.compile(r"^[\w_]+$", re.I)
+
+_TOML_JARGON = {
+    "object": "table",
+    "property": "key",
+    "properties": "keys",
+    "property names": "keys",
+}
+
+
+class ValidationError(JsonSchemaValueException):
+    """Report violations of a given JSON schema.
+
+    This class extends :exc:`~fastjsonschema.JsonSchemaValueException`
+    by adding the following properties:
+
+    - ``summary``: an improved version of the ``JsonSchemaValueException`` error
+      message (with only the necessary information)
+
+    - ``details``: more contextual information about the error, like the failing
+      schema itself and the value that violates the schema.
+
+    Depending on the verbosity level of the ``logging`` configuration, the
+    exception message will be just the ``summary`` (default) or a combination of
+    ``summary`` and ``details`` (when the logging level is set to
+    :obj:`logging.DEBUG`).
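+
+    For example (hypothetical failure), a ``project.name`` violating the
+    ``pep508-identifier`` format would be reported with a one-line ``summary``,
+    while ``details`` would additionally list the given value, the offending
+    rule and the relevant schema definition.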
+ """ + + summary = "" + details = "" + _original_message = "" + + @classmethod + def _from_jsonschema(cls, ex: JsonSchemaValueException): + formatter = _ErrorFormatting(ex) + obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule) + debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower() + if debug_code != "false": # pragma: no cover + obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__ + obj._original_message = ex.message + obj.summary = formatter.summary + obj.details = formatter.details + return obj + + +@contextmanager +def detailed_errors(): + try: + yield + except JsonSchemaValueException as ex: + raise ValidationError._from_jsonschema(ex) from None + + +class _ErrorFormatting: + def __init__(self, ex: JsonSchemaValueException): + self.ex = ex + self.name = f"`{self._simplify_name(ex.name)}`" + self._original_message = self.ex.message.replace(ex.name, self.name) + self._summary = "" + self._details = "" + + def __str__(self) -> str: + if _logger.getEffectiveLevel() <= logging.DEBUG and self.details: + return f"{self.summary}\n\n{self.details}" + + return self.summary + + @property + def summary(self) -> str: + if not self._summary: + self._summary = self._expand_summary() + + return self._summary + + @property + def details(self) -> str: + if not self._details: + self._details = self._expand_details() + + return self._details + + def _simplify_name(self, name): + x = len("data.") + return name[x:] if name.startswith("data.") else name + + def _expand_summary(self): + msg = self._original_message + + for bad, repl in _MESSAGE_REPLACEMENTS.items(): + msg = msg.replace(bad, repl) + + if any(substring in msg for substring in _SKIP_DETAILS): + return msg + + schema = self.ex.rule_definition + if self.ex.rule in _NEED_DETAILS and schema: + summary = _SummaryWriter(_TOML_JARGON) + return f"{msg}:\n\n{indent(summary(schema), ' ')}" + + return msg + + def _expand_details(self) -> str: + optional = [] + desc_lines = self.ex.definition.pop("$$description", []) + desc = self.ex.definition.pop("description", None) or " ".join(desc_lines) + if desc: + description = "\n".join( + wrap( + desc, + width=80, + initial_indent=" ", + subsequent_indent=" ", + break_long_words=False, + ) + ) + optional.append(f"DESCRIPTION:\n{description}") + schema = json.dumps(self.ex.definition, indent=4) + value = json.dumps(self.ex.value, indent=4) + defaults = [ + f"GIVEN VALUE:\n{indent(value, ' ')}", + f"OFFENDING RULE: {self.ex.rule!r}", + f"DEFINITION:\n{indent(schema, ' ')}", + ] + return "\n\n".join(optional + defaults) + + +class _SummaryWriter: + _IGNORE = {"description", "default", "title", "examples"} + + def __init__(self, jargon: Optional[Dict[str, str]] = None): + self.jargon: Dict[str, str] = jargon or {} + # Clarify confusing terms + self._terms = { + "anyOf": "at least one of the following", + "oneOf": "exactly one of the following", + "allOf": "all of the following", + "not": "(*NOT* the following)", + "prefixItems": f"{self._jargon('items')} (in order)", + "items": "items", + "contains": "contains at least one of", + "propertyNames": ( + f"non-predefined acceptable {self._jargon('property names')}" + ), + "patternProperties": f"{self._jargon('properties')} named via pattern", + "const": "predefined value", + "enum": "one of", + } + # Attributes that indicate that the definition is easy and can be done + # inline (e.g. 
+
+
+class _SummaryWriter:
+    _IGNORE = {"description", "default", "title", "examples"}
+
+    def __init__(self, jargon: Optional[Dict[str, str]] = None):
+        self.jargon: Dict[str, str] = jargon or {}
+        # Clarify confusing terms
+        self._terms = {
+            "anyOf": "at least one of the following",
+            "oneOf": "exactly one of the following",
+            "allOf": "all of the following",
+            "not": "(*NOT* the following)",
+            "prefixItems": f"{self._jargon('items')} (in order)",
+            "items": "items",
+            "contains": "contains at least one of",
+            "propertyNames": (
+                f"non-predefined acceptable {self._jargon('property names')}"
+            ),
+            "patternProperties": f"{self._jargon('properties')} named via pattern",
+            "const": "predefined value",
+            "enum": "one of",
+        }
+        # Attributes that indicate that the definition is simple enough to be
+        # rendered inline (e.g. string and number)
+        self._guess_inline_defs = [
+            "enum",
+            "const",
+            "maxLength",
+            "minLength",
+            "pattern",
+            "format",
+            "minimum",
+            "maximum",
+            "exclusiveMinimum",
+            "exclusiveMaximum",
+            "multipleOf",
+        ]
+
+    def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
+        if isinstance(term, list):
+            return [self.jargon.get(t, t) for t in term]
+        return self.jargon.get(term, term)
+
+    def __call__(
+        self,
+        schema: Union[dict, List[dict]],
+        prefix: str = "",
+        *,
+        _path: Sequence[str] = (),
+    ) -> str:
+        if isinstance(schema, list):
+            return self._handle_list(schema, prefix, _path)
+
+        filtered = self._filter_unnecessary(schema, _path)
+        simple = self._handle_simple_dict(filtered, _path)
+        if simple:
+            return f"{prefix}{simple}"
+
+        child_prefix = self._child_prefix(prefix, "  ")
+        item_prefix = self._child_prefix(prefix, "- ")
+        indent = len(prefix) * " "
+        with io.StringIO() as buffer:
+            for i, (key, value) in enumerate(filtered.items()):
+                child_path = [*_path, key]
+                line_prefix = prefix if i == 0 else indent
+                buffer.write(f"{line_prefix}{self._label(child_path)}:")
+                # ^  just the first item should receive the complete prefix
+                if isinstance(value, dict):
+                    filtered = self._filter_unnecessary(value, child_path)
+                    simple = self._handle_simple_dict(filtered, child_path)
+                    buffer.write(
+                        f" {simple}"
+                        if simple
+                        else f"\n{self(value, child_prefix, _path=child_path)}"
+                    )
+                elif isinstance(value, list) and (
+                    key != "type" or self._is_property(child_path)
+                ):
+                    children = self._handle_list(value, item_prefix, child_path)
+                    sep = " " if children.startswith("[") else "\n"
+                    buffer.write(f"{sep}{children}")
+                else:
+                    buffer.write(f" {self._value(value, child_path)}\n")
+            return buffer.getvalue()
+
+    def _is_unnecessary(self, path: Sequence[str]) -> bool:
+        if self._is_property(path) or not path:  # empty path => instruction @ root
+            return False
+        key = path[-1]
+        return any(key.startswith(k) for k in "$_") or key in self._IGNORE
+
+    def _filter_unnecessary(self, schema: dict, path: Sequence[str]):
+        return {
+            key: value
+            for key, value in schema.items()
+            if not self._is_unnecessary([*path, key])
+        }
+
+    def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
+        inline = any(p in value for p in self._guess_inline_defs)
+        simple = not any(isinstance(v, (list, dict)) for v in value.values())
+        if inline or simple:
+            return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
+        return None
+
+    def _handle_list(
+        self, schemas: list, prefix: str = "", path: Sequence[str] = ()
+    ) -> str:
+        if self._is_unnecessary(path):
+            return ""
+
+        repr_ = repr(schemas)
+        if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
+            return f"{repr_}\n"
+
+        item_prefix = self._child_prefix(prefix, "- ")
+        return "".join(
+            self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
+        )
+
+    def _is_property(self, path: Sequence[str]):
+        """Check if the given path can correspond to an arbitrarily named property"""
+        counter = 0
+        for key in path[-2::-1]:
+            if key not in {"properties", "patternProperties"}:
+                break
+            counter += 1
+
+        # If the counter is even, the path corresponds to a JSON Schema keyword;
+        # otherwise it can be any arbitrary string naming a property
+        return counter % 2 == 1
+
+    def _label(self, path: Sequence[str]) -> str:
+        *parents, key = path
+        if not self._is_property(path):
+            norm_key = _separate_terms(key)
+            return self._terms.get(key) or " ".join(self._jargon(norm_key))
+
+        if parents[-1] == "patternProperties":
"patternProperties": + return f"(regex {key!r})" + return repr(key) # property name + + def _value(self, value: Any, path: Sequence[str]) -> str: + if path[-1] == "type" and not self._is_property(path): + type_ = self._jargon(value) + return ( + f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_) + ) + return repr(value) + + def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]: + for key, value in schema.items(): + child_path = [*path, key] + yield f"{self._label(child_path)}: {self._value(value, child_path)}" + + def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str: + return len(parent_prefix) * " " + child_prefix + + +def _separate_terms(word: str) -> List[str]: + """ + >>> _separate_terms("FooBar-foo") + ['foo', 'bar', 'foo'] + """ + return [w.lower() for w in _CAMEL_CASE_SPLITTER.split(word) if w] diff --git a/setuptools/config/_validate_pyproject/extra_validations.py b/setuptools/config/_validate_pyproject/extra_validations.py new file mode 100644 index 0000000000..4130a421cf --- /dev/null +++ b/setuptools/config/_validate_pyproject/extra_validations.py @@ -0,0 +1,36 @@ +"""The purpose of this module is implement PEP 621 validations that are +difficult to express as a JSON Schema (or that are not supported by the current +JSON Schema library). +""" + +from typing import Mapping, TypeVar + +from .error_reporting import ValidationError + +T = TypeVar("T", bound=Mapping) + + +class RedefiningStaticFieldAsDynamic(ValidationError): + """According to PEP 621: + + Build back-ends MUST raise an error if the metadata specifies a field + statically as well as being listed in dynamic. + """ + + +def validate_project_dynamic(pyproject: T) -> T: + project_table = pyproject.get("project", {}) + dynamic = project_table.get("dynamic", []) + + for field in dynamic: + if field in project_table: + msg = f"You cannot provide a value for `project.{field}` and " + msg += "list it under `project.dynamic` at the same time" + name = f"data.project.{field}" + value = {field: project_table[field], "...": " # ...", "dynamic": dynamic} + raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621") + + return pyproject + + +EXTRA_VALIDATIONS = (validate_project_dynamic,) diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py b/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py new file mode 100644 index 0000000000..d2dddd6a10 --- /dev/null +++ b/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py @@ -0,0 +1,51 @@ +import re + + +SPLIT_RE = re.compile(r'[\.\[\]]+') + + +class JsonSchemaException(ValueError): + """ + Base exception of ``fastjsonschema`` library. + """ + + +class JsonSchemaValueException(JsonSchemaException): + """ + Exception raised by validation function. Available properties: + + * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``), + * invalid ``value`` (e.g. ``60``), + * ``name`` of a path in the data structure (e.g. ``data.property[index]``), + * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``), + * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``), + * ``rule`` which the ``value`` is breaking (e.g. ``maximum``) + * and ``rule_definition`` (e.g. ``42``). + + .. versionchanged:: 2.14.0 + Added all extra properties. 
+ """ + + def __init__(self, message, value=None, name=None, definition=None, rule=None): + super().__init__(message) + self.message = message + self.value = value + self.name = name + self.definition = definition + self.rule = rule + + @property + def path(self): + return [item for item in SPLIT_RE.split(self.name) if item != ''] + + @property + def rule_definition(self): + if not self.rule or not self.definition: + return None + return self.definition.get(self.rule) + + +class JsonSchemaDefinitionException(JsonSchemaException): + """ + Exception raised by generator of validation function. + """ diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py new file mode 100644 index 0000000000..52e18da24e --- /dev/null +++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py @@ -0,0 +1,1052 @@ +# noqa +# type: ignore +# flake8: noqa +# pylint: skip-file +# mypy: ignore-errors +# yapf: disable +# pylama:skip=1 + + +# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code *** + + +VERSION = "2.16.3" +import re +from .fastjsonschema_exceptions import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^.*$': re.compile('^.*$'), + '.+': re.compile('.+'), + '^.+$': re.compile('^.+$'), + 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported 
by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please 
notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "build-system" in data_keys: + data_keys.remove("build-system") + data__buildsystem = data["build-system"] + if not isinstance(data__buildsystem, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type') + data__buildsystem_is_dict = isinstance(data__buildsystem, dict) + if data__buildsystem_is_dict: + data__buildsystem_len = len(data__buildsystem) + if not all(prop in data__buildsystem for prop in ['requires']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required') + data__buildsystem_keys = set(data__buildsystem.keys()) + if "requires" in data__buildsystem_keys: + data__buildsystem_keys.remove("requires") + data__buildsystem__requires = data__buildsystem["requires"] + if not isinstance(data__buildsystem__requires, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires must be array", value=data__buildsystem__requires, name="" + (name_prefix or "data") + ".build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type') + data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple)) + if data__buildsystem__requires_is_list: + data__buildsystem__requires_len = len(data__buildsystem__requires) + for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires): + if not isinstance(data__buildsystem__requires_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + " must be string", value=data__buildsystem__requires_item, name="" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "build-backend" in data__buildsystem_keys: + data__buildsystem_keys.remove("build-backend") + data__buildsystem__buildbackend = data__buildsystem["build-backend"] + if not isinstance(data__buildsystem__buildbackend, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type') + if isinstance(data__buildsystem__buildbackend, str): + if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format') + if "backend-path" in data__buildsystem_keys: + data__buildsystem_keys.remove("backend-path") + data__buildsystem__backendpath = data__buildsystem["backend-path"] + if not isinstance(data__buildsystem__backendpath, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path must be array", value=data__buildsystem__backendpath, name="" + (name_prefix or "data") + ".build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type') + data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple)) + if data__buildsystem__backendpath_is_list: + data__buildsystem__backendpath_len = len(data__buildsystem__backendpath) + for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath): + if not isinstance(data__buildsystem__backendpath_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + " must be string", value=data__buildsystem__backendpath_item, name="" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + "", definition={'type': 'string', '$comment': 'Should be 
a path (TODO: enforce it with format?)'}, rule='type') + if data__buildsystem_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties') + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project") + if "tool" in data_keys: + data_keys.remove("tool") + data__tool = data["tool"] + if not isinstance(data__tool, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 
'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: 
dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this doesn't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type') + data__tool_is_dict = isinstance(data__tool, dict) + if data__tool_is_dict: + data__tool_keys = set(data__tool.keys()) + if "distutils" in data__tool_keys: + data__tool_keys.remove("distutils") + data__tool__distutils = data__tool["distutils"] + validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils") + if "setuptools" in data__tool_keys: + data__tool_keys.remove("setuptools") + data__tool__setuptools = data__tool["setuptools"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools") + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant on', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependencies for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported 
by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please 
notice this doesn't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant on', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependencies for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 
'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this doesn't work with wheels. 
See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 
'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "platforms" in data_keys: + data_keys.remove("platforms") + data__platforms = data["platforms"] + if not isinstance(data__platforms, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms must be array", value=data__platforms, name="" + (name_prefix or "data") + ".platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__platforms_is_list = isinstance(data__platforms, (list, tuple)) + if data__platforms_is_list: + data__platforms_len = len(data__platforms) + for data__platforms_x, data__platforms_item in enumerate(data__platforms): + if not isinstance(data__platforms_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + " must be string", value=data__platforms_item, name="" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "provides" in data_keys: + data_keys.remove("provides") + data__provides = data["provides"] + if not isinstance(data__provides, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides must be array", value=data__provides, name="" + (name_prefix or "data") + ".provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__provides_is_list = isinstance(data__provides, (list, tuple)) + if data__provides_is_list: + data__provides_len = len(data__provides) + for data__provides_x, data__provides_item in enumerate(data__provides): + if not isinstance(data__provides_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be string", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__provides_item, str): + if not custom_formats["pep508-identifier"](data__provides_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be pep508-identifier", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "obsoletes" in data_keys: + data_keys.remove("obsoletes") + data__obsoletes = data["obsoletes"] + if not isinstance(data__obsoletes, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes must be array", value=data__obsoletes, name="" + (name_prefix or "data") + ".obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple)) + if data__obsoletes_is_list: + data__obsoletes_len = len(data__obsoletes) + for 
data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes): + if not isinstance(data__obsoletes_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be string", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__obsoletes_item, str): + if not custom_formats["pep508-identifier"](data__obsoletes_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be pep508-identifier", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "zip-safe" in data_keys: + data_keys.remove("zip-safe") + data__zipsafe = data["zip-safe"] + if not isinstance(data__zipsafe, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type') + if "script-files" in data_keys: + data_keys.remove("script-files") + data__scriptfiles = data["script-files"] + if not isinstance(data__scriptfiles, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type') + data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, tuple)) + if data__scriptfiles_is_list: + data__scriptfiles_len = len(data__scriptfiles) + for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles): + if not isinstance(data__scriptfiles_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + " must be string", value=data__scriptfiles_item, name="" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "eager-resources" in data_keys: + data_keys.remove("eager-resources") + data__eagerresources = data["eager-resources"] + if not isinstance(data__eagerresources, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple)) + if data__eagerresources_is_list: + data__eagerresources_len = len(data__eagerresources) + for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources): + if not isinstance(data__eagerresources_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + 
".eager-resources[{data__eagerresources_x}]".format(**locals()) + " must be string", value=data__eagerresources_item, name="" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "packages" in data_keys: + data_keys.remove("packages") + data__packages = data["packages"] + data__packages_one_of_count1 = 0 + if data__packages_one_of_count1 < 2: + try: + if not isinstance(data__packages, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, rule='type') + data__packages_is_list = isinstance(data__packages, (list, tuple)) + if data__packages_is_list: + data__packages_len = len(data__packages) + for data__packages_x, data__packages_item in enumerate(data__packages): + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_package_name(data__packages_item, custom_formats, (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals())) + data__packages_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count1 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages") + data__packages_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf') + if "package-dir" in data_keys: + data_keys.remove("package-dir") + data__packagedir = data["package-dir"] + if not isinstance(data__packagedir, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type') + data__packagedir_is_dict = isinstance(data__packagedir, dict) + if data__packagedir_is_dict: + data__packagedir_keys = set(data__packagedir.keys()) + for data__packagedir_key, data__packagedir_val in data__packagedir.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedir_key): + if data__packagedir_key in data__packagedir_keys: + data__packagedir_keys.remove(data__packagedir_key) + if not isinstance(data__packagedir_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__packagedir_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties') + data__packagedir_len = len(data__packagedir) + if data__packagedir_len != 0: + data__packagedir_property_names = True + for data__packagedir_key in data__packagedir: + try: + data__packagedir_key_one_of_count2 = 0 + if data__packagedir_key_one_of_count2 < 2: + try: + if data__packagedir_key != "": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, 
name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const') + data__packagedir_key_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count2 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_package_name(data__packagedir_key, custom_formats, (name_prefix or "data") + ".package-dir") + data__packagedir_key_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedir_property_names = False + if not data__packagedir_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames') + if "package-data" in data_keys: + data_keys.remove("package-data") + data__packagedata = data["package-data"] + if not isinstance(data__packagedata, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__packagedata_is_dict = isinstance(data__packagedata, dict) + if data__packagedata_is_dict: + data__packagedata_keys = set(data__packagedata.keys()) + for data__packagedata_key, data__packagedata_val in data__packagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedata_key): + if data__packagedata_key in data__packagedata_keys: + data__packagedata_keys.remove(data__packagedata_key) + if not isinstance(data__packagedata_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + " must be array", value=data__packagedata_val, name="" + (name_prefix or "data") + 
".package-data.{data__packagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple)) + if data__packagedata_val_is_list: + data__packagedata_val_len = len(data__packagedata_val) + for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val): + if not isinstance(data__packagedata_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__packagedata_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__packagedata_len = len(data__packagedata) + if data__packagedata_len != 0: + data__packagedata_property_names = True + for data__packagedata_key in data__packagedata: + try: + data__packagedata_key_one_of_count3 = 0 + if data__packagedata_key_one_of_count3 < 2: + try: + if isinstance(data__packagedata_key, str): + if not custom_formats["python-module-name"](data__packagedata_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format') + data__packagedata_key_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count3 < 2: + try: + if data__packagedata_key != "*": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const') + data__packagedata_key_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count3 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedata_property_names = False + if not data__packagedata_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this 
option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "include-package-data" in data_keys: + data_keys.remove("include-package-data") + data__includepackagedata = data["include-package-data"] + if not isinstance(data__includepackagedata, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-package-data must be boolean", value=data__includepackagedata, name="" + (name_prefix or "data") + ".include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, rule='type') + if "exclude-package-data" in data_keys: + data_keys.remove("exclude-package-data") + data__excludepackagedata = data["exclude-package-data"] + if not isinstance(data__excludepackagedata, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict) + if data__excludepackagedata_is_dict: + data__excludepackagedata_keys = set(data__excludepackagedata.keys()) + for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key): + if data__excludepackagedata_key in data__excludepackagedata_keys: + data__excludepackagedata_keys.remove(data__excludepackagedata_key) + if not isinstance(data__excludepackagedata_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + " must be array", value=data__excludepackagedata_val, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple)) + if data__excludepackagedata_val_is_list: + data__excludepackagedata_val_len = len(data__excludepackagedata_val) + for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val): + if not isinstance(data__excludepackagedata_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + 
".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__excludepackagedata_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__excludepackagedata_len = len(data__excludepackagedata) + if data__excludepackagedata_len != 0: + data__excludepackagedata_property_names = True + for data__excludepackagedata_key in data__excludepackagedata: + try: + data__excludepackagedata_key_one_of_count4 = 0 + if data__excludepackagedata_key_one_of_count4 < 2: + try: + if isinstance(data__excludepackagedata_key, str): + if not custom_formats["python-module-name"](data__excludepackagedata_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format') + data__excludepackagedata_key_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count4 < 2: + try: + if data__excludepackagedata_key != "*": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const') + data__excludepackagedata_key_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count4 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__excludepackagedata_property_names = False + if not data__excludepackagedata_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "namespace-packages" in data_keys: + data_keys.remove("namespace-packages") + data__namespacepackages = data["namespace-packages"] + if not 
isinstance(data__namespacepackages, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type') + data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple)) + if data__namespacepackages_is_list: + data__namespacepackages_len = len(data__namespacepackages) + for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages): + if not isinstance(data__namespacepackages_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__namespacepackages_item, str): + if not custom_formats["python-module-name"](data__namespacepackages_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + if "py-modules" in data_keys: + data_keys.remove("py-modules") + data__pymodules = data["py-modules"] + if not isinstance(data__pymodules, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type') + data__pymodules_is_list = isinstance(data__pymodules, (list, tuple)) + if data__pymodules_is_list: + data__pymodules_len = len(data__pymodules) + for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules): + if not isinstance(data__pymodules_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__pymodules_item, str): + if not custom_formats["python-module-name"](data__pymodules_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + if "data-files" in data_keys: + data_keys.remove("data-files") + data__datafiles = data["data-files"] + if not isinstance(data__datafiles, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", 
value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__datafiles_is_dict = isinstance(data__datafiles, dict) + if data__datafiles_is_dict: + data__datafiles_keys = set(data__datafiles.keys()) + for data__datafiles_key, data__datafiles_val in data__datafiles.items(): + if REGEX_PATTERNS['^.*$'].search(data__datafiles_key): + if data__datafiles_key in data__datafiles_keys: + data__datafiles_keys.remove(data__datafiles_key) + if not isinstance(data__datafiles_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + " must be array", value=data__datafiles_val, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple)) + if data__datafiles_val_is_list: + data__datafiles_val_len = len(data__datafiles_val) + for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val): + if not isinstance(data__datafiles_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + " must be string", value=data__datafiles_val_item, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "cmdclass" in data_keys: + data_keys.remove("cmdclass") + data__cmdclass = data["cmdclass"] + if not isinstance(data__cmdclass, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass must be object", value=data__cmdclass, name="" + (name_prefix or "data") + ".cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type') + data__cmdclass_is_dict = isinstance(data__cmdclass, dict) + if data__cmdclass_is_dict: + data__cmdclass_keys = set(data__cmdclass.keys()) + for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items(): + if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key): + if data__cmdclass_key in data__cmdclass_keys: + data__cmdclass_keys.remove(data__cmdclass_key) + if not isinstance(data__cmdclass_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be string", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type') + if isinstance(data__cmdclass_val, str): + if not 
custom_formats["python-qualified-identifier"](data__cmdclass_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be python-qualified-identifier", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format') + if "license-files" in data_keys: + data_keys.remove("license-files") + data__licensefiles = data["license-files"] + if not isinstance(data__licensefiles, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type') + data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple)) + if data__licensefiles_is_list: + data__licensefiles_len = len(data__licensefiles) + for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles): + if not isinstance(data__licensefiles_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type') + data__dynamic_is_dict = isinstance(data__dynamic, dict) + if data__dynamic_is_dict: + data__dynamic_keys = set(data__dynamic.keys()) + if "version" in data__dynamic_keys: + data__dynamic_keys.remove("version") + data__dynamic__version = data__dynamic["version"] + data__dynamic__version_one_of_count5 = 0 + if data__dynamic__version_one_of_count5 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version") + data__dynamic__version_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count5 < 2: + try: + 
validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version") + data__dynamic__version_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count5 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf') + if "classifiers" in data__dynamic_keys: + data__dynamic_keys.remove("classifiers") + data__dynamic__classifiers = data__dynamic["classifiers"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers") + if "description" in data__dynamic_keys: + data__dynamic_keys.remove("description") + data__dynamic__description = data__dynamic["description"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description") + if "dependencies" in data__dynamic_keys: + data__dynamic_keys.remove("dependencies") + data__dynamic__dependencies = data__dynamic["dependencies"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__dependencies, custom_formats, (name_prefix or "data") + ".dynamic.dependencies") + if "entry-points" in data__dynamic_keys: + data__dynamic_keys.remove("entry-points") + data__dynamic__entrypoints = data__dynamic["entry-points"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points") + if "optional-dependencies" in data__dynamic_keys: + data__dynamic_keys.remove("optional-dependencies") + data__dynamic__optionaldependencies = data__dynamic["optional-dependencies"] + if not isinstance(data__dynamic__optionaldependencies, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be object", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from 
a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='type') + data__dynamic__optionaldependencies_is_dict = isinstance(data__dynamic__optionaldependencies, dict) + if data__dynamic__optionaldependencies_is_dict: + data__dynamic__optionaldependencies_keys = set(data__dynamic__optionaldependencies.keys()) + for data__dynamic__optionaldependencies_key, data__dynamic__optionaldependencies_val in data__dynamic__optionaldependencies.items(): + if REGEX_PATTERNS['.+'].search(data__dynamic__optionaldependencies_key): + if data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies_keys: + data__dynamic__optionaldependencies_keys.remove(data__dynamic__optionaldependencies_key) + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__optionaldependencies_val, custom_formats, (name_prefix or "data") + ".dynamic.optional-dependencies.{data__dynamic__optionaldependencies_key}".format(**locals())) + if data__dynamic__optionaldependencies_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must not contain "+str(data__dynamic__optionaldependencies_keys)+" properties", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='additionalProperties') + data__dynamic__optionaldependencies_len = len(data__dynamic__optionaldependencies) + if data__dynamic__optionaldependencies_len != 0: + data__dynamic__optionaldependencies_property_names = True + for data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies: + try: + if isinstance(data__dynamic__optionaldependencies_key, str): + if not custom_formats["python-identifier"](data__dynamic__optionaldependencies_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be python-identifier", value=data__dynamic__optionaldependencies_key, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'format': 'python-identifier'}, rule='format') + except JsonSchemaValueException: + data__dynamic__optionaldependencies_property_names = False + if not data__dynamic__optionaldependencies_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be named by propertyName definition", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 
'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='propertyNames') + if "readme" in data__dynamic_keys: + data__dynamic_keys.remove("readme") + data__dynamic__readme = data__dynamic["readme"] + data__dynamic__readme_any_of_count6 = 0 + if not data__dynamic__readme_any_of_count6: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme") + data__dynamic__readme_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count6: + try: + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_keys = set(data__dynamic__readme.keys()) + if "content-type" in data__dynamic__readme_keys: + data__dynamic__readme_keys.remove("content-type") + data__dynamic__readme__contenttype = data__dynamic__readme["content-type"] + if not isinstance(data__dynamic__readme__contenttype, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type') + data__dynamic__readme_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count6: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf') + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_len = len(data__dynamic__readme) + if not all(prop in data__dynamic__readme for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required') + if data__dynamic_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring 
builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``.', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``.', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded.', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in it.', "Please notice this doesn't work with wheels. 
See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 
'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required') + data_keys = set(data.keys()) + if "file" in data_keys: + data_keys.remove("file") + data__file = data["file"] + data__file_one_of_count7 = 0 + if data__file_one_of_count7 < 2: + try: + if not isinstance(data__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'string'}, rule='type') + data__file_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data__file_one_of_count7 < 2: + try: + if not isinstance(data__file, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__file_is_list = isinstance(data__file, (list, tuple)) + if data__file_is_list: + data__file_len = len(data__file) + for data__file_x, data__file_item in enumerate(data__file): + if not isinstance(data__file_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + data__file_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data__file_one_of_count7 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", 
definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['attr']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required') + data_keys = set(data.keys()) + if "attr" in data_keys: + data_keys.remove("attr") + data__attr = data["attr"] + if not isinstance(data__attr, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "find" in data_keys: + data_keys.remove("find") + data__find = data["find"] + if not isinstance(data__find, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must be object", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type') + data__find_is_dict = isinstance(data__find, dict) + if data__find_is_dict: + data__find_keys = set(data__find.keys()) + if "where" in data__find_keys: + data__find_keys.remove("where") + data__find__where = data__find["where"] + if not isinstance(data__find__where, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where must be array", value=data__find__where, name="" + (name_prefix or "data") + ".find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__find__where_is_list = isinstance(data__find__where, (list, tuple)) + if data__find__where_is_list: + data__find__where_len = len(data__find__where) + for data__find__where_x, data__find__where_item in enumerate(data__find__where): + if not isinstance(data__find__where_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + " must be string", value=data__find__where_item, name="" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "exclude" in data__find_keys: + data__find_keys.remove("exclude") + data__find__exclude = data__find["exclude"] + if not isinstance(data__find__exclude, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude must be array", value=data__find__exclude, name="" + (name_prefix or "data") + ".find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple)) + if data__find__exclude_is_list: + data__find__exclude_len = len(data__find__exclude) + for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude): + if not isinstance(data__find__exclude_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + " must be string", value=data__find__exclude_item, name="" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "include" in data__find_keys: + data__find_keys.remove("include") + data__find__include = data__find["include"] + if not isinstance(data__find__include, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include must be array", value=data__find__include, name="" + (name_prefix or "data") + ".find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__include_is_list = isinstance(data__find__include, (list, tuple)) + if data__find__include_is_list: + data__find__include_len = len(data__find__include) + for data__find__include_x, data__find__include_item in enumerate(data__find__include): + if not isinstance(data__find__include_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + " must be string", value=data__find__include_item, name="" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "namespaces" in data__find_keys: + data__find_keys.remove("namespaces") + data__find__namespaces = data__find["namespaces"] + if not isinstance(data__find__namespaces, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.namespaces must be boolean", value=data__find__namespaces, name="" + (name_prefix or "data") + ".find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type') + if data__find_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must not contain "+str(data__find_keys)+" properties", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_package_name(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, rule='type') + data_any_of_count8 = 0 + if not data_any_of_count8: + try: + if isinstance(data, str): + if not custom_formats["python-module-name"](data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name", value=data, name="" + (name_prefix or "data") + "", definition={'format': 'python-module-name'}, rule='format') + data_any_of_count8 += 1 + except JsonSchemaValueException: pass + if not data_any_of_count8: + try: + if isinstance(data, str): + if not custom_formats["pep561-stub-name"](data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep561-stub-name", value=data, name="" + (name_prefix or "data") + "", definition={'format': 'pep561-stub-name'}, rule='format') + data_any_of_count8 += 1 + except JsonSchemaValueException: pass + if not data_any_of_count8: + raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, rule='anyOf') + return data + +def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + 
(name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "global" in data_keys: + data_keys.remove("global") + data__global = data["global"] + if not isinstance(data__global, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".global must be object", value=data__global, name="" + (name_prefix or "data") + ".global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type') + for data_key, data_val in data.items(): + if REGEX_PATTERNS['.+'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__name, str): + if not custom_formats["pep508-identifier"](data__name): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be pep508-identifier", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. 
MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format') + if "version" in data_keys: + data_keys.remove("version") + data__version = data["version"] + if not isinstance(data__version, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type') + if isinstance(data__version, str): + if not custom_formats["pep440"](data__version): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be pep440", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format') + if "description" in data_keys: + data_keys.remove("description") + data__description = data["description"] + if not isinstance(data__description, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string', '$$description': ['The `summary description of the project', '`_']}, rule='type') + if "readme" in data_keys: + data_keys.remove("readme") + data__readme = data["readme"] + data__readme_one_of_count9 = 0 + if data__readme_one_of_count9 < 2: + try: + if not isinstance(data__readme, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type') + data__readme_one_of_count9 += 1 + except JsonSchemaValueException: pass + if data__readme_one_of_count9 < 2: + try: + if not isinstance(data__readme, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type') + data__readme_any_of_count10 = 0 + if not data__readme_any_of_count10: + try: + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "file" in data__readme_keys: + data__readme_keys.remove("file") + data__readme__file = data__readme["file"] + if not isinstance(data__readme__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type') + data__readme_any_of_count10 += 1 + except JsonSchemaValueException: pass + if not data__readme_any_of_count10: + try: + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['text']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "text" in data__readme_keys: + data__readme_keys.remove("text") + data__readme__text = data__readme["text"] + if not isinstance(data__readme__text, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type') + data__readme_any_of_count10 += 1 + except JsonSchemaValueException: pass + if not data__readme_any_of_count10: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf') + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['content-type']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. 
``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "content-type" in data__readme_keys: + data__readme_keys.remove("content-type") + data__readme__contenttype = data__readme["content-type"] + if not isinstance(data__readme__contenttype, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type') + data__readme_one_of_count9 += 1 + except JsonSchemaValueException: pass + if data__readme_one_of_count9 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count9) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf') + if "requires-python" in data_keys: + data_keys.remove("requires-python") + data__requirespython = data["requires-python"] + if not isinstance(data__requirespython, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be string", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, rule='type') + if isinstance(data__requirespython, str): + if not custom_formats["pep508-versionspec"](data__requirespython): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be pep508-versionspec", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, rule='format') + if "license" in data_keys: + data_keys.remove("license") + data__license = data["license"] + data__license_one_of_count11 = 0 + if data__license_one_of_count11 < 2: + try: + data__license_is_dict = isinstance(data__license, dict) + if data__license_is_dict: + data__license_len = len(data__license) + if not all(prop in data__license for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required') + data__license_keys = set(data__license.keys()) + if "file" in data__license_keys: + data__license_keys.remove("file") + data__license__file = data__license["file"] + if not isinstance(data__license__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type') + data__license_one_of_count11 += 1 + except JsonSchemaValueException: pass + if data__license_one_of_count11 < 2: + try: + data__license_is_dict = isinstance(data__license, dict) + if data__license_is_dict: + data__license_len = len(data__license) + if not all(prop in data__license for prop in ['text']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required') + data__license_keys = set(data__license.keys()) + if "text" in data__license_keys: + data__license_keys.remove("text") + data__license__text = data__license["text"] + if not isinstance(data__license__text, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', 
'$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type') + data__license_one_of_count11 += 1 + except JsonSchemaValueException: pass + if data__license_one_of_count11 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count11) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf') + if "authors" in data_keys: + data_keys.remove("authors") + data__authors = data["authors"] + if not isinstance(data__authors, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authors must be array", value=data__authors, name="" + (name_prefix or "data") + ".authors", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type') + data__authors_is_list = isinstance(data__authors, (list, tuple)) + if data__authors_is_list: + data__authors_len = len(data__authors) + for data__authors_x, data__authors_item in enumerate(data__authors): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]".format(**locals())) + if "maintainers" in data_keys: + data_keys.remove("maintainers") + data__maintainers = data["maintainers"] + if not isinstance(data__maintainers, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maintainers must be array", value=data__maintainers, name="" + (name_prefix or "data") + ".maintainers", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type') + data__maintainers_is_list = isinstance(data__maintainers, (list, tuple)) + if data__maintainers_is_list: + data__maintainers_len = len(data__maintainers) + for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]".format(**locals())) + if "keywords" in data_keys: + data_keys.remove("keywords") + data__keywords = data["keywords"] + if not isinstance(data__keywords, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords must be array", value=data__keywords, name="" + (name_prefix or "data") + ".keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type') + data__keywords_is_list = isinstance(data__keywords, (list, tuple)) + if data__keywords_is_list: + data__keywords_len = len(data__keywords) + for data__keywords_x, data__keywords_item in enumerate(data__keywords): + if not isinstance(data__keywords_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + " must be string", value=data__keywords_item, name="" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "classifiers" in data_keys: + data_keys.remove("classifiers") + data__classifiers = data["classifiers"] + if not isinstance(data__classifiers, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers must be array", value=data__classifiers, name="" + (name_prefix or "data") + ".classifiers", definition={'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, rule='type') + data__classifiers_is_list = isinstance(data__classifiers, (list, tuple)) + if data__classifiers_is_list: + data__classifiers_len = len(data__classifiers) + for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers): + if not isinstance(data__classifiers_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be string", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, rule='type') + if isinstance(data__classifiers_item, str): + if not custom_formats["trove-classifier"](data__classifiers_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be trove-classifier", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 
'trove-classifier', 'description': '`PyPI classifier `_.'}, rule='format') + if "urls" in data_keys: + data_keys.remove("urls") + data__urls = data["urls"] + if not isinstance(data__urls, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must be object", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type') + data__urls_is_dict = isinstance(data__urls, dict) + if data__urls_is_dict: + data__urls_keys = set(data__urls.keys()) + for data__urls_key, data__urls_val in data__urls.items(): + if REGEX_PATTERNS['^.+$'].search(data__urls_key): + if data__urls_key in data__urls_keys: + data__urls_keys.remove(data__urls_key) + if not isinstance(data__urls_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be string", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='type') + if isinstance(data__urls_val, str): + if not custom_formats["url"](data__urls_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be url", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='format') + if data__urls_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties') + if "scripts" in data_keys: + data_keys.remove("scripts") + data__scripts = data["scripts"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts") + if "gui-scripts" in data_keys: + data_keys.remove("gui-scripts") + data__guiscripts = data["gui-scripts"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts") + if "entry-points" in data_keys: + data_keys.remove("entry-points") + data__entrypoints = data["entry-points"] + data__entrypoints_is_dict = isinstance(data__entrypoints, dict) + if data__entrypoints_is_dict: + data__entrypoints_keys = set(data__entrypoints.keys()) + for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items(): + if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key): + if data__entrypoints_key in data__entrypoints_keys: + data__entrypoints_keys.remove(data__entrypoints_key) + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}".format(**locals())) + if data__entrypoints_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain 
"+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties') + data__entrypoints_len = len(data__entrypoints) + if data__entrypoints_len != 0: + data__entrypoints_property_names = True + for data__entrypoints_key in data__entrypoints: + try: + if isinstance(data__entrypoints_key, str): + if not custom_formats["python-entrypoint-group"](data__entrypoints_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="" + (name_prefix or "data") + ".entry-points", definition={'format': 'python-entrypoint-group'}, rule='format') + except JsonSchemaValueException: + data__entrypoints_property_names = False + if not data__entrypoints_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be named by propertyName definition", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='propertyNames') + if "dependencies" in data_keys: + data_keys.remove("dependencies") + data__dependencies = data["dependencies"] + if not isinstance(data__dependencies, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependencies must be array", value=data__dependencies, name="" + (name_prefix or "data") + ".dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type') + data__dependencies_is_list = isinstance(data__dependencies, (list, tuple)) + if data__dependencies_is_list: + data__dependencies_len = len(data__dependencies) + for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]".format(**locals())) + if "optional-dependencies" in data_keys: + data_keys.remove("optional-dependencies") + data__optionaldependencies = data["optional-dependencies"] + if not isinstance(data__optionaldependencies, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be object", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type') + data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict) + if data__optionaldependencies_is_dict: + data__optionaldependencies_keys = set(data__optionaldependencies.keys()) + for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items(): + if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key): + if data__optionaldependencies_key in data__optionaldependencies_keys: + data__optionaldependencies_keys.remove(data__optionaldependencies_key) + if not isinstance(data__optionaldependencies_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + " must be array", value=data__optionaldependencies_val, name="" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type') + data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple)) + if data__optionaldependencies_val_is_list: + data__optionaldependencies_val_len = len(data__optionaldependencies_val) + for data__optionaldependencies_val_x, 
data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]".format(**locals())) + if data__optionaldependencies_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties') + data__optionaldependencies_len = len(data__optionaldependencies) + if data__optionaldependencies_len != 0: + data__optionaldependencies_property_names = True + for data__optionaldependencies_key in data__optionaldependencies: + try: + if isinstance(data__optionaldependencies_key, str): + if not custom_formats["pep508-identifier"](data__optionaldependencies_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format') + except JsonSchemaValueException: + data__optionaldependencies_property_names = False + if not data__optionaldependencies_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='propertyNames') + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type') + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_len = len(data__dynamic) + for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic): + if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 
'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since 
``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties') + try: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['dynamic']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['dynamic'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required') + data_keys = set(data.keys()) + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_contains = False + for data__dynamic_key in data__dynamic: + try: + if data__dynamic_key != "version": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const') + data__dynamic_contains = True + break + except JsonSchemaValueException: pass + if not data__dynamic_contains: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains') + except JsonSchemaValueException: pass + else: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is 
listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not') + except JsonSchemaValueException: + pass + else: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['version']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type') + if isinstance(data, str): + if not custom_formats["pep508"](data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key, data_val in data.items(): + if REGEX_PATTERNS['^.+$'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type') + if isinstance(data_val, str): + if not custom_formats["python-entrypoint-reference"](data_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be python-entrypoint-reference", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties') + data_len = len(data) + if data_len != 0: + data_property_names = True + for data_key in data: + try: + if isinstance(data_key, str): + if not custom_formats["python-entrypoint-name"](data_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-entrypoint-name", value=data_key, name="" + (name_prefix or "data") + "", definition={'format': 'python-entrypoint-name'}, rule='format') + except JsonSchemaValueException: + data_property_names = False + if not data_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type') + if "email" in data_keys: + data_keys.remove("email") + data__email = data["email"] + if not isinstance(data__email, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type') + if isinstance(data__email, str): + if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be idn-email", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='additionalProperties') + return data \ No newline at end of file diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py new file mode 100644 index 0000000000..486d52602c --- /dev/null +++ b/setuptools/config/_validate_pyproject/formats.py @@ -0,0 +1,275 @@ +import logging +import os +import re +import string +import typing +from itertools import chain as _chain + +if typing.TYPE_CHECKING: + from typing_extensions import Literal + +_logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------------- +# PEP 440 + +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre>
                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
+
+
+def pep440(version: str) -> bool:
+    return VERSION_REGEX.match(version) is not None
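+
+# Illustrative doctest-style examples (added for exposition; version strings
+# follow the PEP 440 grammar encoded above):
+#
+#     >>> pep440("1.0.post1")
+#     True
+#     >>> pep440("1.0.0rc2.dev3+local.tag")
+#     True
+#     >>> pep440("not-a-version")
+#     False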
+
+
+# -------------------------------------------------------------------------------------
+# PEP 508
+
+PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
+PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
+
+
+def pep508_identifier(name: str) -> bool:
+    return PEP508_IDENTIFIER_REGEX.match(name) is not None
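+
+# Illustrative examples (added for exposition; identifiers must start and end
+# with a letter or digit):
+#
+#     >>> pep508_identifier("my-package_name.ext")
+#     True
+#     >>> pep508_identifier("-leading-dash")
+#     False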
+
+
+try:
+    try:
+        from packaging import requirements as _req
+    except ImportError:  # pragma: no cover
+        # let's try setuptools vendored version
+        from setuptools._vendor.packaging import requirements as _req  # type: ignore
+
+    def pep508(value: str) -> bool:
+        try:
+            _req.Requirement(value)
+            return True
+        except _req.InvalidRequirement:
+            return False
+
+except ImportError:  # pragma: no cover
+    _logger.warning(
+        "Could not find an installation of `packaging`. Requirements, dependencies and "
+        "versions might not be validated. "
+        "To enforce validation, please install `packaging`."
+    )
+
+    def pep508(value: str) -> bool:
+        return True
+
+
+def pep508_versionspec(value: str) -> bool:
+    """Expression that can be used to specify/lock versions (including ranges)"""
+    if any(c in value for c in (";", "]", "@")):
+        # In PEP 508:
+        # conditional markers, extras and URL specs are not included in the
+        # versionspec
+        return False
+    # Let's pretend we have a dependency called `requirement` with the given
+    # version spec, then we can re-use the pep508 function for validation:
+    return pep508(f"requirement{value}")
+
+
+# -------------------------------------------------------------------------------------
+# PEP 517
+
+
+def pep517_backend_reference(value: str) -> bool:
+    module, _, obj = value.partition(":")
+    identifiers = (i.strip() for i in _chain(module.split("."), obj.split(".")))
+    return all(python_identifier(i) for i in identifiers if i)
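+
+# Illustrative examples (added for exposition; these mirror typical
+# ``build-backend`` values in ``pyproject.toml``):
+#
+#     >>> pep517_backend_reference("setuptools.build_meta")
+#     True
+#     >>> pep517_backend_reference("flit_core.buildapi:main")
+#     True
+#     >>> pep517_backend_reference("not a module")
+#     False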
+
+
+# -------------------------------------------------------------------------------------
+# Classifiers - PEP 301
+
+
+def _download_classifiers() -> str:
+    import ssl
+    from email.message import Message
+    from urllib.request import urlopen
+
+    url = "https://pypi.org/pypi?:action=list_classifiers"
+    context = ssl.create_default_context()
+    with urlopen(url, context=context) as response:
+        headers = Message()
+        headers["content_type"] = response.getheader("content-type", "text/plain")
+        return response.read().decode(headers.get_param("charset", "utf-8"))
+
+
+class _TroveClassifier:
+    """The ``trove_classifiers`` package is the official way of validating classifiers,
+    however this package might not always be available.
+    As a workaround we can still download a list from PyPI.
+    We also don't want to be overly strict about it, so silently skipping validation
+    is an option (classifiers will be validated anyway during the upload to PyPI).
+    """
+
+    downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]
+
+    def __init__(self):
+        # ``downloaded``: None => not cached yet, False => cache not available
+        self.downloaded = None
+        self._skip_download = False
+        self.__name__ = "trove_classifier"  # Emulate a public function
+
+    def _disable_download(self):
+        # Private API. Only setuptools is expected to use it.
+        self._skip_download = True
+
+    def __call__(self, value: str) -> bool:
+        if self.downloaded is False or self._skip_download is True:
+            return True
+
+        if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"):
+            self.downloaded = False
+            msg = (
+                "Install ``trove-classifiers`` to ensure proper validation. "
+                "Skipping download of classifiers list from PyPI (NO_NETWORK)."
+            )
+            _logger.debug(msg)
+            return True
+
+        if self.downloaded is None:
+            msg = (
+                "Install ``trove-classifiers`` to ensure proper validation. "
+                "Meanwhile a list of classifiers will be downloaded from PyPI."
+            )
+            _logger.debug(msg)
+            try:
+                self.downloaded = set(_download_classifiers().splitlines())
+            except Exception:
+                self.downloaded = False
+                _logger.debug("Problem with download, skipping validation")
+                return True
+
+        return value in self.downloaded or value.lower().startswith("private ::")
+
+
+try:
+    from trove_classifiers import classifiers as _trove_classifiers
+
+    def trove_classifier(value: str) -> bool:
+        return value in _trove_classifiers or value.lower().startswith("private ::")
+
+except ImportError:  # pragma: no cover
+    trove_classifier = _TroveClassifier()
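+
+# Either way ``trove_classifier`` ends up being a callable. Illustrative usage
+# (results may depend on network access or the ``trove-classifiers`` package):
+#
+#     >>> trove_classifier("Programming Language :: Python :: 3")  # doctest: +SKIP
+#     True
+#     >>> trove_classifier("Private :: Do Not Upload")  # doctest: +SKIP
+#     True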
+
+
+# -------------------------------------------------------------------------------------
+# Stub packages - PEP 561
+
+
+def pep561_stub_name(value: str) -> bool:
+    top, *children = value.split(".")
+    if not top.endswith("-stubs"):
+        return False
+    return python_module_name(".".join([top[: -len("-stubs")], *children]))
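+
+# Illustrative examples (added for exposition; only the top-level package needs
+# the ``-stubs`` suffix):
+#
+#     >>> pep561_stub_name("pandas-stubs")
+#     True
+#     >>> pep561_stub_name("pandas-stubs.io")
+#     True
+#     >>> pep561_stub_name("pandas")
+#     False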
+
+
+# -------------------------------------------------------------------------------------
+# Non-PEP related
+
+
+def url(value: str) -> bool:
+    from urllib.parse import urlparse
+
+    try:
+        parts = urlparse(value)
+        if not parts.scheme:
+            _logger.warning(
+                "For maximum compatibility please make sure to include a "
+                "`scheme` prefix in your URL (e.g. 'http://'). "
+                f"Given value: {value}"
+            )
+            if not (value.startswith("/") or value.startswith("\\") or "@" in value):
+                parts = urlparse(f"http://{value}")
+
+        return bool(parts.scheme and parts.netloc)
+    except Exception:
+        return False
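+
+# Illustrative examples (added for exposition; a missing scheme triggers a
+# warning and a retry with an ``http://`` prefix):
+#
+#     >>> url("https://pypi.org")
+#     True
+#     >>> url("pypi.org")  # warns, then validates as http://pypi.org
+#     True
+#     >>> url("/just/a/path")
+#     False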
+
+
+# https://packaging.python.org/specifications/entry-points/
+ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
+ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
+RECOMMENDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
+RECOMMENDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMENDED_ENTRYPOINT_PATTERN}$", re.I)
+ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
+ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
+
+
+def python_identifier(value: str) -> bool:
+    return value.isidentifier()
+
+
+def python_qualified_identifier(value: str) -> bool:
+    if value.startswith(".") or value.endswith("."):
+        return False
+    return all(python_identifier(m) for m in value.split("."))
+
+
+def python_module_name(value: str) -> bool:
+    return python_qualified_identifier(value)
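+
+# Illustrative examples (added for exposition):
+#
+#     >>> python_module_name("setuptools.config.expand")
+#     True
+#     >>> python_module_name("setuptools..config")
+#     False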
+
+
+def python_entrypoint_group(value: str) -> bool:
+    return ENTRYPOINT_GROUP_REGEX.match(value) is not None
+
+
+def python_entrypoint_name(value: str) -> bool:
+    if not ENTRYPOINT_REGEX.match(value):
+        return False
+    if not RECOMMENDED_ENTRYPOINT_REGEX.match(value):
+        msg = f"Entry point `{value}` does not follow recommended pattern: "
+        msg += RECOMMENDED_ENTRYPOINT_PATTERN
+        _logger.warning(msg)
+    return True
+
+
+def python_entrypoint_reference(value: str) -> bool:
+    module, _, rest = value.partition(":")
+    if "[" in rest:
+        obj, _, extras_ = rest.partition("[")
+        if extras_.strip()[-1] != "]":
+            return False
+        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
+        if not all(pep508_identifier(e) for e in extras):
+            return False
+        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
+    else:
+        obj = rest
+
+    module_parts = module.split(".")
+    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
+    return all(python_identifier(i.strip()) for i in identifiers)
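+
+# For illustration (hypothetical values):
+#     python_entrypoint_reference("pkg.cli:main")          -> True
+#     python_entrypoint_reference("pkg.cli:main [extra]")  -> True, but warns
+#         that using extras for entry points is not recommended
+#     python_entrypoint_reference("pkg..cli:main")         -> False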
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
new file mode 100644
index 0000000000..309888437d
--- /dev/null
+++ b/setuptools/config/expand.py
@@ -0,0 +1,462 @@
+"""Utility functions to expand configuration directives or special values
+(such glob patterns).
+
+We can split the process of interpreting configuration files into 2 steps:
+
+1. Parsing the file contents from strings to value objects
+   that can be understood by Python (for example a string with a comma
+   separated list of keywords into an actual Python list of strings).
+
+2. The expansion (or post-processing) of these values according to the
+   semantics ``setuptools`` assigns to them (for example a configuration field
+   with the ``file:`` directive should be expanded from a list of file paths to
+   a single string with the contents of those files concatenated).
+
+This module focuses on the second step, and therefore allows sharing the
+expansion functions among several configuration file formats.
+
+**PRIVATE MODULE**: API reserved for setuptools internal usage only.
+"""
+import ast
+import importlib
+import io
+import os
+import pathlib
+import sys
+from glob import iglob
+from configparser import ConfigParser
+from importlib.machinery import ModuleSpec
+from itertools import chain
+from typing import (
+    TYPE_CHECKING,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+    cast
+)
+from pathlib import Path
+from types import ModuleType
+
+from distutils.errors import DistutilsOptionError
+
+from .._path import same_path as _same_path
+from ..warnings import SetuptoolsWarning
+
+if TYPE_CHECKING:
+    from setuptools.dist import Distribution  # noqa
+    from setuptools.discovery import ConfigDiscovery  # noqa
+    from distutils.dist import DistributionMetadata  # noqa
+
+chain_iter = chain.from_iterable
+_Path = Union[str, os.PathLike]
+_K = TypeVar("_K")
+_V = TypeVar("_V", covariant=True)
+
+
+class StaticModule:
+    """Proxy to a module object that avoids executing arbitrary code."""
+
+    def __init__(self, name: str, spec: ModuleSpec):
+        module = ast.parse(pathlib.Path(spec.origin).read_bytes())
+        vars(self).update(locals())
+        del self.self
+
+    def _find_assignments(self) -> Iterator[Tuple[ast.AST, ast.AST]]:
+        for statement in self.module.body:
+            if isinstance(statement, ast.Assign):
+                yield from ((target, statement.value) for target in statement.targets)
+            elif isinstance(statement, ast.AnnAssign) and statement.value:
+                yield (statement.target, statement.value)
+
+    def __getattr__(self, attr):
+        """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
+        try:
+            return next(
+                ast.literal_eval(value)
+                for target, value in self._find_assignments()
+                if isinstance(target, ast.Name) and target.id == attr
+            )
+        except Exception as e:
+            raise AttributeError(f"{self.name} has no attribute {attr}") from e
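+
+# Hypothetical usage sketch: if ``mypkg/__init__.py`` contains a line like
+# ``__version__ = "1.2.3"``, the value can be retrieved without importing
+# (and therefore without executing) the module:
+#
+#     spec = importlib.util.find_spec("mypkg")
+#     StaticModule("mypkg", spec).__version__   # -> "1.2.3"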
+
+
+def glob_relative(
+    patterns: Iterable[str], root_dir: Optional[_Path] = None
+) -> List[str]:
+    """Expand the list of glob patterns, but preserving relative paths.
+
+    :param list[str] patterns: List of glob patterns
+    :param str root_dir: Path to which globs should be relative
+                         (current directory by default)
+    :rtype: list
+    """
+    glob_characters = {'*', '?', '[', ']', '{', '}'}
+    expanded_values = []
+    root_dir = root_dir or os.getcwd()
+    for value in patterns:
+
+        # Has globby characters?
+        if any(char in value for char in glob_characters):
+            # then expand the glob pattern while keeping paths *relative*:
+            glob_path = os.path.abspath(os.path.join(root_dir, value))
+            expanded_values.extend(sorted(
+                os.path.relpath(path, root_dir).replace(os.sep, "/")
+                for path in iglob(glob_path, recursive=True)))
+
+        else:
+            # take the value as-is
+            path = os.path.relpath(value, root_dir).replace(os.sep, "/")
+            expanded_values.append(path)
+
+    return expanded_values
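+
+# Illustrative call (results depend on the files actually present):
+#     glob_relative(["docs/*.rst"], root_dir="/path/to/project")
+#     -> e.g. ["docs/api.rst", "docs/index.rst"]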
+
+
+def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str:
+    """Return the content of the files concatenated using ``\n`` as str
+
+    This function is sandboxed and won't reach anything outside ``root_dir``
+
+    (By default ``root_dir`` is the current directory).
+    """
+    from setuptools.extern.more_itertools import always_iterable
+
+    root_dir = os.path.abspath(root_dir or os.getcwd())
+    _filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
+    return '\n'.join(
+        _read_file(path)
+        for path in _filter_existing_files(_filepaths)
+        if _assert_local(path, root_dir)
+    )
+
+
+def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
+    for path in filepaths:
+        if os.path.isfile(path):
+            yield path
+        else:
+            SetuptoolsWarning.emit(f"File {path!r} cannot be found")
+
+
+def _read_file(filepath: Union[bytes, _Path]) -> str:
+    with io.open(filepath, encoding='utf-8') as f:
+        return f.read()
+
+
+def _assert_local(filepath: _Path, root_dir: str):
+    if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
+        msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
+        raise DistutilsOptionError(msg)
+
+    return True
+
+
+def read_attr(
+    attr_desc: str,
+    package_dir: Optional[Mapping[str, str]] = None,
+    root_dir: Optional[_Path] = None
+):
+    """Reads the value of an attribute from a module.
+
+    This function will try to read the attribute statically first
+    (via :func:`ast.literal_eval`), and only evaluate the module if it fails.
+
+    Examples:
+        read_attr("package.attr")
+        read_attr("package.module.attr")
+
+    :param str attr_desc: Dot-separated string describing how to reach the
+        attribute (see examples above)
+    :param dict[str, str] package_dir: Mapping of package names to their
+        location on disk (represented by paths relative to ``root_dir``).
+    :param str root_dir: Path to directory containing all the packages in
+        ``package_dir`` (current directory by default).
+    :rtype: str
+    """
+    root_dir = root_dir or os.getcwd()
+    attrs_path = attr_desc.strip().split('.')
+    attr_name = attrs_path.pop()
+    module_name = '.'.join(attrs_path)
+    module_name = module_name or '__init__'
+    _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+    spec = _find_spec(module_name, path)
+
+    try:
+        return getattr(StaticModule(module_name, spec), attr_name)
+    except Exception:
+        # fallback to evaluate module
+        module = _load_spec(spec, module_name)
+        return getattr(module, attr_name)
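+
+# Hypothetical layout: with ``mypkg/__init__.py`` containing
+# ``__version__ = "0.42"``, the following returns "0.42" without
+# executing the package (static read via ``ast``):
+#
+#     read_attr("mypkg.__version__")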
+
+
+def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec:
+    spec = importlib.util.spec_from_file_location(module_name, module_path)
+    spec = spec or importlib.util.find_spec(module_name)
+
+    if spec is None:
+        raise ModuleNotFoundError(module_name)
+
+    return spec
+
+
+def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
+    name = getattr(spec, "__name__", module_name)
+    if name in sys.modules:
+        return sys.modules[name]
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[name] = module  # cache (it also ensures `==` works on loaded items)
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+def _find_module(
+    module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path
+) -> Tuple[_Path, Optional[str], str]:
+    """Given a module (that could normally be imported by ``module_name``
+    after the build is complete), find the path to the parent directory where
+    it is contained and the canonical name that could be used to import it
+    considering the ``package_dir`` in the build configuration and ``root_dir``.
+    """
+    parent_path = root_dir
+    module_parts = module_name.split('.')
+    if package_dir:
+        if module_parts[0] in package_dir:
+            # A custom path was specified for the module we want to import
+            custom_path = package_dir[module_parts[0]]
+            parts = custom_path.rsplit('/', 1)
+            if len(parts) > 1:
+                parent_path = os.path.join(root_dir, parts[0])
+                parent_module = parts[1]
+            else:
+                parent_module = custom_path
+            module_name = ".".join([parent_module, *module_parts[1:]])
+        elif '' in package_dir:
+            # A custom parent directory was specified for all root modules
+            parent_path = os.path.join(root_dir, package_dir[''])
+
+    path_start = os.path.join(parent_path, *module_name.split("."))
+    candidates = chain(
+        (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
+        iglob(f"{path_start}.*")
+    )
+    module_path = next((x for x in candidates if os.path.isfile(x)), None)
+    return parent_path, module_path, module_name
+
+
+def resolve_class(
+    qualified_class_name: str,
+    package_dir: Optional[Mapping[str, str]] = None,
+    root_dir: Optional[_Path] = None
+) -> Callable:
+    """Given a qualified class name, return the associated class object"""
+    root_dir = root_dir or os.getcwd()
+    idx = qualified_class_name.rfind('.')
+    class_name = qualified_class_name[idx + 1 :]
+    pkg_name = qualified_class_name[:idx]
+
+    _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
+    module = _load_spec(_find_spec(module_name, path), module_name)
+    return getattr(module, class_name)
+
+
+def cmdclass(
+    values: Dict[str, str],
+    package_dir: Optional[Mapping[str, str]] = None,
+    root_dir: Optional[_Path] = None
+) -> Dict[str, Callable]:
+    """Given a dictionary mapping command names to strings for qualified class
+    names, apply :func:`resolve_class` to the dict values.
+    """
+    return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()}
+
+
+def find_packages(
+    *,
+    namespaces=True,
+    fill_package_dir: Optional[Dict[str, str]] = None,
+    root_dir: Optional[_Path] = None,
+    **kwargs
+) -> List[str]:
+    """Works similarly to :func:`setuptools.find_packages`, but with all
+    arguments given as keyword arguments. Moreover, ``where`` can be given
+    as a list (the results will be simply concatenated).
+
+    When the additional keyword argument ``namespaces`` is ``True``, it will
+    behave like :func:`setuptools.find_namespace_packages` (i.e. include
+    implicit namespaces as per :pep:`420`).
+
+    The ``where`` argument will be considered relative to ``root_dir`` (or the current
+    working directory when ``root_dir`` is not given).
+
+    If the ``fill_package_dir`` argument is passed, this function will consider it
+    a data structure similar to the ``package_dir`` configuration parameter and
+    fill in any missing package location.
+
+    :rtype: list
+    """
+    from setuptools.discovery import construct_package_dir
+    from setuptools.extern.more_itertools import unique_everseen, always_iterable
+
+    if namespaces:
+        from setuptools.discovery import PEP420PackageFinder as PackageFinder
+    else:
+        from setuptools.discovery import PackageFinder  # type: ignore
+
+    root_dir = root_dir or os.curdir
+    where = kwargs.pop('where', ['.'])
+    packages: List[str] = []
+    fill_package_dir = {} if fill_package_dir is None else fill_package_dir
+    search = list(unique_everseen(always_iterable(where)))
+
+    if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
+        fill_package_dir.setdefault("", search[0])
+
+    for path in search:
+        package_path = _nest_path(root_dir, path)
+        pkgs = PackageFinder.find(package_path, **kwargs)
+        packages.extend(pkgs)
+        if pkgs and not (
+            fill_package_dir.get("") == path
+            or os.path.samefile(package_path, root_dir)
+        ):
+            fill_package_dir.update(construct_package_dir(pkgs, path))
+
+    return packages
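+
+# Hypothetical usage (mirrors the ``tool.setuptools.packages.find`` table in
+# ``pyproject.toml``): searching a ``src`` layout and capturing the implied
+# package-dir mapping:
+#
+#     pkg_dir = {}
+#     find_packages(where=["src"], fill_package_dir=pkg_dir, namespaces=False)
+#     # -> e.g. ["mypkg", "mypkg.sub"], with pkg_dir == {"": "src"}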
+
+
+def _nest_path(parent: _Path, path: _Path) -> str:
+    path = parent if path in {".", ""} else os.path.join(parent, path)
+    return os.path.normpath(path)
+
+
+def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
+    """When getting the version directly from an attribute,
+    it should be normalised to a string.
+    """
+    if callable(value):
+        value = value()
+
+    value = cast(Iterable[Union[str, int]], value)
+
+    if not isinstance(value, str):
+        if hasattr(value, '__iter__'):
+            value = '.'.join(map(str, value))
+        else:
+            value = '%s' % value
+
+    return value
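+
+# For illustration:
+#     version("1.2.3")              -> "1.2.3"
+#     version((1, 2, 3))            -> "1.2.3"
+#     version(lambda: (0, "dev1"))  -> "0.dev1"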
+
+
+def canonic_package_data(package_data: dict) -> dict:
+    if "*" in package_data:
+        package_data[""] = package_data.pop("*")
+    return package_data
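+
+# e.g. (illustrative):
+#     canonic_package_data({"*": ["*.txt"]})  -> {"": ["*.txt"]}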
+
+
+def canonic_data_files(
+    data_files: Union[list, dict], root_dir: Optional[_Path] = None
+) -> List[Tuple[str, List[str]]]:
+    """For compatibility with ``setup.py``, ``data_files`` should be a list
+    of pairs instead of a dict.
+
+    This function also expands glob patterns.
+    """
+    if isinstance(data_files, list):
+        return data_files
+
+    return [
+        (dest, glob_relative(patterns, root_dir))
+        for dest, patterns in data_files.items()
+    ]
+
+
+def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
+    """Given the contents of entry-points file,
+    process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
+    The first level keys are entry-point groups, the second level keys are
+    entry-point names, and the second level values are references to objects
+    (that correspond to the entry-point value).
+    """
+    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore
+    parser.optionxform = str  # case sensitive
+    parser.read_string(text, text_source)
+    groups = {k: dict(v.items()) for k, v in parser.items()}
+    groups.pop(parser.default_section, None)
+    return groups
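+
+# For illustration, hypothetical contents such as:
+#
+#     [console_scripts]
+#     hello = pkg.cli:main
+#
+# are processed into ``{"console_scripts": {"hello": "pkg.cli:main"}}``.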
+
+
+class EnsurePackagesDiscovered:
+    """Some expand functions require all the packages to already be discovered before
+    they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.
+
+    Therefore in some cases we will need to run autodiscovery during the evaluation of
+    the configuration. However, it is better to postpone calling package discovery as
+    much as possible, because some parameters can influence it (e.g. ``package_dir``),
+    and those might not have been processed yet.
+    """
+
+    def __init__(self, distribution: "Distribution"):
+        self._dist = distribution
+        self._called = False
+
+    def __call__(self):
+        """Trigger the automatic package discovery, if it is still necessary."""
+        if not self._called:
+            self._called = True
+            self._dist.set_defaults(name=False)  # Skip name, we can still be parsing
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, _exc_type, _exc_value, _traceback):
+        if self._called:
+            self._dist.set_defaults.analyse_name()  # Now we can set a default name
+
+    def _get_package_dir(self) -> Mapping[str, str]:
+        self()
+        pkg_dir = self._dist.package_dir
+        return {} if pkg_dir is None else pkg_dir
+
+    @property
+    def package_dir(self) -> Mapping[str, str]:
+        """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
+        return LazyMappingProxy(self._get_package_dir)
+
+
+class LazyMappingProxy(Mapping[_K, _V]):
+    """Mapping proxy that delays resolving the target object, until really needed.
+
+    >>> def obtain_mapping():
+    ...     print("Running expensive function!")
+    ...     return {"key": "value", "other key": "other value"}
+    >>> mapping = LazyMappingProxy(obtain_mapping)
+    >>> mapping["key"]
+    Running expensive function!
+    'value'
+    >>> mapping["other key"]
+    'other value'
+    """
+
+    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
+        self._obtain = obtain_mapping_value
+        self._value: Optional[Mapping[_K, _V]] = None
+
+    def _target(self) -> Mapping[_K, _V]:
+        if self._value is None:
+            self._value = self._obtain()
+        return self._value
+
+    def __getitem__(self, key: _K) -> _V:
+        return self._target()[key]
+
+    def __len__(self) -> int:
+        return len(self._target())
+
+    def __iter__(self) -> Iterator[_K]:
+        return iter(self._target())
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
new file mode 100644
index 0000000000..8d1dcaed54
--- /dev/null
+++ b/setuptools/config/pyprojecttoml.py
@@ -0,0 +1,496 @@
+"""
+Load setuptools configuration from ``pyproject.toml`` files.
+
+**PRIVATE MODULE**: API reserved for setuptools internal usage only.
+
+To read project metadata, consider using
+``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
+For simple scenarios, you can also try parsing the file directly
+with the help of ``tomllib`` or ``tomli``.
+"""
+import logging
+import os
+from contextlib import contextmanager
+from functools import partial
+from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set, Union
+
+from ..errors import FileError, OptionError
+from ..warnings import SetuptoolsWarning
+from . import expand as _expand
+from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField
+from ._apply_pyprojecttoml import apply as _apply
+
+if TYPE_CHECKING:
+    from setuptools.dist import Distribution  # noqa
+
+_Path = Union[str, os.PathLike]
+_logger = logging.getLogger(__name__)
+
+
+def load_file(filepath: _Path) -> dict:
+    from setuptools.extern import tomli  # type: ignore
+
+    with open(filepath, "rb") as file:
+        return tomli.load(file)
+
+
+def validate(config: dict, filepath: _Path) -> bool:
+    from . import _validate_pyproject as validator
+
+    trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
+    if hasattr(trove_classifier, "_disable_download"):
+        # Improve reproducibility by default. See ``validate-pyproject`` issue 31.
+        trove_classifier._disable_download()  # type: ignore
+
+    try:
+        return validator.validate(config)
+    except validator.ValidationError as ex:
+        summary = f"configuration error: {ex.summary}"
+        if ex.name.strip("`") != "project":
+            # Probably it is just a missing/misnamed field, not worth the verbosity...
+            _logger.debug(summary)
+            _logger.debug(ex.details)
+
+        error = f"invalid pyproject.toml config: {ex.name}."
+        raise ValueError(f"{error}\n{summary}") from None
+
+
+def apply_configuration(
+    dist: "Distribution",
+    filepath: _Path,
+    ignore_option_errors=False,
+) -> "Distribution":
+    """Apply the configuration from a ``pyproject.toml`` file into an existing
+    distribution object.
+    """
+    config = read_configuration(filepath, True, ignore_option_errors, dist)
+    return _apply(dist, config, filepath)
+
+
+def read_configuration(
+    filepath: _Path,
+    expand=True,
+    ignore_option_errors=False,
+    dist: Optional["Distribution"] = None,
+):
+    """Read given configuration file and returns options from it as a dict.
+
+    :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
+        format.
+
+    :param bool expand: Whether to expand directives and other computed values
+        (i.e. post-process the given configuration)
+
+    :param bool ignore_option_errors: Whether to silently ignore
+        options whose values could not be resolved (e.g. due to exceptions
+        in directives such as ``file:``, ``attr:``, etc.).
+        If ``False``, exceptions are propagated as expected.
+
+    :param Distribution|None dist: Distribution object to which the configuration
+        refers. If not given, a dummy object will be created and discarded after the
+        configuration is read. This is used for auto-discovery of packages and in the
+        case a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded.
+        When ``expand=False`` this object is simply ignored.
+
+    :rtype: dict
+    """
+    filepath = os.path.abspath(filepath)
+
+    if not os.path.isfile(filepath):
+        raise FileError(f"Configuration file {filepath!r} does not exist.")
+
+    asdict = load_file(filepath) or {}
+    project_table = asdict.get("project", {})
+    tool_table = asdict.get("tool", {})
+    setuptools_table = tool_table.get("setuptools", {})
+    if not asdict or not (project_table or setuptools_table):
+        return {}  # User is not using pyproject to configure setuptools
+
+    if setuptools_table:
+        # TODO: Remove the following once the feature stabilizes:
+        _BetaConfiguration.emit()
+
+    # There is an overall sense in the community that making include_package_data=True
+    # the default would be an improvement.
+    # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
+    # therefore setting a default here is backwards compatible.
+    orig_setuptools_table = setuptools_table.copy()
+    if dist and getattr(dist, "include_package_data", None) is not None:
+        setuptools_table.setdefault("include-package-data", dist.include_package_data)
+    else:
+        setuptools_table.setdefault("include-package-data", True)
+    # Persist changes:
+    asdict["tool"] = tool_table
+    tool_table["setuptools"] = setuptools_table
+
+    try:
+        # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
+        subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
+        validate(subset, filepath)
+    except Exception as ex:
+        # TODO: Remove the following once the feature stabilizes:
+        if _skip_bad_config(project_table, orig_setuptools_table, dist):
+            return {}
+        # TODO: After the previous statement is removed the try/except can be replaced
+        # by the _ignore_errors context manager.
+        if ignore_option_errors:
+            _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
+        else:
+            raise  # re-raise exception
+
+    if expand:
+        root_dir = os.path.dirname(filepath)
+        return expand_configuration(asdict, root_dir, ignore_option_errors, dist)
+
+    return asdict
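+
+# Hypothetical usage sketch (remember this is a private API):
+#     config = read_configuration("pyproject.toml")
+#     config.get("project", {}).get("name")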
+
+
+def _skip_bad_config(
+    project_cfg: dict, setuptools_cfg: dict, dist: Optional["Distribution"]
+) -> bool:
+    """Be temporarily forgiving with invalid ``pyproject.toml``"""
+    # See pypa/setuptools#3199 and pypa/cibuildwheel#1064
+
+    if dist is None or (
+        dist.metadata.name is None
+        and dist.metadata.version is None
+        and dist.install_requires is None
+    ):
+        # It seems that the build is not getting any configuration from other places
+        return False
+
+    if setuptools_cfg:
+        # If `[tool.setuptools]` is set, then `pyproject.toml` config is intentional
+        return False
+
+    given_config = set(project_cfg.keys())
+    popular_subset = {"name", "version", "python_requires", "requires-python"}
+    if given_config <= popular_subset:
+        # It seems that the docs in cibuildwheel have been inadvertently
+        # encouraging users to create `pyproject.toml` files that are not
+        # compliant with the standards. Let's be forgiving for the time being.
+        _InvalidFile.emit()
+        return True
+
+    return False
+
+
+def expand_configuration(
+    config: dict,
+    root_dir: Optional[_Path] = None,
+    ignore_option_errors: bool = False,
+    dist: Optional["Distribution"] = None,
+) -> dict:
+    """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
+    find their final values.
+
+    :param dict config: Dict containing the configuration for the distribution
+    :param str root_dir: Top-level directory for the distribution/project
+        (the same directory where ``pyproject.toml`` is placed)
+    :param bool ignore_option_errors: see :func:`read_configuration`
+    :param Distribution|None dist: Distribution object to which the configuration
+        refers. If not given, a dummy object will be created and discarded after the
+        configuration is read. Used in the case a dynamic configuration
+        (e.g. ``attr`` or ``cmdclass``) is expanded.
+
+    :rtype: dict
+    """
+    return _ConfigExpander(config, root_dir, ignore_option_errors, dist).expand()
+
+
+class _ConfigExpander:
+    def __init__(
+        self,
+        config: dict,
+        root_dir: Optional[_Path] = None,
+        ignore_option_errors: bool = False,
+        dist: Optional["Distribution"] = None,
+    ):
+        self.config = config
+        self.root_dir = root_dir or os.getcwd()
+        self.project_cfg = config.get("project", {})
+        self.dynamic = self.project_cfg.get("dynamic", [])
+        self.setuptools_cfg = config.get("tool", {}).get("setuptools", {})
+        self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
+        self.ignore_option_errors = ignore_option_errors
+        self._dist = dist
+        self._referenced_files: Set[str] = set()
+
+    def _ensure_dist(self) -> "Distribution":
+        from setuptools.dist import Distribution
+
+        attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
+        return self._dist or Distribution(attrs)
+
+    def _process_field(self, container: dict, field: str, fn: Callable):
+        if field in container:
+            with _ignore_errors(self.ignore_option_errors):
+                container[field] = fn(container[field])
+
+    def _canonic_package_data(self, field="package-data"):
+        package_data = self.setuptools_cfg.get(field, {})
+        return _expand.canonic_package_data(package_data)
+
+    def expand(self):
+        self._expand_packages()
+        self._canonic_package_data()
+        self._canonic_package_data("exclude-package-data")
+
+        # A distribution object is required for discovering the correct package_dir
+        dist = self._ensure_dist()
+        ctx = _EnsurePackagesDiscovered(dist, self.project_cfg, self.setuptools_cfg)
+        with ctx as ensure_discovered:
+            package_dir = ensure_discovered.package_dir
+            self._expand_data_files()
+            self._expand_cmdclass(package_dir)
+            self._expand_all_dynamic(dist, package_dir)
+
+        dist._referenced_files.update(self._referenced_files)
+        return self.config
+
+    def _expand_packages(self):
+        packages = self.setuptools_cfg.get("packages")
+        if packages is None or isinstance(packages, (list, tuple)):
+            return
+
+        find = packages.get("find")
+        if isinstance(find, dict):
+            find["root_dir"] = self.root_dir
+            find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {})
+            with _ignore_errors(self.ignore_option_errors):
+                self.setuptools_cfg["packages"] = _expand.find_packages(**find)
+
+    def _expand_data_files(self):
+        data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir)
+        self._process_field(self.setuptools_cfg, "data-files", data_files)
+
+    def _expand_cmdclass(self, package_dir: Mapping[str, str]):
+        root_dir = self.root_dir
+        cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
+        self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
+
+    def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]):
+        special = (  # need special handling
+            "version",
+            "readme",
+            "entry-points",
+            "scripts",
+            "gui-scripts",
+            "classifiers",
+            "dependencies",
+            "optional-dependencies",
+        )
+        # `_obtain` functions are assumed to raise appropriate exceptions/warnings.
+        obtained_dynamic = {
+            field: self._obtain(dist, field, package_dir)
+            for field in self.dynamic
+            if field not in special
+        }
+        obtained_dynamic.update(
+            self._obtain_entry_points(dist, package_dir) or {},
+            version=self._obtain_version(dist, package_dir),
+            readme=self._obtain_readme(dist),
+            classifiers=self._obtain_classifiers(dist),
+            dependencies=self._obtain_dependencies(dist),
+            optional_dependencies=self._obtain_optional_dependencies(dist),
+        )
+        # `None` indicates there is nothing in `tool.setuptools.dynamic` but the value
+        # might have already been set by setup.py/extensions, so avoid overwriting.
+        updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
+        self.project_cfg.update(updates)
+
+    def _ensure_previously_set(self, dist: "Distribution", field: str):
+        previous = _PREVIOUSLY_DEFINED[field](dist)
+        if previous is None and not self.ignore_option_errors:
+            msg = (
+                f"No configuration found for dynamic {field!r}.\n"
+                "Some dynamic fields need to be specified via `tool.setuptools.dynamic`"
+                "\nothers must be specified via the equivalent attribute in `setup.py`."
+            )
+            raise OptionError(msg)
+
+    def _expand_directive(
+        self, specifier: str, directive, package_dir: Mapping[str, str]
+    ):
+        from setuptools.extern.more_itertools import always_iterable  # type: ignore
+
+        with _ignore_errors(self.ignore_option_errors):
+            root_dir = self.root_dir
+            if "file" in directive:
+                self._referenced_files.update(always_iterable(directive["file"]))
+                return _expand.read_files(directive["file"], root_dir)
+            if "attr" in directive:
+                return _expand.read_attr(directive["attr"], package_dir, root_dir)
+            raise ValueError(f"invalid `{specifier}`: {directive!r}")
+        return None
+
+    def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]):
+        if field in self.dynamic_cfg:
+            return self._expand_directive(
+                f"tool.setuptools.dynamic.{field}",
+                self.dynamic_cfg[field],
+                package_dir,
+            )
+        self._ensure_previously_set(dist, field)
+        return None
+
+    def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]):
+        # Since plugins can set version, let's silently skip if it cannot be obtained
+        if "version" in self.dynamic and "version" in self.dynamic_cfg:
+            return _expand.version(self._obtain(dist, "version", package_dir))
+        return None
+
+    def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
+        if "readme" not in self.dynamic:
+            return None
+
+        dynamic_cfg = self.dynamic_cfg
+        if "readme" in dynamic_cfg:
+            return {
+                "text": self._obtain(dist, "readme", {}),
+                "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
+            }
+
+        self._ensure_previously_set(dist, "readme")
+        return None
+
+    def _obtain_entry_points(
+        self, dist: "Distribution", package_dir: Mapping[str, str]
+    ) -> Optional[Dict[str, dict]]:
+        fields = ("entry-points", "scripts", "gui-scripts")
+        if not any(field in self.dynamic for field in fields):
+            return None
+
+        text = self._obtain(dist, "entry-points", package_dir)
+        if text is None:
+            return None
+
+        groups = _expand.entry_points(text)
+        expanded = {"entry-points": groups}
+
+        def _set_scripts(field: str, group: str):
+            if group in groups:
+                value = groups.pop(group)
+                if field not in self.dynamic:
+                    _WouldIgnoreField.emit(field=field, value=value)
+                # TODO: Don't set field when support for pyproject.toml stabilizes
+                #       instead raise an error as specified in PEP 621
+                expanded[field] = value
+
+        _set_scripts("scripts", "console_scripts")
+        _set_scripts("gui-scripts", "gui_scripts")
+
+        return expanded
+
+    def _obtain_classifiers(self, dist: "Distribution"):
+        if "classifiers" in self.dynamic:
+            value = self._obtain(dist, "classifiers", {})
+            if value:
+                return value.splitlines()
+        return None
+
+    def _obtain_dependencies(self, dist: "Distribution"):
+        if "dependencies" in self.dynamic:
+            value = self._obtain(dist, "dependencies", {})
+            if value:
+                return _parse_requirements_list(value)
+        return None
+
+    def _obtain_optional_dependencies(self, dist: "Distribution"):
+        if "optional-dependencies" not in self.dynamic:
+            return None
+        if "optional-dependencies" in self.dynamic_cfg:
+            optional_dependencies_map = self.dynamic_cfg["optional-dependencies"]
+            assert isinstance(optional_dependencies_map, dict)
+            return {
+                group: _parse_requirements_list(self._expand_directive(
+                    f"tool.setuptools.dynamic.optional-dependencies.{group}",
+                    directive,
+                    {},
+                ))
+                for group, directive in optional_dependencies_map.items()
+            }
+        self._ensure_previously_set(dist, "optional-dependencies")
+        return None
+
+
+def _parse_requirements_list(value):
+    return [
+        line
+        for line in value.splitlines()
+        if line.strip() and not line.strip().startswith("#")
+    ]
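+
+# Blank lines and comment lines are dropped, e.g. (illustrative):
+#     _parse_requirements_list("requests>=2.0\n# pinned\n\nnumpy")
+#     -> ["requests>=2.0", "numpy"]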
+
+
+@contextmanager
+def _ignore_errors(ignore_option_errors: bool):
+    if not ignore_option_errors:
+        yield
+        return
+
+    try:
+        yield
+    except Exception as ex:
+        _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
+
+
+class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
+    def __init__(
+        self, distribution: "Distribution", project_cfg: dict, setuptools_cfg: dict
+    ):
+        super().__init__(distribution)
+        self._project_cfg = project_cfg
+        self._setuptools_cfg = setuptools_cfg
+
+    def __enter__(self):
+        """When entering the context, the values of ``packages``, ``py_modules`` and
+        ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
+        """
+        dist, cfg = self._dist, self._setuptools_cfg
+        package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
+        package_dir.update(dist.package_dir or {})
+        dist.package_dir = package_dir  # needs to be the same object
+
+        dist.set_defaults._ignore_ext_modules()  # pyproject.toml-specific behaviour
+
+        # Set `name`, `py_modules` and `packages` in dist to short-circuit
+        # auto-discovery, but avoid overwriting empty lists purposefully set by users.
+        if dist.metadata.name is None:
+            dist.metadata.name = self._project_cfg.get("name")
+        if dist.py_modules is None:
+            dist.py_modules = cfg.get("py-modules")
+        if dist.packages is None:
+            dist.packages = cfg.get("packages")
+
+        return super().__enter__()
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        """When exiting the context, if values of ``packages``, ``py_modules`` and
+        ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
+        """
+        # If anything was discovered set them back, so they count in the final config.
+        self._setuptools_cfg.setdefault("packages", self._dist.packages)
+        self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules)
+        return super().__exit__(exc_type, exc_value, traceback)
+
+
+class _BetaConfiguration(SetuptoolsWarning):
+    _SUMMARY = "Support for `[tool.setuptools]` in `pyproject.toml` is still *beta*."
+
+
+class _InvalidFile(SetuptoolsWarning):
+    _SUMMARY = "The given `pyproject.toml` file is invalid and would be ignored."
+    _DETAILS = """
+    ############################
+    # Invalid `pyproject.toml` #
+    ############################
+
+    Any configurations in `pyproject.toml` will be ignored.
+    Please note that future releases of setuptools will halt the build process
+    if an invalid file is given.
+
+    To prevent setuptools from considering `pyproject.toml` please
+    DO NOT include the `[project]` or `[tool.setuptools]` tables in your file.
+    """
+    _DUE_DATE = (2023, 6, 1)  # warning introduced in 2022-03-26
+    _SEE_DOCS = "userguide/pyproject_config.html"
diff --git a/setuptools/config.py b/setuptools/config/setupcfg.py
similarity index 52%
rename from setuptools/config.py
rename to setuptools/config/setupcfg.py
index b4e968e5ca..bb35559069 100644
--- a/setuptools/config.py
+++ b/setuptools/config/setupcfg.py
@@ -1,63 +1,61 @@
-import ast
-import io
-import os
-import sys
+"""
+Load setuptools configuration from ``setup.cfg`` files.
+
+**API will be made private in the future**
 
-import warnings
+To read project metadata, consider using
+``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
+For simple scenarios, you can also try parsing the file directly
+with the help of ``configparser``.
+"""
+import contextlib
 import functools
-import importlib
+import os
 from collections import defaultdict
 from functools import partial
 from functools import wraps
-from glob import iglob
-import contextlib
-
-from distutils.errors import DistutilsOptionError, DistutilsFileError
-from setuptools.extern.packaging.version import Version, InvalidVersion
-from setuptools.extern.packaging.specifiers import SpecifierSet
-
-
-class StaticModule:
-    """
-    Attempt to load the module by the name
-    """
-
-    def __init__(self, name):
-        spec = importlib.util.find_spec(name)
-        with open(spec.origin) as strm:
-            src = strm.read()
-        module = ast.parse(src)
-        vars(self).update(locals())
-        del self.self
-
-    def __getattr__(self, attr):
-        try:
-            return next(
-                ast.literal_eval(statement.value)
-                for statement in self.module.body
-                if isinstance(statement, ast.Assign)
-                for target in statement.targets
-                if isinstance(target, ast.Name) and target.id == attr
-            )
-        except Exception as e:
-            raise AttributeError(
-                "{self.name} has no attribute {attr}".format(**locals())
-            ) from e
-
-
-@contextlib.contextmanager
-def patch_path(path):
-    """
-    Add path to front of sys.path for the duration of the context.
-    """
-    try:
-        sys.path.insert(0, path)
-        yield
-    finally:
-        sys.path.remove(path)
-
-
-def read_configuration(filepath, find_others=False, ignore_option_errors=False):
+from typing import (
+    TYPE_CHECKING,
+    Callable,
+    Any,
+    Dict,
+    Generic,
+    Iterable,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    TypeVar,
+    Union,
+)
+
+from ..errors import FileError, OptionError
+from ..extern.packaging.markers import default_environment as marker_env
+from ..extern.packaging.requirements import InvalidRequirement, Requirement
+from ..extern.packaging.specifiers import SpecifierSet
+from ..extern.packaging.version import InvalidVersion, Version
+from ..warnings import SetuptoolsDeprecationWarning
+from . import expand
+
+if TYPE_CHECKING:
+    from distutils.dist import DistributionMetadata  # noqa
+
+    from setuptools.dist import Distribution  # noqa
+
+_Path = Union[str, os.PathLike]
+SingleCommandOptions = Dict["str", Tuple["str", Any]]
+"""Dict that associate the name of the options of a particular command to a
+tuple. The first element of the tuple indicates the origin of the option value
+(e.g. the name of the configuration file where it was read from),
+while the second element of the tuple is the option value itself
+"""
+AllCommandOptions = Dict["str", SingleCommandOptions]  # cmd name => its options
+Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
+
+
+def read_configuration(
+    filepath: _Path, find_others=False, ignore_option_errors=False
+) -> dict:
     """Read given configuration file and returns options from it as a dict.
 
     :param str|unicode filepath: Path to configuration file
@@ -73,48 +71,66 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False):
 
     :rtype: dict
     """
-    from setuptools.dist import Distribution, _Distribution
+    from setuptools.dist import Distribution
+
+    dist = Distribution()
+    filenames = dist.find_config_files() if find_others else []
+    handlers = _apply(dist, filepath, filenames, ignore_option_errors)
+    return configuration_to_dict(handlers)
+
+
+def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution":
+    """Apply the configuration from a ``setup.cfg`` file into an existing
+    distribution object.
+    """
+    _apply(dist, filepath)
+    dist._finalize_requires()
+    return dist
+
+
+def _apply(
+    dist: "Distribution",
+    filepath: _Path,
+    other_files: Iterable[_Path] = (),
+    ignore_option_errors: bool = False,
+) -> Tuple["ConfigHandler", ...]:
+    """Read configuration from ``filepath`` and applies to the ``dist`` object."""
+    from setuptools.dist import _Distribution
 
     filepath = os.path.abspath(filepath)
 
     if not os.path.isfile(filepath):
-        raise DistutilsFileError('Configuration file %s does not exist.' % filepath)
+        raise FileError(f'Configuration file {filepath} does not exist.')
 
     current_directory = os.getcwd()
     os.chdir(os.path.dirname(filepath))
+    filenames = [*other_files, filepath]
 
     try:
-        dist = Distribution()
-
-        filenames = dist.find_config_files() if find_others else []
-        if filepath not in filenames:
-            filenames.append(filepath)
-
         _Distribution.parse_config_files(dist, filenames=filenames)
-
         handlers = parse_configuration(
             dist, dist.command_options, ignore_option_errors=ignore_option_errors
         )
-
+        dist._finalize_license_files()
     finally:
         os.chdir(current_directory)
 
-    return configuration_to_dict(handlers)
+    return handlers
 
 
-def _get_option(target_obj, key):
+def _get_option(target_obj: Target, key: str):
     """
     Given a target object and option key, get that option from
     the target object, either through a get_{key} method or
     from an attribute directly.
     """
-    getter_name = 'get_{key}'.format(**locals())
+    getter_name = f'get_{key}'
     by_attribute = functools.partial(getattr, target_obj, key)
     getter = getattr(target_obj, getter_name, by_attribute)
     return getter()
 
 
-def configuration_to_dict(handlers):
+def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
     """Returns configuration data gathered by given handlers as a dict.
 
     :param list[ConfigHandler] handlers: Handlers list,
@@ -122,7 +138,7 @@ def configuration_to_dict(handlers):
 
     :rtype: dict
     """
-    config_dict = defaultdict(dict)
+    config_dict: dict = defaultdict(dict)
 
     for handler in handlers:
         for option in handler.set_options:
@@ -132,7 +148,11 @@ def configuration_to_dict(handlers):
     return config_dict
 
 
-def parse_configuration(distribution, command_options, ignore_option_errors=False):
+def parse_configuration(
+    distribution: "Distribution",
+    command_options: AllCommandOptions,
+    ignore_option_errors=False,
+) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
     """Performs additional parsing of configuration options
     for a distribution.
 
@@ -146,51 +166,106 @@ def parse_configuration(distribution, command_options, ignore_option_errors=Fals
         If False exceptions are propagated as expected.
     :rtype: list
     """
-    options = ConfigOptionsHandler(distribution, command_options, ignore_option_errors)
-    options.parse()
+    with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered:
+        options = ConfigOptionsHandler(
+            distribution,
+            command_options,
+            ignore_option_errors,
+            ensure_discovered,
+        )
 
-    meta = ConfigMetadataHandler(
-        distribution.metadata,
-        command_options,
-        ignore_option_errors,
-        distribution.package_dir,
-    )
-    meta.parse()
+        options.parse()
+        if not distribution.package_dir:
+            distribution.package_dir = options.package_dir  # Filled by `find_packages`
+
+        meta = ConfigMetadataHandler(
+            distribution.metadata,
+            command_options,
+            ignore_option_errors,
+            ensure_discovered,
+            distribution.package_dir,
+            distribution.src_root,
+        )
+        meta.parse()
+        distribution._referenced_files.update(
+            options._referenced_files, meta._referenced_files
+        )
 
     return meta, options
 
 
-class ConfigHandler:
+def _warn_accidental_env_marker_misconfig(label: str, orig_value: str, parsed: list):
+    """Because users sometimes misinterpret this configuration:
+
+    [options.extras_require]
+    foo = bar;python_version<"4"
+
+    It looks like one requirement with an environment marker
+    but because there is no newline, it's parsed as two requirements
+    with a semicolon as separator.
+
+    Therefore, if:
+        * input string does not contain a newline AND
+        * parsed result contains two requirements AND
+        * parsing of the two parts from the result (";")
+          leads to a valid Requirement with a valid marker
+    a UserWarning is shown to inform the user about the possible problem.
+    """
+    if "\n" in orig_value or len(parsed) != 2:
+        return
+
+    markers = marker_env().keys()
+
+    try:
+        req = Requirement(parsed[1])
+        if req.name in markers:
+            _AmbiguousMarker.emit(field=label, req=parsed[1])
+    except InvalidRequirement as ex:
+        if any(parsed[1].startswith(marker) for marker in markers):
+            msg = _AmbiguousMarker.message(field=label, req=parsed[1])
+            raise InvalidRequirement(msg) from ex
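+
+# The unambiguous way to express the intent from the docstring above is to put
+# the requirement on its own line, e.g. (illustrative):
+#
+#     [options.extras_require]
+#     foo =
+#         bar;python_version<"4"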
+
+
+class ConfigHandler(Generic[Target]):
     """Handles metadata supplied in configuration files."""
 
-    section_prefix = None
+    section_prefix: str
     """Prefix for config sections handled by this handler.
     Must be provided by class heirs.
 
     """
 
-    aliases = {}
+    aliases: Dict[str, str] = {}
     """Options aliases.
     For compatibility with various packages. E.g.: d2to1 and pbr.
     Note: `-` in keys is replaced with `_` by config parser.
 
     """
 
-    def __init__(self, target_obj, options, ignore_option_errors=False):
-        sections = {}
-
-        section_prefix = self.section_prefix
-        for section_name, section_options in options.items():
-            if not section_name.startswith(section_prefix):
-                continue
-
-            section_name = section_name.replace(section_prefix, '').strip('.')
-            sections[section_name] = section_options
-
+    def __init__(
+        self,
+        target_obj: Target,
+        options: AllCommandOptions,
+        ignore_option_errors,
+        ensure_discovered: expand.EnsurePackagesDiscovered,
+    ):
         self.ignore_option_errors = ignore_option_errors
         self.target_obj = target_obj
-        self.sections = sections
-        self.set_options = []
+        self.sections = dict(self._section_options(options))
+        self.set_options: List[str] = []
+        self.ensure_discovered = ensure_discovered
+        self._referenced_files: Set[str] = set()
+        """After parsing configurations, this property will enumerate
+        all files referenced by the "file:" directive. Private API for setuptools only.
+        """
+
+    @classmethod
+    def _section_options(cls, options: AllCommandOptions):
+        for full_name, value in options.items():
+            pre, sep, name = full_name.partition(cls.section_prefix)
+            if pre:
+                continue
+            yield name.lstrip('.'), value
 
     @property
     def parsers(self):
@@ -200,40 +275,28 @@ def parsers(self):
         )
 
     def __setitem__(self, option_name, value):
-        unknown = tuple()
         target_obj = self.target_obj
 
         # Translate alias into real name.
         option_name = self.aliases.get(option_name, option_name)
 
-        current_value = getattr(target_obj, option_name, unknown)
-
-        if current_value is unknown:
+        try:
+            current_value = getattr(target_obj, option_name)
+        except AttributeError:
             raise KeyError(option_name)
 
         if current_value:
             # Already inhabited. Skipping.
             return
 
-        skip_option = False
-        parser = self.parsers.get(option_name)
-        if parser:
-            try:
-                value = parser(value)
-
-            except Exception:
-                skip_option = True
-                if not self.ignore_option_errors:
-                    raise
-
-        if skip_option:
+        try:
+            parsed = self.parsers.get(option_name, lambda x: x)(value)
+        except (Exception,) * self.ignore_option_errors:
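+            # `(Exception,) * False` is an empty tuple, so nothing is caught
+            # (and the error propagates) unless ignore_option_errors is truthy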
             return
 
-        setter = getattr(target_obj, 'set_%s' % option_name, None)
-        if setter is None:
-            setattr(target_obj, option_name, value)
-        else:
-            setter(value)
+        simple_setter = functools.partial(target_obj.__setattr__, option_name)
+        setter = getattr(target_obj, 'set_%s' % option_name, simple_setter)
+        setter(parsed)
 
         self.set_options.append(option_name)
 
@@ -257,34 +320,6 @@ def _parse_list(cls, value, separator=','):
 
         return [chunk.strip() for chunk in value if chunk.strip()]
 
-    @classmethod
-    def _parse_list_glob(cls, value, separator=','):
-        """Equivalent to _parse_list() but expands any glob patterns using glob().
-
-        However, unlike with glob() calls, the results remain relative paths.
-
-        :param value:
-        :param separator: List items separator character.
-        :rtype: list
-        """
-        glob_characters = ('*', '?', '[', ']', '{', '}')
-        values = cls._parse_list(value, separator=separator)
-        expanded_values = []
-        for value in values:
-
-            # Has globby characters?
-            if any(char in value for char in glob_characters):
-                # then expand the glob pattern while keeping paths *relative*:
-                expanded_values.extend(sorted(
-                    os.path.relpath(path, os.getcwd())
-                    for path in iglob(os.path.abspath(value))))
-
-            else:
-                # take the value as-is:
-                expanded_values.append(value)
-
-        return expanded_values
-
     @classmethod
     def _parse_dict(cls, value):
         """Represents value as a dict.
@@ -297,9 +332,7 @@ def _parse_dict(cls, value):
         for line in cls._parse_list(value):
             key, sep, val = line.partition(separator)
             if sep != separator:
-                raise DistutilsOptionError(
-                    'Unable to parse option value to dict: %s' % value
-                )
+                raise OptionError(f"Unable to parse option value to dict: {value}")
             result[key.strip()] = val.strip()
 
         return result
@@ -337,8 +370,7 @@ def parser(value):
 
         return parser
 
-    @classmethod
-    def _parse_file(cls, value):
+    def _parse_file(self, value, root_dir: _Path):
         """Represents value as a string, allowing including text
         from nearest files using `file:` directive.
 
@@ -360,25 +392,11 @@ def _parse_file(cls, value):
             return value
 
         spec = value[len(include_directive) :]
-        filepaths = (os.path.abspath(path.strip()) for path in spec.split(','))
-        return '\n'.join(
-            cls._read_file(path)
-            for path in filepaths
-            if (cls._assert_local(path) or True) and os.path.isfile(path)
-        )
+        filepaths = [path.strip() for path in spec.split(',')]
+        self._referenced_files.update(filepaths)
+        return expand.read_files(filepaths, root_dir)
 
-    @staticmethod
-    def _assert_local(filepath):
-        if not filepath.startswith(os.getcwd()):
-            raise DistutilsOptionError('`file:` directive can not access %s' % filepath)
-
-    @staticmethod
-    def _read_file(filepath):
-        with io.open(filepath, encoding='utf-8') as f:
-            return f.read()
-
-    @classmethod
-    def _parse_attr(cls, value, package_dir=None):
+    def _parse_attr(self, value, package_dir, root_dir: _Path):
         """Represents value as a module attribute.
 
         Examples:
@@ -392,36 +410,11 @@ def _parse_attr(cls, value, package_dir=None):
         if not value.startswith(attr_directive):
             return value
 
-        attrs_path = value.replace(attr_directive, '').strip().split('.')
-        attr_name = attrs_path.pop()
-
-        module_name = '.'.join(attrs_path)
-        module_name = module_name or '__init__'
-
-        parent_path = os.getcwd()
-        if package_dir:
-            if attrs_path[0] in package_dir:
-                # A custom path was specified for the module we want to import
-                custom_path = package_dir[attrs_path[0]]
-                parts = custom_path.rsplit('/', 1)
-                if len(parts) > 1:
-                    parent_path = os.path.join(os.getcwd(), parts[0])
-                    module_name = parts[1]
-                else:
-                    module_name = custom_path
-            elif '' in package_dir:
-                # A custom parent directory was specified for all root modules
-                parent_path = os.path.join(os.getcwd(), package_dir[''])
-
-        with patch_path(parent_path):
-            try:
-                # attempt to load value statically
-                return getattr(StaticModule(module_name), attr_name)
-            except Exception:
-                # fallback to simple import
-                module = importlib.import_module(module_name)
+        attr_desc = value.replace(attr_directive, '')
 
-        return getattr(module, attr_name)
+        # Make sure package_dir is populated correctly, so `attr:` directives can work
+        package_dir.update(self.ensure_discovered.package_dir)
+        return expand.read_attr(attr_desc, package_dir, root_dir)
 
     @classmethod
     def _get_parser_compound(cls, *parse_methods):
@@ -444,45 +437,54 @@ def parse(value):
         return parse
 
     @classmethod
-    def _parse_section_to_dict(cls, section_options, values_parser=None):
+    def _parse_section_to_dict_with_key(cls, section_options, values_parser):
         """Parses section options into a dictionary.
 
-        Optionally applies a given parser to values.
+        Applies a given parser to each option in a section.
 
         :param dict section_options:
-        :param callable values_parser:
+        :param callable values_parser: function with 2 args corresponding to key, value
         :rtype: dict
         """
         value = {}
-        values_parser = values_parser or (lambda val: val)
         for key, (_, val) in section_options.items():
-            value[key] = values_parser(val)
+            value[key] = values_parser(key, val)
         return value
 
+    @classmethod
+    def _parse_section_to_dict(cls, section_options, values_parser=None):
+        """Parses section options into a dictionary.
+
+        Optionally applies a given parser to each value.
+
+        :param dict section_options:
+        :param callable values_parser: function with 1 arg corresponding to option value
+        :rtype: dict
+        """
+        parser = (lambda _, v: values_parser(v)) if values_parser else (lambda _, v: v)
+        return cls._parse_section_to_dict_with_key(section_options, parser)
+
     def parse_section(self, section_options):
         """Parses configuration file section.
 
         :param dict section_options:
         """
-        for (name, (_, value)) in section_options.items():
-            try:
+        for name, (_, value) in section_options.items():
+            with contextlib.suppress(KeyError):
+                # Keep silent, as a new option may appear anytime.
                 self[name] = value
 
-            except KeyError:
-                pass  # Keep silent for a new option may appear anytime.
-
     def parse(self):
         """Parses configuration file items from one
         or more related sections.
 
         """
         for section_name, section_options in self.sections.items():
-
             method_postfix = ''
             if section_name:  # [section.option] variant
                 method_postfix = '_%s' % section_name
 
-            section_parser_method = getattr(
+            section_parser_method: Optional[Callable] = getattr(
                 self,
                 # Dots in section names are translated into dunderscores.
                 ('parse_section%s' % method_postfix).replace('.', '__'),
@@ -490,31 +492,30 @@ def parse(self):
             )
 
             if section_parser_method is None:
-                raise DistutilsOptionError(
-                    'Unsupported distribution option section: [%s.%s]'
-                    % (self.section_prefix, section_name)
+                raise OptionError(
+                    "Unsupported distribution option section: "
+                    f"[{self.section_prefix}.{section_name}]"
                 )
 
             section_parser_method(section_options)
 
-    def _deprecated_config_handler(self, func, msg, warning_class):
+    def _deprecated_config_handler(self, func, msg, **kw):
         """this function will wrap around parameters that are deprecated
 
         :param msg: deprecation message
-        :param warning_class: class of warning exception to be raised
         :param func: function to be wrapped around
         """
 
         @wraps(func)
         def config_handler(*args, **kwargs):
-            warnings.warn(msg, warning_class)
+            kw.setdefault("stacklevel", 2)
+            _DeprecatedConfig.emit("Deprecated config in `setup.cfg`", msg, **kw)
             return func(*args, **kwargs)
 
         return config_handler
 
 
-class ConfigMetadataHandler(ConfigHandler):
-
+class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
     section_prefix = 'metadata'
 
     aliases = {
@@ -531,18 +532,23 @@ class ConfigMetadataHandler(ConfigHandler):
     """
 
     def __init__(
-        self, target_obj, options, ignore_option_errors=False, package_dir=None
+        self,
+        target_obj: "DistributionMetadata",
+        options: AllCommandOptions,
+        ignore_option_errors: bool,
+        ensure_discovered: expand.EnsurePackagesDiscovered,
+        package_dir: Optional[dict] = None,
+        root_dir: _Path = os.curdir,
     ):
-        super(ConfigMetadataHandler, self).__init__(
-            target_obj, options, ignore_option_errors
-        )
+        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
         self.package_dir = package_dir
+        self.root_dir = root_dir
 
     @property
     def parsers(self):
         """Metadata item name to parser function mapping."""
         parse_list = self._parse_list
-        parse_file = self._parse_file
+        parse_file = partial(self._parse_file, root_dir=self.root_dir)
         parse_dict = self._parse_dict
         exclude_files_parser = self._exclude_files_parser
 
@@ -554,7 +560,8 @@ def parsers(self):
                 parse_list,
                 "The requires parameter is deprecated, please use "
                 "install_requires for runtime dependencies.",
-                DeprecationWarning,
+                due_date=(2023, 10, 30),
+                # Warning introduced on 27 Oct 2018
             ),
             'obsoletes': parse_list,
             'classifiers': self._get_parser_compound(parse_file, parse_list),
@@ -563,7 +570,8 @@ def parsers(self):
                 exclude_files_parser('license_file'),
                 "The license_file parameter is deprecated, "
                 "use license_files instead.",
-                DeprecationWarning,
+                due_date=(2023, 10, 30),
+                # Warning introduced on 23 May 2021
             ),
             'license_files': parse_list,
             'description': parse_file,
@@ -579,7 +587,7 @@ def _parse_version(self, value):
         :rtype: str
 
         """
-        version = self._parse_file(value)
+        version = self._parse_file(value, self.root_dir)
 
         if version != value:
             version = version.strip()
@@ -588,37 +596,49 @@ def _parse_version(self, value):
             try:
                 Version(version)
             except InvalidVersion:
-                tmpl = (
-                    'Version loaded from {value} does not '
-                    'comply with PEP 440: {version}'
+                raise OptionError(
+                    f'Version loaded from {value} does not '
+                    f'comply with PEP 440: {version}'
                 )
-                raise DistutilsOptionError(tmpl.format(**locals()))
 
             return version
 
-        version = self._parse_attr(value, self.package_dir)
+        return expand.version(self._parse_attr(value, self.package_dir, self.root_dir))
 
-        if callable(version):
-            version = version()
 
-        if not isinstance(version, str):
-            if hasattr(version, '__iter__'):
-                version = '.'.join(map(str, version))
-            else:
-                version = '%s' % version
+class ConfigOptionsHandler(ConfigHandler["Distribution"]):
+    section_prefix = 'options'
 
-        return version
+    def __init__(
+        self,
+        target_obj: "Distribution",
+        options: AllCommandOptions,
+        ignore_option_errors: bool,
+        ensure_discovered: expand.EnsurePackagesDiscovered,
+    ):
+        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
+        self.root_dir = target_obj.src_root
+        self.package_dir: Dict[str, str] = {}  # To be filled by `find_packages`
 
+    @classmethod
+    def _parse_list_semicolon(cls, value):
+        return cls._parse_list(value, separator=';')
 
-class ConfigOptionsHandler(ConfigHandler):
+    def _parse_file_in_root(self, value):
+        return self._parse_file(value, root_dir=self.root_dir)
 
-    section_prefix = 'options'
+    def _parse_requirements_list(self, label: str, value: str):
+        # Parse a requirements list: either a literal list or the contents
+        # referenced by a `file:` directive.
+        parsed = self._parse_list_semicolon(self._parse_file_in_root(value))
+        _warn_accidental_env_marker_misconfig(label, value, parsed)
+        # Filter it to only include lines that are not comments. `parse_list`
+        # will have stripped each line and filtered out empties.
+        return [line for line in parsed if not line.startswith("#")]
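A short demonstration of the ambiguity that `_warn_accidental_env_marker_misconfig` guards against: because entries are split on `;`, a one-line requirement with a marker parses into two items, the second of which is a bare environment marker.

```python
value = 'importlib-metadata; python_version<"3.8"'
parsed = [v.strip() for v in value.split(";")]
print(parsed)  # ['importlib-metadata', 'python_version<"3.8"']
# The second entry looks like a valid marker rather than a requirement,
# hinting that the user probably meant one requirement with a marker
# (written on its own "dangling" line) instead of two requirements.
```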
 
     @property
     def parsers(self):
         """Metadata item name to parser function mapping."""
         parse_list = self._parse_list
-        parse_list_semicolon = partial(self._parse_list, separator=';')
         parse_bool = self._parse_bool
         parse_dict = self._parse_dict
         parse_cmdclass = self._parse_cmdclass
@@ -630,28 +650,27 @@ def parsers(self):
             'scripts': parse_list,
             'eager_resources': parse_list,
             'dependency_links': parse_list,
-            'namespace_packages': parse_list,
-            'install_requires': parse_list_semicolon,
-            'setup_requires': parse_list_semicolon,
-            'tests_require': parse_list_semicolon,
+            'namespace_packages': self._deprecated_config_handler(
+                parse_list,
+                "The namespace_packages parameter is deprecated, "
+                "consider using implicit namespaces instead (PEP 420).",
+                # TODO: define due date, see setuptools.dist:check_nsp.
+            ),
+            'install_requires': partial(
+                self._parse_requirements_list, "install_requires"
+            ),
+            'setup_requires': self._parse_list_semicolon,
+            'tests_require': self._parse_list_semicolon,
             'packages': self._parse_packages,
-            'entry_points': self._parse_file,
+            'entry_points': self._parse_file_in_root,
             'py_modules': parse_list,
             'python_requires': SpecifierSet,
             'cmdclass': parse_cmdclass,
         }
 
     def _parse_cmdclass(self, value):
-        def resolve_class(qualified_class_name):
-            idx = qualified_class_name.rfind('.')
-            class_name = qualified_class_name[idx + 1 :]
-            pkg_name = qualified_class_name[:idx]
-
-            module = __import__(pkg_name)
-
-            return getattr(module, class_name)
-
-        return {k: resolve_class(v) for k, v in self._parse_dict(value).items()}
+        package_dir = self.ensure_discovered.package_dir
+        return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir)
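The removed resolver used `__import__(pkg_name)`, which returns the top-level package, so a nested `pkg.module.Class` lookup would fail. A hedged sketch of a correct dotted-name resolution (the real work is now delegated to `expand.cmdclass`):

```python
import importlib

def resolve_class(qualified_name: str):
    # "pkg.module.Class" -> class object. Unlike __import__("pkg.module"),
    # importlib.import_module returns the submodule itself, so the final
    # getattr looks in the right namespace.
    module_name, _, class_name = qualified_name.rpartition(".")
    module = importlib.import_module(module_name)
    return getattr(module, class_name)
```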
 
     def _parse_packages(self, value):
         """Parses `packages` option value.
@@ -665,19 +684,18 @@ def _parse_packages(self, value):
         if trimmed_value not in find_directives:
             return self._parse_list(value)
 
-        findns = trimmed_value == find_directives[1]
-
         # Read function arguments from a dedicated section.
         find_kwargs = self.parse_section_packages__find(
             self.sections.get('packages.find', {})
         )
 
-        if findns:
-            from setuptools import find_namespace_packages as find_packages
-        else:
-            from setuptools import find_packages
+        find_kwargs.update(
+            namespaces=(trimmed_value == find_directives[1]),
+            root_dir=self.root_dir,
+            fill_package_dir=self.package_dir,
+        )
 
-        return find_packages(**find_kwargs)
+        return expand.find_packages(**find_kwargs)
 
     def parse_section_packages__find(self, section_options):
         """Parses `packages.find` configuration file section.
@@ -709,14 +727,8 @@ def parse_section_entry_points(self, section_options):
         self['entry_points'] = parsed
 
     def _parse_package_data(self, section_options):
-        parsed = self._parse_section_to_dict(section_options, self._parse_list)
-
-        root = parsed.get('*')
-        if root:
-            parsed[''] = root
-            del parsed['*']
-
-        return parsed
+        package_data = self._parse_section_to_dict(section_options, self._parse_list)
+        return expand.canonic_package_data(package_data)
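The removed lines show the canonicalization now performed by `expand.canonic_package_data`: the `*` glob key is mapped onto the empty-string key that distutils uses to mean "any package". A minimal equivalent:

```python
def canonic_package_data(package_data: dict) -> dict:
    # Mirrors the removed inline logic: '*' -> '' ("any package").
    if "*" in package_data:
        package_data[""] = package_data.pop("*")
    return package_data

assert canonic_package_data({"*": ["*.txt"]}) == {"": ["*.txt"]}
```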
 
     def parse_section_package_data(self, section_options):
         """Parses `package_data` configuration file section.
@@ -737,15 +749,41 @@ def parse_section_extras_require(self, section_options):
 
         :param dict section_options:
         """
-        parse_list = partial(self._parse_list, separator=';')
-        self['extras_require'] = self._parse_section_to_dict(
-            section_options, parse_list
+        parsed = self._parse_section_to_dict_with_key(
+            section_options,
+            lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v),
         )
 
+        self['extras_require'] = parsed
+
     def parse_section_data_files(self, section_options):
         """Parses `data_files` configuration file section.
 
         :param dict section_options:
         """
-        parsed = self._parse_section_to_dict(section_options, self._parse_list_glob)
-        self['data_files'] = [(k, v) for k, v in parsed.items()]
+        parsed = self._parse_section_to_dict(section_options, self._parse_list)
+        self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
+
+
+class _AmbiguousMarker(SetuptoolsDeprecationWarning):
+    _SUMMARY = "Ambiguous requirement marker."
+    _DETAILS = """
+    One of the parsed requirements in `{field}` looks like a valid environment marker:
+
+        {req!r}
+
+    Please make sure that the configuration file is correct.
+    You can use dangling lines to avoid this problem.
+    """
+    _SEE_DOCS = "userguide/declarative_config.html#opt-2"
+    # TODO: should we include due_date here? Initially introduced in 6 Aug 2022.
+    # Does this make sense with latest version of packaging?
+
+    @classmethod
+    def message(cls, **kw):
+        docs = f"https://setuptools.pypa.io/en/latest/{cls._SEE_DOCS}"
+        return cls._format(cls._SUMMARY, cls._DETAILS, see_url=docs, format_args=kw)
+
+
+class _DeprecatedConfig(SetuptoolsDeprecationWarning):
+    _SEE_DOCS = "userguide/declarative_config.html"
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
new file mode 100644
index 0000000000..3110b72794
--- /dev/null
+++ b/setuptools/discovery.py
@@ -0,0 +1,611 @@
+"""Automatic discovery of Python modules and packages (for inclusion in the
+distribution) and other config values.
+
+For the purposes of this module, the following nomenclature is used:
+
+- "src-layout": a directory representing a Python project that contains a "src"
+  folder. Everything under the "src" folder is meant to be included in the
+  distribution when packaging the project. Example::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── src/
+        └── mypkg/
+            ├── __init__.py
+            ├── mymodule.py
+            └── my_data_file.txt
+
+- "flat-layout": a Python project that does not use "src-layout" but instead
+  has a directory under the project root for each package::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── mypkg/
+        ├── __init__.py
+        ├── mymodule.py
+        └── my_data_file.txt
+
+- "single-module": a project that contains a single Python script direct under
+  the project root (no directory used)::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── mymodule.py
+
+"""
+
+import itertools
+import os
+from fnmatch import fnmatchcase
+from glob import glob
+from pathlib import Path
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Union
+)
+
+import _distutils_hack.override  # noqa: F401
+
+from distutils import log
+from distutils.util import convert_path
+
+_Path = Union[str, os.PathLike]
+StrIter = Iterator[str]
+
+chain_iter = itertools.chain.from_iterable
+
+if TYPE_CHECKING:
+    from setuptools import Distribution  # noqa
+
+
+def _valid_name(path: _Path) -> bool:
+    # Ignore invalid names that cannot be imported directly
+    return os.path.basename(path).isidentifier()
+
+
+class _Filter:
+    """
+    Given a list of patterns, create a callable that will be true only if
+    the input matches at least one of the patterns.
+    """
+
+    def __init__(self, *patterns: str):
+        self._patterns = dict.fromkeys(patterns)
+
+    def __call__(self, item: str) -> bool:
+        return any(fnmatchcase(item, pat) for pat in self._patterns)
+
+    def __contains__(self, item: str) -> bool:
+        return item in self._patterns
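A small illustration of how `_Filter` behaves (assuming the class above): `__call__` applies `fnmatchcase` pattern matching, while `__contains__` is a literal membership test, used later for early pruning.

```python
exclude = _Filter("tests", "tests.*")
print(exclude("tests.unit"))   # True  -- matches the "tests.*" pattern
print(exclude("mypkg.tests"))  # False -- patterns are anchored, not substrings
print("tests.*" in exclude)    # True  -- literal lookup, no globbing
```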
+
+
+class _Finder:
+    """Base class that exposes functionality for module/package finders"""
+
+    ALWAYS_EXCLUDE: Tuple[str, ...] = ()
+    DEFAULT_EXCLUDE: Tuple[str, ...] = ()
+
+    @classmethod
+    def find(
+        cls,
+        where: _Path = '.',
+        exclude: Iterable[str] = (),
+        include: Iterable[str] = ('*',)
+    ) -> List[str]:
+        """Return a list of all Python items (packages or modules, depending on
+        the finder implementation) found within directory 'where'.
+
+        'where' is the root directory which will be searched.
+        It should be supplied as a "cross-platform" (i.e. URL-style) path;
+        it will be converted to the appropriate local path syntax.
+
+        'exclude' is a sequence of names to exclude; '*' can be used
+        as a wildcard in the names.
+        When finding packages, 'foo.*' will exclude all subpackages of 'foo'
+        (but not 'foo' itself).
+
+        'include' is a sequence of names to include.
+        If it's specified, only the named items will be included.
+        If it's not specified, all found items will be included.
+        'include' can contain shell style wildcard patterns just like
+        'exclude'.
+        """
+
+        exclude = exclude or cls.DEFAULT_EXCLUDE
+        return list(
+            cls._find_iter(
+                convert_path(str(where)),
+                _Filter(*cls.ALWAYS_EXCLUDE, *exclude),
+                _Filter(*include),
+            )
+        )
+
+    @classmethod
+    def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+        raise NotImplementedError
+
+
+class PackageFinder(_Finder):
+    """
+    Generate a list of all Python packages found within a directory
+    """
+
+    ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__")
+
+    @classmethod
+    def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+        """
+        All the packages found in 'where' that pass the 'include' filter, but
+        not the 'exclude' filter.
+        """
+        for root, dirs, files in os.walk(str(where), followlinks=True):
+            # Copy dirs so we can iterate over it, then empty dirs in-place
+            # to prune the walk selectively below.
+            all_dirs = dirs[:]
+            dirs[:] = []
+
+            for dir in all_dirs:
+                full_path = os.path.join(root, dir)
+                rel_path = os.path.relpath(full_path, where)
+                package = rel_path.replace(os.path.sep, '.')
+
+                # Skip directory trees that are not valid packages
+                if '.' in dir or not cls._looks_like_package(full_path, package):
+                    continue
+
+                # Should this package be included?
+                if include(package) and not exclude(package):
+                    yield package
+
+                # Early pruning if there is nothing else to be scanned
+                if f"{package}*" in exclude or f"{package}.*" in exclude:
+                    continue
+
+                # Keep searching subdirectories, as there may be more packages
+                # down there, even if the parent was excluded.
+                dirs.append(dir)
+
+    @staticmethod
+    def _looks_like_package(path: _Path, _package_name: str) -> bool:
+        """Does a directory look like a package?"""
+        return os.path.isfile(os.path.join(path, '__init__.py'))
+
+
+class PEP420PackageFinder(PackageFinder):
+    @staticmethod
+    def _looks_like_package(_path: _Path, _package_name: str) -> bool:
+        return True
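A hedged usage example of the two finders defined above (the directory layout is hypothetical):

```python
# Only directories containing an __init__.py are reported:
print(PackageFinder.find(where="src", exclude=("tests", "tests.*")))
# e.g. ['mypkg', 'mypkg.sub']

# The PEP 420 finder also accepts namespace packages (no __init__.py needed):
print(PEP420PackageFinder.find(where="src"))
```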
+
+
+class ModuleFinder(_Finder):
+    """Find isolated Python modules.
+    This finder will **not** recurse into subdirectories.
+    """
+
+    @classmethod
+    def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+        for file in glob(os.path.join(where, "*.py")):
+            module, _ext = os.path.splitext(os.path.basename(file))
+
+            if not cls._looks_like_module(module):
+                continue
+
+            if include(module) and not exclude(module):
+                yield module
+
+    _looks_like_module = staticmethod(_valid_name)
+
+
+# We have to be extra careful in the case of flat layout to not include files
+# and directories not meant for distribution (e.g. tool-related)
+
+
+class FlatLayoutPackageFinder(PEP420PackageFinder):
+    _EXCLUDE = (
+        "ci",
+        "bin",
+        "doc",
+        "docs",
+        "documentation",
+        "manpages",
+        "news",
+        "changelog",
+        "test",
+        "tests",
+        "unit_test",
+        "unit_tests",
+        "example",
+        "examples",
+        "scripts",
+        "tools",
+        "util",
+        "utils",
+        "python",
+        "build",
+        "dist",
+        "venv",
+        "env",
+        "requirements",
+        # ---- Task runners / Build tools ----
+        "tasks",  # invoke
+        "fabfile",  # fabric
+        "site_scons",  # SCons
+        # ---- Other tools ----
+        "benchmark",
+        "benchmarks",
+        "exercise",
+        "exercises",
+        "htmlcov",  # Coverage.py
+        # ---- Hidden directories/Private packages ----
+        "[._]*",
+    )
+
+    DEFAULT_EXCLUDE = tuple(chain_iter((p, f"{p}.*") for p in _EXCLUDE))
+    """Reserved package names"""
+
+    @staticmethod
+    def _looks_like_package(_path: _Path, package_name: str) -> bool:
+        names = package_name.split('.')
+        # Consider PEP 561
+        root_pkg_is_valid = names[0].isidentifier() or names[0].endswith("-stubs")
+        return root_pkg_is_valid and all(name.isidentifier() for name in names[1:])
+
+
+class FlatLayoutModuleFinder(ModuleFinder):
+    DEFAULT_EXCLUDE = (
+        "setup",
+        "conftest",
+        "test",
+        "tests",
+        "example",
+        "examples",
+        "build",
+        # ---- Task runners ----
+        "toxfile",
+        "noxfile",
+        "pavement",
+        "dodo",
+        "tasks",
+        "fabfile",
+        # ---- Other tools ----
+        "[Ss][Cc]onstruct",  # SCons
+        "conanfile",  # Connan: C/C++ build tool
+        "manage",  # Django
+        "benchmark",
+        "benchmarks",
+        "exercise",
+        "exercises",
+        # ---- Hidden files/Private modules ----
+        "[._]*",
+    )
+    """Reserved top-level module names"""
+
+
+def _find_packages_within(root_pkg: str, pkg_dir: _Path) -> List[str]:
+    nested = PEP420PackageFinder.find(pkg_dir)
+    return [root_pkg] + [".".join((root_pkg, n)) for n in nested]
+
+
+class ConfigDiscovery:
+    """Fill-in metadata and options that can be automatically derived
+    (from other metadata/options, the file system or conventions)
+    """
+
+    def __init__(self, distribution: "Distribution"):
+        self.dist = distribution
+        self._called = False
+        self._disabled = False
+        self._skip_ext_modules = False
+
+    def _disable(self):
+        """Internal API to disable automatic discovery"""
+        self._disabled = True
+
+    def _ignore_ext_modules(self):
+        """Internal API to disregard ext_modules.
+
+        Normally auto-discovery would not be triggered if ``ext_modules`` are set
+        (this is done for backward compatibility with existing packages relying on
+        ``setup.py`` or ``setup.cfg``). However, ``setuptools`` can call this function
+        to ignore the given ``ext_modules`` and proceed with auto-discovery if
+        ``packages`` and ``py_modules`` are not given (e.g. when using pyproject.toml
+        metadata).
+        """
+        self._skip_ext_modules = True
+
+    @property
+    def _root_dir(self) -> _Path:
+        # It is best to wait until `src_root` is set in dist before using _root_dir.
+        return self.dist.src_root or os.curdir
+
+    @property
+    def _package_dir(self) -> Dict[str, str]:
+        if self.dist.package_dir is None:
+            return {}
+        return self.dist.package_dir
+
+    def __call__(self, force=False, name=True, ignore_ext_modules=False):
+        """Automatically discover missing configuration fields
+        and modify the given ``distribution`` object in-place.
+
+        Note that by default this will only have an effect the first time the
+        ``ConfigDiscovery`` object is called.
+
+        To repeatedly invoke automatic discovery (e.g. when the project
+        directory changes), please use ``force=True`` (or create a new
+        ``ConfigDiscovery`` instance).
+        """
+        if force is False and (self._called or self._disabled):
+            # Avoid overhead of multiple calls
+            return
+
+        self._analyse_package_layout(ignore_ext_modules)
+        if name:
+            self.analyse_name()  # depends on ``packages`` and ``py_modules``
+
+        self._called = True
+
+    def _explicitly_specified(self, ignore_ext_modules: bool) -> bool:
+        """``True`` if the user has specified some form of package/module listing"""
+        ignore_ext_modules = ignore_ext_modules or self._skip_ext_modules
+        ext_modules = not (self.dist.ext_modules is None or ignore_ext_modules)
+        return (
+            self.dist.packages is not None
+            or self.dist.py_modules is not None
+            or ext_modules
+            or hasattr(self.dist, "configuration") and self.dist.configuration
+            # ^ Some projects use numpy.distutils.misc_util.Configuration
+        )
+
+    def _analyse_package_layout(self, ignore_ext_modules: bool) -> bool:
+        if self._explicitly_specified(ignore_ext_modules):
+            # Nothing to discover here: for backward compatibility, automatic
+            # discovery only applies when no packages/modules are explicitly given.
+            return True
+
+        log.debug(
+            "No `packages` or `py_modules` configuration, performing "
+            "automatic discovery."
+        )
+
+        return (
+            self._analyse_explicit_layout()
+            or self._analyse_src_layout()
+            # flat-layout is the trickiest for discovery so it should be last
+            or self._analyse_flat_layout()
+        )
+
+    def _analyse_explicit_layout(self) -> bool:
+        """The user can explicitly give a package layout via ``package_dir``"""
+        package_dir = self._package_dir.copy()  # don't modify directly
+        package_dir.pop("", None)  # This falls under the "src-layout" umbrella
+        root_dir = self._root_dir
+
+        if not package_dir:
+            return False
+
+        log.debug(f"`explicit-layout` detected -- analysing {package_dir}")
+        pkgs = chain_iter(
+            _find_packages_within(pkg, os.path.join(root_dir, parent_dir))
+            for pkg, parent_dir in package_dir.items()
+        )
+        self.dist.packages = list(pkgs)
+        log.debug(f"discovered packages -- {self.dist.packages}")
+        return True
+
+    def _analyse_src_layout(self) -> bool:
+        """Try to find all packages or modules under the ``src`` directory
+        (or anything pointed by ``package_dir[""]``).
+
+        The "src-layout" is relatively safe for automatic discovery.
+        We assume that everything within is meant to be included in the
+        distribution.
+
+        If ``package_dir[""]`` is not given, but the ``src`` directory exists,
+        this function will set ``package_dir[""] = "src"``.
+        """
+        package_dir = self._package_dir
+        src_dir = os.path.join(self._root_dir, package_dir.get("", "src"))
+        if not os.path.isdir(src_dir):
+            return False
+
+        log.debug(f"`src-layout` detected -- analysing {src_dir}")
+        package_dir.setdefault("", os.path.basename(src_dir))
+        self.dist.package_dir = package_dir  # persist eventual modifications
+        self.dist.packages = PEP420PackageFinder.find(src_dir)
+        self.dist.py_modules = ModuleFinder.find(src_dir)
+        log.debug(f"discovered packages -- {self.dist.packages}")
+        log.debug(f"discovered py_modules -- {self.dist.py_modules}")
+        return True
+
+    def _analyse_flat_layout(self) -> bool:
+        """Try to find all packages and modules under the project root.
+
+        Since the ``flat-layout`` is more dangerous in terms of accidentally including
+        extra files/directories, this function is more conservative and will raise an
+        error if multiple packages or modules are found.
+
+        This assumes that multi-package dists are uncommon and refuses to support that
+        use case in order to prevent unintended errors.
+        """
+        log.debug(f"`flat-layout` detected -- analysing {self._root_dir}")
+        return self._analyse_flat_packages() or self._analyse_flat_modules()
+
+    def _analyse_flat_packages(self) -> bool:
+        self.dist.packages = FlatLayoutPackageFinder.find(self._root_dir)
+        top_level = remove_nested_packages(remove_stubs(self.dist.packages))
+        log.debug(f"discovered packages -- {self.dist.packages}")
+        self._ensure_no_accidental_inclusion(top_level, "packages")
+        return bool(top_level)
+
+    def _analyse_flat_modules(self) -> bool:
+        self.dist.py_modules = FlatLayoutModuleFinder.find(self._root_dir)
+        log.debug(f"discovered py_modules -- {self.dist.py_modules}")
+        self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules")
+        return bool(self.dist.py_modules)
+
+    def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str):
+        if len(detected) > 1:
+            from inspect import cleandoc
+
+            from setuptools.errors import PackageDiscoveryError
+
+            msg = f"""Multiple top-level {kind} discovered in a flat-layout: {detected}.
+
+            To avoid accidental inclusion of unwanted files or directories,
+            setuptools will not proceed with this build.
+
+            If you are trying to create a single distribution with multiple {kind}
+            on purpose, you should not rely on automatic discovery.
+            Instead, consider the following options:
+
+            1. set up custom discovery (`find` directive with `include` or `exclude`)
+            2. use a `src-layout`
+            3. explicitly set `py_modules` or `packages` with a list of names
+
+            To find more information, look for "package discovery" in the setuptools docs.
+            """
+            raise PackageDiscoveryError(cleandoc(msg))
+
+    def analyse_name(self):
+        """The packages/modules are the essential contribution of the author.
+        Therefore the name of the distribution can be derived from them.
+        """
+        if self.dist.metadata.name or self.dist.name:
+            # get_name() is not reliable (can return "UNKNOWN")
+            return None
+
+        log.debug("No `name` configuration, performing automatic discovery")
+
+        name = (
+            self._find_name_single_package_or_module()
+            or self._find_name_from_packages()
+        )
+        if name:
+            self.dist.metadata.name = name
+
+    def _find_name_single_package_or_module(self) -> Optional[str]:
+        """Exactly one module or package"""
+        for field in ('packages', 'py_modules'):
+            items = getattr(self.dist, field, None) or []
+            if items and len(items) == 1:
+                log.debug(f"Single module/package detected, name: {items[0]}")
+                return items[0]
+
+        return None
+
+    def _find_name_from_packages(self) -> Optional[str]:
+        """Try to find the root package that is not a PEP 420 namespace"""
+        if not self.dist.packages:
+            return None
+
+        packages = remove_stubs(sorted(self.dist.packages, key=len))
+        package_dir = self.dist.package_dir or {}
+
+        parent_pkg = find_parent_package(packages, package_dir, self._root_dir)
+        if parent_pkg:
+            log.debug(f"Common parent package detected, name: {parent_pkg}")
+            return parent_pkg
+
+        log.warn("No parent package detected, impossible to derive `name`")
+        return None
+
+
+def remove_nested_packages(packages: List[str]) -> List[str]:
+    """Remove nested packages from a list of packages.
+
+    >>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"])
+    ['a']
+    >>> remove_nested_packages(["a", "b", "c.d", "c.d.e.f", "g.h", "a.a1"])
+    ['a', 'b', 'c.d', 'g.h']
+    """
+    pkgs = sorted(packages, key=len)
+    top_level = pkgs[:]
+    size = len(pkgs)
+    for i, name in enumerate(reversed(pkgs)):
+        if any(name.startswith(f"{other}.") for other in top_level):
+            top_level.pop(size - i - 1)
+
+    return top_level
+
+
+def remove_stubs(packages: List[str]) -> List[str]:
+    """Remove type stubs (:pep:`561`) from a list of packages.
+
+    >>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"])
+    ['a', 'a.b', 'b']
+    """
+    return [pkg for pkg in packages if not pkg.split(".")[0].endswith("-stubs")]
+
+
+def find_parent_package(
+    packages: List[str], package_dir: Mapping[str, str], root_dir: _Path
+) -> Optional[str]:
+    """Find the parent package that is not a namespace."""
+    packages = sorted(packages, key=len)
+    common_ancestors = []
+    for i, name in enumerate(packages):
+        if not all(n.startswith(f"{name}.") for n in packages[i+1:]):
+            # Since packages are sorted by length, this condition is able
+            # to find a list of all common ancestors.
+            # When there is divergence (e.g. multiple root packages)
+            # the list will be empty
+            break
+        common_ancestors.append(name)
+
+    for name in common_ancestors:
+        pkg_path = find_package_path(name, package_dir, root_dir)
+        init = os.path.join(pkg_path, "__init__.py")
+        if os.path.isfile(init):
+            return name
+
+    return None
+
+
+def find_package_path(
+    name: str, package_dir: Mapping[str, str], root_dir: _Path
+) -> str:
+    """Given a package name, return the path where it should be found on
+    disk, considering the ``package_dir`` option.
+
+    >>> path = find_package_path("my.pkg", {"": "root/is/nested"}, ".")
+    >>> path.replace(os.sep, "/")
+    './root/is/nested/my/pkg'
+
+    >>> path = find_package_path("my.pkg", {"my": "root/is/nested"}, ".")
+    >>> path.replace(os.sep, "/")
+    './root/is/nested/pkg'
+
+    >>> path = find_package_path("my.pkg", {"my.pkg": "root/is/nested"}, ".")
+    >>> path.replace(os.sep, "/")
+    './root/is/nested'
+
+    >>> path = find_package_path("other.pkg", {"my.pkg": "root/is/nested"}, ".")
+    >>> path.replace(os.sep, "/")
+    './other/pkg'
+    """
+    parts = name.split(".")
+    for i in range(len(parts), 0, -1):
+        # Look backwards, the most specific package_dir first
+        partial_name = ".".join(parts[:i])
+        if partial_name in package_dir:
+            parent = package_dir[partial_name]
+            return os.path.join(root_dir, parent, *parts[i:])
+
+    parent = package_dir.get("") or ""
+    return os.path.join(root_dir, *parent.split("/"), *parts)
+
+
+def construct_package_dir(packages: List[str], package_path: _Path) -> Dict[str, str]:
+    parent_pkgs = remove_nested_packages(packages)
+    prefix = Path(package_path).parts
+    return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs}
diff --git a/setuptools/dist.py b/setuptools/dist.py
index e825785e2a..e5a7b8ed9e 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -1,11 +1,9 @@
-# -*- coding: utf-8 -*-
 __all__ = ['Distribution']
 
 import io
 import sys
 import re
 import os
-import warnings
 import numbers
 import distutils.log
 import distutils.core
@@ -18,7 +16,9 @@
 from glob import iglob
 import itertools
 import textwrap
-from typing import List, Optional, TYPE_CHECKING
+from contextlib import suppress
+from typing import List, Optional, Set, TYPE_CHECKING
+from pathlib import Path
 
 from collections import defaultdict
 from email import message_from_file
@@ -28,21 +28,21 @@
 
 from setuptools.extern import packaging
 from setuptools.extern import ordered_set
-from setuptools.extern.more_itertools import unique_everseen, always_iterable
-
-from ._importlib import metadata
-
-from . import SetuptoolsDeprecationWarning
+from setuptools.extern.more_itertools import unique_everseen, partition
 
 import setuptools
 import setuptools.command
 from setuptools import windows_support
 from setuptools.monkey import get_unpatched
-from setuptools.config import parse_configuration
-import pkg_resources
-from setuptools.extern.packaging import version, requirements
+from setuptools.config import setupcfg, pyprojecttoml
+from setuptools.discovery import ConfigDiscovery
+
+from setuptools.extern.packaging import version
 from . import _reqs
 from . import _entry_points
+from . import _normalization
+from ._importlib import metadata
+from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
 if TYPE_CHECKING:
     from email.message import Message
@@ -52,7 +52,12 @@
 
 
 def _get_unpatched(cls):
-    warnings.warn("Do not call this function", DistDeprecationWarning)
+    DistDeprecationWarning.emit(
+        "Private function",
+        "Do not call this function",
+        due_date=(2023, 6, 1),
+        # Warning initially introduced in 2016
+    )
     return get_unpatched(cls)
 
 
@@ -98,7 +103,7 @@ def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]:
 
 def _read_payload_from_msg(msg: "Message") -> Optional[str]:
     value = msg.get_payload().strip()
-    if value == 'UNKNOWN':
+    if value == 'UNKNOWN' or not value:
         return None
     return value
 
@@ -154,7 +159,9 @@ def single_line(val):
     if '\n' in val:
         # TODO: Replace with `raise ValueError("newlines not allowed")`
         # after reviewing #2893.
-        warnings.warn("newlines not allowed and will break in the future")
+        msg = "newlines are not allowed in `summary` and will break in the future"
+        SetuptoolsDeprecationWarning.emit("Invalid config.", msg)
+        # due_date is undefined. Controversial change; there was a lot of pushback.
         val = val.strip().split('\n')[0]
     return val
 
@@ -170,7 +177,10 @@ def write_field(key, value):
     write_field('Metadata-Version', str(version))
     write_field('Name', self.get_name())
     write_field('Version', self.get_version())
-    write_field('Summary', single_line(self.get_description()))
+
+    summary = self.get_description()
+    if summary:
+        write_field('Summary', single_line(summary))
 
     optional_fields = (
         ('Home-page', 'url'),
@@ -186,8 +196,10 @@ def write_field(key, value):
         if attr_val is not None:
             write_field(field, attr_val)
 
-    license = rfc822_escape(self.get_license())
-    write_field('License', license)
+    license = self.get_license()
+    if license:
+        write_field('License', rfc822_escape(license))
+
     for project_url in self.project_urls.items():
         write_field('Project-URL', '%s, %s' % project_url)
 
@@ -195,7 +207,8 @@ def write_field(key, value):
     if keywords:
         write_field('Keywords', keywords)
 
-    for platform in self.get_platforms():
+    platforms = self.get_platforms() or []
+    for platform in platforms:
         write_field('Platform', platform)
 
     self._write_list(file, 'Classifier', self.get_classifiers())
@@ -218,7 +231,11 @@ def write_field(key, value):
 
     self._write_list(file, 'License-File', self.license_files or [])
 
-    file.write("\n%s\n\n" % self.get_long_description())
+    long_description = self.get_long_description()
+    if long_description:
+        file.write("\n%s" % long_description)
+        if not long_description.endswith("\n"):
+            file.write("\n")
 
 
 sequence = tuple, list
@@ -266,6 +283,15 @@ def check_nsp(dist, attr, value):
                 nsp,
                 parent,
             )
+        SetuptoolsDeprecationWarning.emit(
+            "The namespace_packages parameter is deprecated.",
+            "Please replace its usage with implicit namespaces (PEP 420).",
+            see_docs="references/keywords.html#keyword-namespace-packages"
+            # TODO: define due_date, it may break old packages that are no longer
+            # maintained (e.g. sphinxcontrib extensions) when installed from source.
+            # Warning officially introduced in May 2022, however the deprecation
+            # was mentioned much earlier in the docs (May 2020, see #2149).
+        )
 
 
 def check_extras(dist, attr, value):
@@ -282,11 +308,21 @@ def check_extras(dist, attr, value):
 
 def _check_extra(extra, reqs):
     name, sep, marker = extra.partition(':')
-    if marker and pkg_resources.invalid_marker(marker):
-        raise DistutilsSetupError("Invalid environment marker: " + marker)
+    try:
+        _check_marker(marker)
+    except packaging.markers.InvalidMarker:
+        msg = f"Invalid environment marker: {marker} ({extra!r})"
+        raise DistutilsSetupError(msg) from None
     list(_reqs.parse(reqs))
 
 
+def _check_marker(marker):
+    if not marker:
+        return
+    m = packaging.markers.Marker(marker)
+    m.evaluate()
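`_check_marker` relies on `packaging.markers.Marker` raising `InvalidMarker` on malformed input, which `_check_extra` then converts into a `DistutilsSetupError`. For example:

```python
from packaging.markers import InvalidMarker, Marker

Marker('python_version < "3.8"')  # parses fine
try:
    Marker("python_version <")  # incomplete expression
except InvalidMarker as e:
    print(f"rejected: {e}")
```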
+
+
 def assert_bool(dist, attr, value):
     """Verify that value is True, False, 0, or 1"""
     if bool(value) != value:
@@ -296,7 +332,8 @@ def assert_bool(dist, attr, value):
 
 def invalid_unless_false(dist, attr, value):
     if not value:
-        warnings.warn(f"{attr} is ignored.", DistDeprecationWarning)
+        DistDeprecationWarning.emit(f"{attr} is ignored.")
+        # TODO: should there be a `due_date` here?
         return
     raise DistutilsSetupError(f"{attr} is invalid.")
 
@@ -436,11 +473,12 @@ def patch_missing_pkg_info(self, attrs):
         #
         if not attrs or 'name' not in attrs or 'version' not in attrs:
             return
-        key = pkg_resources.safe_name(str(attrs['name'])).lower()
-        dist = pkg_resources.working_set.by_key.get(key)
-        if dist is not None and not dist.has_metadata('PKG-INFO'):
-            dist._version = pkg_resources.safe_version(str(attrs['version']))
-            self._patched_dist = dist
+        name = _normalization.safe_name(str(attrs['name'])).lower()
+        with suppress(metadata.PackageNotFoundError):
+            dist = metadata.distribution(name)
+            if dist is not None and not dist.read_text('PKG-INFO'):
+                dist._version = _normalization.safe_version(str(attrs['version']))
+                self._patched_dist = dist
 
     def __init__(self, attrs=None):
         have_package_data = hasattr(self, "package_data")
@@ -464,6 +502,18 @@ def __init__(self, attrs=None):
             },
         )
 
+        # Private API (setuptools-use only, not restricted to Distribution)
+        # Stores files that are referenced by the configuration and need to be in the
+        # sdist (e.g. `version = file: VERSION.txt`)
+        self._referenced_files: Set[str] = set()
+
+        # Save the original dependencies before they are processed into the egg format
+        self._orig_extras_require = {}
+        self._orig_install_requires = []
+        self._tmp_extras_require = defaultdict(ordered_set.OrderedSet)
+
+        self.set_defaults = ConfigDiscovery(self)
+
         self._set_metadata_defaults(attrs)
 
         self.metadata.version = self._normalize_version(
@@ -501,8 +551,7 @@ def _normalize_version(version):
 
         normalized = str(packaging.version.Version(version))
         if version != normalized:
-            tmpl = "Normalizing '{version}' to '{normalized}'"
-            warnings.warn(tmpl.format(**locals()))
+            InformationOnly.emit(f"Normalizing '{version}' to '{normalized}'")
             return normalized
         return version
 
@@ -516,11 +565,17 @@ def _validate_version(version):
             try:
                 packaging.version.Version(version)
             except (packaging.version.InvalidVersion, TypeError):
-                warnings.warn(
-                    "The version specified (%r) is an invalid version, this "
-                    "may not work as expected with newer versions of "
-                    "setuptools, pip, and PyPI. Please see PEP 440 for more "
-                    "details." % version
+                SetuptoolsDeprecationWarning.emit(
+                    f"Invalid version: {version!r}.",
+                    """
+                    The version specified is not a valid version according to PEP 440.
+                    This may not work as expected with newer versions of
+                    setuptools, pip, and PyPI.
+                    """,
+                    see_url="https://peps.python.org/pep-0440/",
+                    due_date=(2023, 9, 26),
+                    # Warning initially introduced on 26 Sept 2014
+                    # pypa/packaging already removed legacy versions.
                 )
                 return setuptools.sic(version)
         return version
@@ -534,6 +589,8 @@ def _finalize_requires(self):
             self.metadata.python_requires = self.python_requires
 
         if getattr(self, 'extras_require', None):
+            # Save the original before it is messed up by _convert_extras_requirements
+            self._orig_extras_require = self._orig_extras_require or self.extras_require
             for extra in self.extras_require.keys():
                 # Since this gets called multiple times at points where the
                 # keys have become 'converted' extras, ensure that we are only
@@ -542,6 +599,10 @@ def _finalize_requires(self):
                 if extra:
                     self.metadata.provides_extras.add(extra)
 
+        if getattr(self, 'install_requires', None) and not self._orig_install_requires:
+            # Save the original before it is messed up by _move_install_requirements_markers
+            self._orig_install_requires = self.install_requires
+
         self._convert_extras_requirements()
         self._move_install_requirements_markers()
 
@@ -552,7 +613,8 @@ def _convert_extras_requirements(self):
         `"extra:{marker}": ["barbazquux"]`.
         """
         spec_ext_reqs = getattr(self, 'extras_require', None) or {}
-        self._tmp_extras_require = defaultdict(list)
+        tmp = defaultdict(ordered_set.OrderedSet)
+        self._tmp_extras_require = getattr(self, '_tmp_extras_require', tmp)
         for section, v in spec_ext_reqs.items():
             # Do not strip empty sections.
             self._tmp_extras_require[section]
@@ -590,7 +652,8 @@ def is_simple_req(req):
         for r in complex_reqs:
             self._tmp_extras_require[':' + str(r.marker)].append(r)
         self.extras_require = dict(
-            (k, [str(r) for r in map(self._clean_req, v)])
+            # list(dict.fromkeys(...))  ensures a list of unique strings
+            (k, list(dict.fromkeys(str(r) for r in map(self._clean_req, v))))
             for k, v in self._tmp_extras_require.items()
         )
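The `dict.fromkeys` idiom referenced in the comment above deduplicates while preserving first-seen order, which a `set` would not:

```python
>>> list(dict.fromkeys(["b", "a", "b", "c"]))
['b', 'a', 'c']
```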
 
@@ -735,10 +798,15 @@ def warn_dash_deprecation(self, opt, section):
             return underscore_opt
 
         if '-' in opt:
-            warnings.warn(
-                "Usage of dash-separated '%s' will not be supported in future "
-                "versions. Please use the underscore name '%s' instead"
-                % (opt, underscore_opt)
+            SetuptoolsDeprecationWarning.emit(
+                "Invalid dash-separated options",
+                f"""
+                Usage of dash-separated {opt!r} will not be supported in future
+                versions. Please use the underscore name {underscore_opt!r} instead.
+                """,
+                see_docs="userguide/declarative_config.html",
+                due_date=(2023, 9, 26),
+                # Warning initially introduced on 3 Mar 2021
             )
         return underscore_opt
 
@@ -754,10 +822,15 @@ def make_option_lowercase(self, opt, section):
             return opt
 
         lowercase_opt = opt.lower()
-        warnings.warn(
-            "Usage of uppercase key '%s' in '%s' will be deprecated in future "
-            "versions. Please use lowercase '%s' instead"
-            % (opt, section, lowercase_opt)
+        SetuptoolsDeprecationWarning.emit(
+            "Invalid uppercase configuration",
+            f"""
+            Usage of uppercase key {opt!r} in {section!r} will not be supported in
+            future versions. Please use lowercase {lowercase_opt!r} instead.
+            """,
+            see_docs="userguide/declarative_config.html",
+            due_date=(2023, 9, 26),
+                # Warning initially introduced on 6 Mar 2021
         )
         return lowercase_opt
 
@@ -808,29 +881,40 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
 
+    def _get_project_config_files(self, filenames):
+        """Add default file and split between INI and TOML"""
+        tomlfiles = []
+        standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml")
+        if filenames is not None:
+            parts = partition(lambda f: Path(f).suffix == ".toml", filenames)
+            filenames = list(parts[0])  # 1st element => predicate is False
+            tomlfiles = list(parts[1])  # 2nd element => predicate is True
+        elif standard_project_metadata.exists():
+            tomlfiles = [standard_project_metadata]
+        return filenames, tomlfiles
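`partition` (here the vendored `more_itertools.partition`) returns a pair of iterators, false-items first, which the method above unpacks into INI and TOML groups. A quick demonstration with the public package:

```python
from more_itertools import partition

non_toml, toml = partition(lambda f: f.endswith(".toml"),
                           ["setup.cfg", "pyproject.toml", "extra.cfg"])
print(list(non_toml))  # ['setup.cfg', 'extra.cfg']  -- predicate is False
print(list(toml))      # ['pyproject.toml']          -- predicate is True
```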
+
     def parse_config_files(self, filenames=None, ignore_option_errors=False):
         """Parses configuration files from various levels
         and loads configuration.
-
         """
-        self._parse_config_files(filenames=filenames)
+        inifiles, tomlfiles = self._get_project_config_files(filenames)
+
+        self._parse_config_files(filenames=inifiles)
 
-        parse_configuration(
+        setupcfg.parse_configuration(
             self, self.command_options, ignore_option_errors=ignore_option_errors
         )
+        for filename in tomlfiles:
+            pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
+
         self._finalize_requires()
         self._finalize_license_files()
 
     def fetch_build_eggs(self, requires):
         """Resolve pre-setup requirements"""
-        resolved_dists = pkg_resources.working_set.resolve(
-            _reqs.parse(requires),
-            installer=self.fetch_build_egg,
-            replace_conflicting=True,
-        )
-        for dist in resolved_dists:
-            pkg_resources.working_set.add(dist, replace=True)
-        return resolved_dists
+        from setuptools.installer import _fetch_build_eggs
+
+        return _fetch_build_eggs(self, requires)
 
     def finalize_options(self):
         """
@@ -868,33 +952,8 @@ def _finalize_setup_keywords(self):
         for ep in metadata.entry_points(group='distutils.setup_keywords'):
             value = getattr(self, ep.name, None)
             if value is not None:
-                self._install_dependencies(ep)
                 ep.load()(self, ep.name, value)
 
-    def _install_dependencies(self, ep):
-        """
-        Given an entry point, ensure that any declared extras for
-        its distribution are installed.
-        """
-        reqs = {
-            req
-            for req in map(requirements.Requirement, always_iterable(ep.dist.requires))
-            for extra in ep.extras
-            if extra in req.extras
-        }
-        missing = itertools.filterfalse(self._is_installed, reqs)
-        for req in missing:
-            # fetch_build_egg expects pkg_resources.Requirement
-            self.fetch_build_egg(pkg_resources.Requirement(str(req)))
-
-    def _is_installed(self, req):
-        try:
-            dist = metadata.distribution(req.name)
-        except metadata.PackageNotFoundError:
-            return False
-        found_ver = packaging.version.Version(dist.version())
-        return found_ver in req.specifier
-
     def get_egg_cache_dir(self):
         egg_cache_dir = os.path.join(os.curdir, '.eggs')
         if not os.path.exists(egg_cache_dir):
@@ -927,7 +986,6 @@ def get_command_class(self, command):
 
         eps = metadata.entry_points(group='distutils.commands', name=command)
         for ep in eps:
-            self._install_dependencies(ep)
             self.cmdclass[command] = cmdclass = ep.load()
             return cmdclass
         else:
@@ -1172,19 +1230,18 @@ def handle_display_options(self, option_order):
 
         # Print metadata in UTF-8 no matter the platform
         encoding = sys.stdout.encoding
-        errors = sys.stdout.errors
-        newline = sys.platform != 'win32' and '\n' or None
-        line_buffering = sys.stdout.line_buffering
-
-        sys.stdout = io.TextIOWrapper(
-            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering
-        )
+        sys.stdout.reconfigure(encoding='utf-8')
         try:
             return _Distribution.handle_display_options(self, option_order)
         finally:
-            sys.stdout = io.TextIOWrapper(
-                sys.stdout.detach(), encoding, errors, newline, line_buffering
-            )
+            sys.stdout.reconfigure(encoding=encoding)
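`io.TextIOWrapper.reconfigure` (Python 3.7+) swaps the stream encoding in place, which is what allows the detach-and-rewrap dance in the removed lines to be dropped:

```python
import sys

original = sys.stdout.encoding
sys.stdout.reconfigure(encoding="utf-8")
try:
    print("metadata printed as UTF-8 regardless of platform")
finally:
    sys.stdout.reconfigure(encoding=original)  # restore the previous encoding
```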
+
+    def run_command(self, command):
+        self.set_defaults()
+        # Postpone defaults until all explicit configuration is considered
+        # (setup() args, config files, command line and plugins)
+
+        super().run_command(command)
 
 
 class DistDeprecationWarning(SetuptoolsDeprecationWarning):
diff --git a/setuptools/errors.py b/setuptools/errors.py
index f4d35a630a..ec7fb3b6c4 100644
--- a/setuptools/errors.py
+++ b/setuptools/errors.py
@@ -4,17 +4,6 @@
 """
 
 from distutils import errors as _distutils_errors
-from distutils.errors import DistutilsError
-
-
-class RemovedCommandError(DistutilsError, RuntimeError):
-    """Error used for commands that have been removed in setuptools.
-
-    Since ``setuptools`` is built on ``distutils``, simply removing a command
-    from ``setuptools`` will make the behavior fall back to ``distutils``; this
-    error is raised if a command exists in ``distutils`` but has been actively
-    removed in ``setuptools``.
-    """
 
 
 # Re-export errors from distutils to facilitate the migration to PEP632
@@ -38,3 +27,32 @@ class RemovedCommandError(DistutilsError, RuntimeError):
 
 # The root error class in the hierarchy
 BaseError = _distutils_errors.DistutilsError
+
+
+class RemovedCommandError(BaseError, RuntimeError):
+    """Error used for commands that have been removed in setuptools.
+
+    Since ``setuptools`` is built on ``distutils``, simply removing a command
+    from ``setuptools`` will make the behavior fall back to ``distutils``; this
+    error is raised if a command exists in ``distutils`` but has been actively
+    removed in ``setuptools``.
+    """
+
+
+class PackageDiscoveryError(BaseError, RuntimeError):
+    """Impossible to perform automatic discovery of packages and/or modules.
+
+    The current project layout or given discovery options can lead to problems when
+    scanning the project directory.
+
+    Setuptools might also refuse to complete auto-discovery if an error-prone condition
+    is detected (e.g. when a project is organised as a flat-layout but contains
+    multiple directories that can be taken as top-level packages inside a single
+    distribution [*]_). In these situations users are encouraged to be explicit
+    about which packages to include or to make the discovery parameters more specific.
+
+    .. [*] Since multi-package distributions are uncommon it is very likely that the
+       developers did not intend for all the directories to be packaged, and are just
+       leaving auxiliary code in the repository top-level, such as maintenance-related
+       scripts.
+    """
diff --git a/setuptools/extension.py b/setuptools/extension.py
index f696c9c1ac..58c023f6b4 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -28,7 +28,100 @@ def _have_cython():
 
 
 class Extension(_Extension):
-    """Extension that uses '.c' files in place of '.pyx' files"""
+    """
+    Describes a single extension module.
+
+    This means that all source files will be compiled into a single binary file
+    ``<name>.<suffix>`` (with ``<name>`` derived from ``name`` and
+    ``<suffix>`` defined by one of the values in
+    ``importlib.machinery.EXTENSION_SUFFIXES``).
+
+    In the case ``.pyx`` files are passed as ``sources`` and ``Cython`` is **not**
+    installed in the build environment, ``setuptools`` may also try to look for the
+    equivalent ``.cpp`` or ``.c`` files.
+
+    :arg str name:
+      the full name of the extension, including any packages -- i.e.
+      *not* a filename or pathname, but a Python dotted name
+
+    :arg list[str] sources:
+      list of source filenames, relative to the distribution root
+      (where the setup script lives), in Unix form (slash-separated)
+      for portability.  Source files may be C, C++, SWIG (.i),
+      platform-specific resource files, or whatever else is recognized
+      by the "build_ext" command as source for a Python extension.
+
+    :keyword list[str] include_dirs:
+      list of directories to search for C/C++ header files (in Unix
+      form for portability)
+
+    :keyword list[tuple[str, str|None]] define_macros:
+      list of macros to define; each macro is defined using a 2-tuple:
+      the first item corresponding to the name of the macro and the second
+      item either a string with its value or None to
+      define it without a particular value (equivalent of "#define
+      FOO" in source or -DFOO on Unix C compiler command line)
+
+    :keyword list[str] undef_macros:
+      list of macros to undefine explicitly
+
+    :keyword list[str] library_dirs:
+      list of directories to search for C/C++ libraries at link time
+
+    :keyword list[str] libraries:
+      list of library names (not filenames or paths) to link against
+
+    :keyword list[str] runtime_library_dirs:
+      list of directories to search for C/C++ libraries at run time
+      (for shared extensions, this is when the extension is loaded).
+      Setting this will cause an exception during build on Windows
+      platforms.
+
+    :keyword list[str] extra_objects:
+      list of extra files to link with (e.g. object files not implied
+      by 'sources', static libraries that must be explicitly specified,
+      binary resource files, etc.)
+
+    :keyword list[str] extra_compile_args:
+      any extra platform- and compiler-specific information to use
+      when compiling the source files in 'sources'.  For platforms and
+      compilers where "command line" makes sense, this is typically a
+      list of command-line arguments, but for other platforms it could
+      be anything.
+
+    :keyword list[str] extra_link_args:
+      any extra platform- and compiler-specific information to use
+      when linking object files together to create the extension (or
+      to create a new static Python interpreter).  Similar
+      interpretation as for 'extra_compile_args'.
+
+    :keyword list[str] export_symbols:
+      list of symbols to be exported from a shared extension.  Not
+      used on all platforms, and not generally necessary for Python
+      extensions, which typically export exactly one symbol: "init" +
+      extension_name.
+
+    :keyword list[str] swig_opts:
+      any extra options to pass to SWIG if a source file has the .i
+      extension.
+
+    :keyword list[str] depends:
+      list of files that the extension depends on
+
+    :keyword str language:
+      extension language (e.g. "c", "c++", "objc"). Will be detected
+      from the source extensions if not provided.
+
+    :keyword bool optional:
+      specifies that a build failure in the extension should not abort the
+      build process, but simply not install the failing extension.
+
+    :keyword bool py_limited_api:
+      opt-in flag for the usage of :doc:`Python's limited API <python:c-api/stable>`.
+
+    :raises setuptools.errors.PlatformError: if 'runtime_library_dirs' is
+      specified on Windows. (since v63)
+    """
 
     def __init__(self, name, sources, *args, **kw):
         # The *args is needed for compatibility as calls may use positional
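
A short usage sketch of the documented arguments; the package and file names are
hypothetical, and with ``Cython`` absent setuptools may fall back from ``algo.pyx``
to an ``algo.c`` next to it:

    from setuptools import Extension, setup

    ext = Extension(
        name="mypkg._native",            # dotted Python name, not a path
        sources=["src/mypkg/algo.pyx"],  # may fall back to algo.c / algo.cpp
        define_macros=[("NDEBUG", None)],
        language="c",
    )
    setup(name="mypkg", version="0.1", ext_modules=[ext])
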
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 98235a4b7b..bfd4d2d48d 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -70,7 +70,14 @@ def install(self):
 
 
 names = (
-    'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'importlib_metadata',
-    'zipp', 'importlib_resources', 'jaraco', 'typing_extensions',
+    'packaging',
+    'ordered_set',
+    'more_itertools',
+    'importlib_metadata',
+    'zipp',
+    'importlib_resources',
+    'jaraco',
+    'typing_extensions',
+    'tomli',
 )
 VendorImporter(__name__, names, 'setuptools._vendor').install()
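
Once the importer is installed, vendored names resolve transparently whether or
not the real dependency is present; a small sketch (version numbers arbitrary):

    # Served from site-packages if available, else from setuptools._vendor:
    from setuptools.extern.packaging.version import Version

    assert Version("67.7.2") > Version("60.9.3")
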
diff --git a/setuptools/installer.py b/setuptools/installer.py
index b7096df14b..44ed0da2a3 100644
--- a/setuptools/installer.py
+++ b/setuptools/installer.py
@@ -3,13 +3,13 @@
 import subprocess
 import sys
 import tempfile
-import warnings
 from distutils import log
 from distutils.errors import DistutilsError
+from functools import partial
 
-import pkg_resources
-from setuptools.wheel import Wheel
-from ._deprecation_warning import SetuptoolsDeprecationWarning
+from . import _reqs
+from .wheel import Wheel
+from .warnings import SetuptoolsDeprecationWarning
 
 
 def _fixup_find_links(find_links):
@@ -20,20 +20,34 @@ def _fixup_find_links(find_links):
     return find_links
 
 
-def fetch_build_egg(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
+def fetch_build_egg(dist, req):
     """Fetch an egg needed for building.
 
     Use pip/wheel to fetch/build a wheel."""
-    warnings.warn(
-        "setuptools.installer is deprecated. Requirements should "
-        "be satisfied by a PEP 517 installer.",
-        SetuptoolsDeprecationWarning,
+    _DeprecatedInstaller.emit()
+    _warn_wheel_not_available(dist)
+    return _fetch_build_egg_no_warn(dist, req)
+
+
+def _fetch_build_eggs(dist, requires):
+    import pkg_resources  # Delay import to avoid unnecessary side-effects
+
+    _DeprecatedInstaller.emit(stacklevel=3)
+    _warn_wheel_not_available(dist)
+
+    resolved_dists = pkg_resources.working_set.resolve(
+        _reqs.parse(requires, pkg_resources.Requirement),  # required for compatibility
+        installer=partial(_fetch_build_egg_no_warn, dist),  # avoid warning twice
+        replace_conflicting=True,
     )
-    # Warn if wheel is not available
-    try:
-        pkg_resources.get_distribution('wheel')
-    except pkg_resources.DistributionNotFound:
-        dist.announce('WARNING: The wheel package is not available.', log.WARN)
+    for dist in resolved_dists:
+        pkg_resources.working_set.add(dist, replace=True)
+    return resolved_dists
+
+
+def _fetch_build_egg_no_warn(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
+    import pkg_resources  # Delay import to avoid unnecessary side-effects
+
     # Ignore environment markers; if supplied, it is required.
     req = strip_marker(req)
     # Take easy_install options into account, but do not override relevant
@@ -98,7 +112,27 @@ def strip_marker(req):
     calling pip with something like `babel; extra == "i18n"`, which
     would always be ignored.
     """
+    import pkg_resources  # Delay import to avoid unnecessary side-effects
+
     # create a copy to avoid mutating the input
     req = pkg_resources.Requirement.parse(str(req))
     req.marker = None
     return req
+
+
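
What ``strip_marker`` does, as a standalone sketch mirroring the code above:

    import pkg_resources

    req = pkg_resources.Requirement.parse('babel; extra == "i18n"')
    req.marker = None  # otherwise pip would always ignore the requirement
    assert str(req) == "babel"
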
+def _warn_wheel_not_available(dist):
+    import pkg_resources  # Delay import to avoid unnecessary side-effects
+
+    try:
+        pkg_resources.get_distribution('wheel')
+    except pkg_resources.DistributionNotFound:
+        dist.announce('WARNING: The wheel package is not available.', log.WARN)
+
+
+class _DeprecatedInstaller(SetuptoolsDeprecationWarning):
+    _SUMMARY = "setuptools.installer and fetch_build_eggs are deprecated."
+    _DETAILS = """
+    Requirements should be satisfied by a PEP 517 installer.
+    If you are using pip, you can try `pip install --use-pep517`.
+    """
+    # _DUE_DATE not decided yet
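
A minimal sketch of the class-based warning pattern used by ``_DeprecatedInstaller``;
the subclass below is hypothetical, and ``emit()`` is assumed to compose
``_SUMMARY``/``_DETAILS`` into the message as in ``setuptools.warnings``:

    from setuptools.warnings import SetuptoolsDeprecationWarning

    class _DeprecatedFrobnicate(SetuptoolsDeprecationWarning):
        _SUMMARY = "frobnicate is deprecated."
        _DETAILS = "Use defrobnicate instead."
        # _DUE_DATE = (2024, 6, 1)  # once decided, enforcement can kick in

    _DeprecatedFrobnicate.emit()  # one call site, one consistent message
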
diff --git a/setuptools/logging.py b/setuptools/logging.py
index 15b57613f6..0653878fc0 100644
--- a/setuptools/logging.py
+++ b/setuptools/logging.py
@@ -1,4 +1,5 @@
 import sys
+import inspect
 import logging
 import distutils.log
 from . import monkey
@@ -12,7 +13,7 @@ def configure():
     """
     Configure logging to emit warning and above to stderr
     and everything else to stdout. This behavior is provided
-    for compatibilty with distutils.log but may change in
+    for compatibility with distutils.log but may change in
     the future.
     """
     err_handler = logging.StreamHandler()
@@ -22,13 +23,13 @@ def configure():
     handlers = err_handler, out_handler
     logging.basicConfig(
         format="{message}", style='{', handlers=handlers, level=logging.DEBUG)
-    monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
-
-    # For some reason `distutils.log` module is getting cached in `distutils.dist`
-    # and then loaded again when patched,
-    # implying: id(distutils.log) != id(distutils.dist.log).
-    # Make sure the same module object is used everywhere:
-    distutils.dist.log = distutils.log
+    if inspect.ismodule(distutils.dist.log):
+        monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
+        # For some reason `distutils.log` module is getting cached in `distutils.dist`
+        # and then loaded again when patched,
+        # implying: id(distutils.log) != id(distutils.dist.log).
+        # Make sure the same module object is used everywhere:
+        distutils.dist.log = distutils.log
 
 
 def set_threshold(level):
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index fb36dc1a97..50653fc7ee 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -71,8 +71,6 @@ def patch_all():
         distutils.filelist.findall = setuptools.findall
 
     needs_warehouse = (
-        sys.version_info < (2, 7, 13)
-        or
         (3, 4) < sys.version_info < (3, 4, 6)
         or
         (3, 5) < sys.version_info <= (3, 5, 3)
@@ -143,7 +141,7 @@ def patch_params(mod_name, func_name):
         """
         Prepare the parameters for patch_func to patch indicated function.
         """
-        repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_'
+        repl_prefix = 'msvc14_'
         repl_name = repl_prefix + func_name.lstrip('_')
         repl = getattr(msvc, repl_name)
         mod = import_module(mod_name)
@@ -151,27 +149,11 @@ def patch_params(mod_name, func_name):
             raise ImportError(func_name)
         return repl, mod, func_name
 
-    # Python 2.7 to 3.4
-    msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler')
-
     # Python 3.5+
     msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')
 
-    try:
-        # Patch distutils.msvc9compiler
-        patch_func(*msvc9('find_vcvarsall'))
-        patch_func(*msvc9('query_vcvarsall'))
-    except ImportError:
-        pass
-
     try:
         # Patch distutils._msvccompiler._get_vc_env
         patch_func(*msvc14('_get_vc_env'))
     except ImportError:
         pass
-
-    try:
-        # Patch distutils._msvccompiler.gen_lib_options for Numpy
-        patch_func(*msvc14('gen_lib_options'))
-    except ImportError:
-        pass
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 281ea1c2af..26d4e74f99 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -3,14 +3,6 @@
 
 Known supported compilers:
 --------------------------
-Microsoft Visual C++ 9.0:
-    Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
-    Microsoft Windows SDK 6.1 (x86, x64, ia64)
-    Microsoft Windows SDK 7.0 (x86, x64, ia64)
-
-Microsoft Visual C++ 10.0:
-    Microsoft Windows SDK 7.1 (x86, x64, ia64)
-
 Microsoft Visual C++ 14.X:
     Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
     Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
@@ -23,17 +15,13 @@
 from io import open
 from os import listdir, pathsep
 from os.path import join, isfile, isdir, dirname
-import sys
 import contextlib
 import platform
 import itertools
 import subprocess
 import distutils.errors
-from setuptools.extern.packaging.version import LegacyVersion
 from setuptools.extern.more_itertools import unique_everseen
 
-from .monkey import get_unpatched
-
 if platform.system() == 'Windows':
     import winreg
     from os import environ
@@ -48,100 +36,6 @@ class winreg:
 
     environ = dict()
 
-_msvc9_suppress_errors = (
-    # msvc9compiler isn't available on some platforms
-    ImportError,
-
-    # msvc9compiler raises DistutilsPlatformError in some
-    # environments. See #1118.
-    distutils.errors.DistutilsPlatformError,
-)
-
-try:
-    from distutils.msvc9compiler import Reg
-except _msvc9_suppress_errors:
-    pass
-
-
-def msvc9_find_vcvarsall(version):
-    """
-    Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
-    compiler build for Python
-    (VCForPython / Microsoft Visual C++ Compiler for Python 2.7).
-
-    Fall back to original behavior when the standalone compiler is not
-    available.
-
-    Redirect the path of "vcvarsall.bat".
-
-    Parameters
-    ----------
-    version: float
-        Required Microsoft Visual C++ version.
-
-    Return
-    ------
-    str
-        vcvarsall.bat path
-    """
-    vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
-    key = vc_base % ('', version)
-    try:
-        # Per-user installs register the compiler path here
-        productdir = Reg.get_value(key, "installdir")
-    except KeyError:
-        try:
-            # All-user installs on a 64-bit system register here
-            key = vc_base % ('Wow6432Node\\', version)
-            productdir = Reg.get_value(key, "installdir")
-        except KeyError:
-            productdir = None
-
-    if productdir:
-        vcvarsall = join(productdir, "vcvarsall.bat")
-        if isfile(vcvarsall):
-            return vcvarsall
-
-    return get_unpatched(msvc9_find_vcvarsall)(version)
-
-
-def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
-    """
-    Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
-    Microsoft Visual C++ 9.0 and 10.0 compilers.
-
-    Set environment without use of "vcvarsall.bat".
-
-    Parameters
-    ----------
-    ver: float
-        Required Microsoft Visual C++ version.
-    arch: str
-        Target architecture.
-
-    Return
-    ------
-    dict
-        environment
-    """
-    # Try to get environment from vcvarsall.bat (Classical way)
-    try:
-        orig = get_unpatched(msvc9_query_vcvarsall)
-        return orig(ver, arch, *args, **kwargs)
-    except distutils.errors.DistutilsPlatformError:
-        # Pass error if Vcvarsall.bat is missing
-        pass
-    except ValueError:
-        # Pass error if environment not set after executing vcvarsall.bat
-        pass
-
-    # If error, try to set environment directly
-    try:
-        return EnvironmentInfo(arch, ver).return_env()
-    except distutils.errors.DistutilsPlatformError as exc:
-        _augment_exception(exc, ver, arch)
-        raise
-
 
 def _msvc14_find_vc2015():
     """Python 3.8 "distutils/_msvccompiler.py" backport"""
@@ -319,19 +213,6 @@ def msvc14_get_vc_env(plat_spec):
         raise
 
 
-def msvc14_gen_lib_options(*args, **kwargs):
-    """
-    Patched "distutils._msvccompiler.gen_lib_options" for fix
-    compatibility between "numpy.distutils" and "distutils._msvccompiler"
-    (for Numpy < 1.11.2)
-    """
-    if "numpy.distutils" in sys.modules:
-        import numpy as np
-        if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'):
-            return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
-    return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs)
-
-
 def _augment_exception(exc, version, arch=''):
     """
     Add details to the exception message to help guide the user
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 051e523a57..b992048c0d 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1,4 +1,5 @@
-"""PyPI and direct package downloading"""
+"""PyPI and direct package downloading."""
+
 import sys
 import os
 import re
@@ -8,7 +9,6 @@
 import base64
 import hashlib
 import itertools
-import warnings
 import configparser
 import html
 import http.client
@@ -19,15 +19,27 @@
 
 import setuptools
 from pkg_resources import (
-    CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
-    Environment, find_distributions, safe_name, safe_version,
-    to_filename, Requirement, DEVELOP_DIST, EGG_DIST, parse_version,
+    CHECKOUT_DIST,
+    Distribution,
+    BINARY_DIST,
+    normalize_path,
+    SOURCE_DIST,
+    Environment,
+    find_distributions,
+    safe_name,
+    safe_version,
+    to_filename,
+    Requirement,
+    DEVELOP_DIST,
+    EGG_DIST,
+    parse_version,
 )
 from distutils import log
 from distutils.errors import DistutilsError
 from fnmatch import translate
 from setuptools.wheel import Wheel
 from setuptools.extern.more_itertools import unique_everseen
+from setuptools.warnings import SetuptoolsDeprecationWarning
 
 
 EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
@@ -40,7 +52,9 @@
 EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
 
 __all__ = [
-    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
+    'PackageIndex',
+    'distros_for_url',
+    'parse_bdist_wininst',
     'interpret_distro_name',
 ]
 
@@ -48,7 +62,8 @@
 
 _tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
 user_agent = _tmpl.format(
-    py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
+    py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools
+)
 
 
 def parse_requirement_arg(spec):
@@ -120,13 +135,15 @@ def distros_for_location(location, basename, metadata=None):
         wheel = Wheel(basename)
         if not wheel.is_compatible():
             return []
-        return [Distribution(
-            location=location,
-            project_name=wheel.project_name,
-            version=wheel.version,
-            # Increase priority over eggs.
-            precedence=EGG_DIST + 1,
-        )]
+        return [
+            Distribution(
+                location=location,
+                project_name=wheel.project_name,
+                version=wheel.version,
+                # Increase priority over eggs.
+                precedence=EGG_DIST + 1,
+            )
+        ]
     if basename.endswith('.exe'):
         win_base, py_ver, platform = parse_bdist_wininst(basename)
         if win_base is not None:
@@ -137,7 +154,7 @@ def distros_for_location(location, basename, metadata=None):
     #
     for ext in EXTENSIONS:
         if basename.endswith(ext):
-            basename = basename[:-len(ext)]
+            basename = basename[: -len(ext)]
             return interpret_distro_name(location, basename, metadata)
     return []  # no extension matched
 
@@ -150,38 +167,37 @@ def distros_for_filename(filename, metadata=None):
 
 
 def interpret_distro_name(
-        location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
-        platform=None
+    location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None
 ):
-    """Generate alternative interpretations of a source distro name
+    """Generate the interpretation of a source distro name
 
     Note: if `location` is a filesystem filename, you should call
     ``pkg_resources.normalize_path()`` on it before passing it to this
     routine!
     """
-    # Generate alternative interpretations of a source distro name
-    # Because some packages are ambiguous as to name/versions split
-    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
-    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
-    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version").  In practice,
-    # the spurious interpretations should be ignored, because in the event
-    # there's also an "adns" package, the spurious "python-1.1.0" version will
-    # compare lower than any numeric version number, and is therefore unlikely
-    # to match a request for it.  It's still a potential problem, though, and
-    # in the long run PyPI and the distutils should go for "safe" names and
-    # versions in distribution archive names (sdist and bdist).
 
     parts = basename.split('-')
     if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):
         # it is a bdist_dumb, not an sdist -- bail out
         return
 
-    for p in range(1, len(parts) + 1):
-        yield Distribution(
-            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
-            py_version=py_version, precedence=precedence,
-            platform=platform
-        )
+    # find the pivot (p) that splits the name from the version.
+    # infer the version as the first item that has a digit.
+    for p in range(len(parts)):
+        if parts[p][:1].isdigit():
+            break
+    else:
+        p = len(parts)
+
+    yield Distribution(
+        location,
+        metadata,
+        '-'.join(parts[:p]),
+        '-'.join(parts[p:]),
+        py_version=py_version,
+        precedence=precedence,
+        platform=platform,
+    )
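
The pivot logic above, with a worked example (the archive name is illustrative):

    parts = "adns-python-1.1.0".split('-')
    for p in range(len(parts)):
        if parts[p][:1].isdigit():  # first digit-leading part starts the version
            break
    else:
        p = len(parts)              # no version part at all

    assert ('-'.join(parts[:p]), '-'.join(parts[p:])) == ('adns-python', '1.1.0')
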
 
 
 def unique_values(func):
@@ -197,8 +213,10 @@ def wrapper(*args, **kwargs):
     return wrapper
 
 
-REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
-# this line is here to fix emacs' cruddy broken syntax highlighting
+REL = re.compile(r"""<([^>]*\srel\s{0,10}=\s{0,10}['"]?([^'" >]+)[^>]*)>""", re.I)
+"""
+Regex for an HTML tag with 'rel="val"' attributes.
+"""
 
 
 @unique_values
@@ -282,11 +300,16 @@ class PackageIndex(Environment):
     """A distribution index that scans web pages for download URLs"""
 
     def __init__(
-            self, index_url="https://pypi.org/simple/", hosts=('*',),
-            ca_bundle=None, verify_ssl=True, *args, **kw
+        self,
+        index_url="https://pypi.org/simple/",
+        hosts=('*',),
+        ca_bundle=None,
+        verify_ssl=True,
+        *args,
+        **kw
     ):
         super().__init__(*args, **kw)
-        self.index_url = index_url + "/" [:not index_url.endswith('/')]
+        self.index_url = index_url + "/"[: not index_url.endswith('/')]
         self.scanned_urls = {}
         self.fetched_urls = {}
         self.package_pages = {}
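
The slicing idiom in ``__init__`` appends the trailing slash only when it is
missing, because ``bool`` is an ``int`` subclass:

    s = "https://pypi.org/simple"
    assert "/"[:True] == "/" and "/"[:False] == ""
    assert s + "/"[: not s.endswith('/')] == "https://pypi.org/simple/"
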
@@ -379,7 +402,9 @@ def url_ok(self, url, fatal=False):
             return True
         msg = (
             "\nNote: Bypassing %s (disallowed host; see "
-            "http://bit.ly/2hrImnY for details).\n")
+            "https://setuptools.pypa.io/en/latest/deprecated/"
+            "easy_install.html#restricting-downloads-with-allow-hosts for details).\n"
+        )
         if fatal:
             raise DistutilsError(msg % url)
         else:
@@ -417,9 +442,7 @@ def _scan(self, link):
         if not link.startswith(self.index_url):
             return NO_MATCH_SENTINEL
 
-        parts = list(map(
-            urllib.parse.unquote, link[len(self.index_url):].split('/')
-        ))
+        parts = list(map(urllib.parse.unquote, link[len(self.index_url) :].split('/')))
         if len(parts) != 2 or '#' in parts[1]:
             return NO_MATCH_SENTINEL
 
@@ -461,16 +484,15 @@ def process_index(self, url, page):
     def need_version_info(self, url):
         self.scan_all(
             "Page at %s links to .py file(s) without version info; an index "
-            "scan is required.", url
+            "scan is required.",
+            url,
         )
 
     def scan_all(self, msg=None, *args):
         if self.index_url not in self.fetched_urls:
             if msg:
                 self.warn(msg, *args)
-            self.info(
-                "Scanning index of all packages (this may take a while)"
-            )
+            self.info("Scanning index of all packages (this may take a while)")
         self.scan_url(self.index_url)
 
     def find_packages(self, requirement):
@@ -501,9 +523,7 @@ def check_hash(self, checker, filename, tfp):
         """
         checker is a ContentChecker
         """
-        checker.report(
-            self.debug,
-            "Validating %%s checksum for %s" % filename)
+        checker.report(self.debug, "Validating %%s checksum for %s" % filename)
         if not checker.is_valid():
             tfp.close()
             os.unlink(filename)
@@ -540,7 +560,8 @@ def not_found_in_index(self, requirement):
         else:  # no distros seen for this name, might be misspelled
             meth, msg = (
                 self.warn,
-                "Couldn't find index page for %r (maybe misspelled?)")
+                "Couldn't find index page for %r (maybe misspelled?)",
+            )
         meth(msg, requirement.unsafe_name)
         self.scan_all()
 
@@ -579,8 +600,14 @@ def download(self, spec, tmpdir):
         return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
 
     def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
-            self, requirement, tmpdir, force_scan=False, source=False,
-            develop_ok=False, local_index=None):
+        self,
+        requirement,
+        tmpdir,
+        force_scan=False,
+        source=False,
+        develop_ok=False,
+        local_index=None,
+    ):
         """Obtain a distribution suitable for fulfilling `requirement`
 
         `requirement` must be a ``pkg_resources.Requirement`` instance.
@@ -612,15 +639,13 @@ def find(req, env=None):
                 if dist.precedence == DEVELOP_DIST and not develop_ok:
                     if dist not in skipped:
                         self.warn(
-                            "Skipping development or system egg: %s", dist,
+                            "Skipping development or system egg: %s",
+                            dist,
                         )
                         skipped[dist] = 1
                     continue
 
-                test = (
-                    dist in req
-                    and (dist.precedence <= SOURCE_DIST or not source)
-                )
+                test = dist in req and (dist.precedence <= SOURCE_DIST or not source)
                 if test:
                     loc = self.download(dist.location, tmpdir)
                     dist.download_location = loc
@@ -669,10 +694,15 @@ def fetch(self, requirement, tmpdir, force_scan=False, source=False):
 
     def gen_setup(self, filename, fragment, tmpdir):
         match = EGG_FRAGMENT.match(fragment)
-        dists = match and [
-            d for d in
-            interpret_distro_name(filename, match.group(1), None) if d.version
-        ] or []
+        dists = (
+            match
+            and [
+                d
+                for d in interpret_distro_name(filename, match.group(1), None)
+                if d.version
+            ]
+            or []
+        )
 
         if len(dists) == 1:  # unambiguous ``#egg`` fragment
             basename = os.path.basename(filename)
@@ -680,8 +710,7 @@ def gen_setup(self, filename, fragment, tmpdir):
             # Make sure the file has been downloaded to the temp dir.
             if os.path.dirname(filename) != tmpdir:
                 dst = os.path.join(tmpdir, basename)
-                from setuptools.command.easy_install import samefile
-                if not samefile(filename, dst):
+                if not (os.path.exists(dst) and os.path.samefile(filename, dst)):
                     shutil.copy2(filename, dst)
                     filename = dst
 
@@ -690,8 +719,9 @@ def gen_setup(self, filename, fragment, tmpdir):
                     "from setuptools import setup\n"
                     "setup(name=%r, version=%r, py_modules=[%r])\n"
                     % (
-                        dists[0].project_name, dists[0].version,
-                        os.path.splitext(basename)[0]
+                        dists[0].project_name,
+                        dists[0].version,
+                        os.path.splitext(basename)[0],
                     )
                 )
             return filename
@@ -767,23 +797,22 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
             if warning:
                 self.warn(warning, v.reason)
             else:
-                raise DistutilsError("Download error for %s: %s"
-                                     % (url, v.reason)) from v
+                raise DistutilsError(
+                    "Download error for %s: %s" % (url, v.reason)
+                ) from v
         except http.client.BadStatusLine as v:
             if warning:
                 self.warn(warning, v.line)
             else:
                 raise DistutilsError(
                     '%s returned a bad status line. The server might be '
-                    'down, %s' %
-                    (url, v.line)
+                    'down, %s' % (url, v.line)
                 ) from v
         except (http.client.HTTPException, socket.error) as v:
             if warning:
                 self.warn(warning, v)
             else:
-                raise DistutilsError("Download error for %s: %s"
-                                     % (url, v)) from v
+                raise DistutilsError("Download error for %s: %s" % (url, v)) from v
 
     def _download_url(self, scheme, url, tmpdir):
         # Determine download filename
@@ -840,7 +869,11 @@ def _download_html(self, url, headers, filename):
         raise DistutilsError("Unexpected HTML page found at " + url)
 
     def _download_svn(self, url, filename):
-        warnings.warn("SVN download support is deprecated", UserWarning)
+        SetuptoolsDeprecationWarning.emit(
+            "Invalid config",
+            f"SVN download support is deprecated: {url}",
+            due_date=(2023, 6, 1),  # Initially introduced in 23 Sept 2018
+        )
         url = url.split('#', 1)[0]  # remove any fragment for svn's sake
         creds = ''
         if url.lower().startswith('svn:') and '@' in url:
@@ -888,10 +921,13 @@ def _download_git(self, url, filename):
 
         if rev is not None:
             self.info("Checking out %s", rev)
-            os.system("git -C %s checkout --quiet %s" % (
-                filename,
-                rev,
-            ))
+            os.system(
+                "git -C %s checkout --quiet %s"
+                % (
+                    filename,
+                    rev,
+                )
+            )
 
         return filename
 
@@ -904,10 +940,13 @@ def _download_hg(self, url, filename):
 
         if rev is not None:
             self.info("Updating to %s", rev)
-            os.system("hg --cwd %s up -C -r %s -q" % (
-                filename,
-                rev,
-            ))
+            os.system(
+                "hg --cwd %s up -C -r %s -q"
+                % (
+                    filename,
+                    rev,
+                )
+            )
 
         return filename
 
@@ -1011,7 +1050,8 @@ def __init__(self):
     @property
     def creds_by_repository(self):
         sections_with_repositories = [
-            section for section in self.sections()
+            section
+            for section in self.sections()
             if self.get(section, 'repository').strip()
         ]
 
@@ -1115,8 +1155,8 @@ def local_open(url):
             files.append('<a href="{name}">{name}</a>'.format(name=f))
         else:
             tmpl = (
-                "{url}"
-                "{files}")
+                "{url}" "{files}"
+            )
             body = tmpl.format(url=url, files='\n'.join(files))
         status, message = 200, "OK"
     else:
diff --git a/setuptools/py312compat.py b/setuptools/py312compat.py
new file mode 100644
index 0000000000..28175b1f75
--- /dev/null
+++ b/setuptools/py312compat.py
@@ -0,0 +1,12 @@
+import sys
+import shutil
+
+
+def shutil_rmtree(path, ignore_errors=False, onexc=None):
+    if sys.version_info >= (3, 12):
+        return shutil.rmtree(path, ignore_errors, onexc=onexc)
+
+    def _handler(fn, path, excinfo):
+        return onexc(fn, path, excinfo[1])
+
+    return shutil.rmtree(path, ignore_errors, onerror=_handler)
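
A usage sketch of the shim with the common "retry read-only files on Windows"
handler (the target directory is hypothetical); the same ``onexc`` call works on
3.12+ natively and is adapted to ``onerror`` on older interpreters:

    import os
    import stat
    from setuptools.py312compat import shutil_rmtree

    def _chmod_and_retry(func, path, exc):  # onexc gets (function, path, exception)
        os.chmod(path, stat.S_IWRITE)
        func(path)

    shutil_rmtree("build", onexc=_chmod_and_retry)
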
diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/config/downloads/.gitignore b/setuptools/tests/config/downloads/.gitignore
new file mode 100644
index 0000000000..df3779fc42
--- /dev/null
+++ b/setuptools/tests/config/downloads/.gitignore
@@ -0,0 +1,4 @@
+*
+!.gitignore
+!__init__.py
+!preload.py
diff --git a/setuptools/tests/config/downloads/__init__.py b/setuptools/tests/config/downloads/__init__.py
new file mode 100644
index 0000000000..9fb9b14b02
--- /dev/null
+++ b/setuptools/tests/config/downloads/__init__.py
@@ -0,0 +1,57 @@
+import re
+import time
+from pathlib import Path
+from urllib.error import HTTPError
+from urllib.request import urlopen
+
+__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]
+
+
+NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
+DOWNLOAD_DIR = Path(__file__).parent
+
+
+# ----------------------------------------------------------------------
+# Please update ./preload.py accordingly when modifying this file
+# ----------------------------------------------------------------------
+
+
+def output_file(url: str, download_dir: Path = DOWNLOAD_DIR):
+    file_name = url.strip()
+    for part in NAME_REMOVE:
+        file_name = file_name.replace(part, '').strip().strip('/:').strip()
+    return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name))
+
+
+def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5):
+    path = output_file(url, download_dir)
+    if path.exists():
+        print(f"Skipping {url} (already exists: {path})")
+    else:
+        download_dir.mkdir(exist_ok=True, parents=True)
+        print(f"Downloading {url} to {path}")
+        try:
+            download(url, path)
+        except HTTPError:
+            time.sleep(wait)  # wait a few seconds and try again.
+            download(url, path)
+    return path
+
+
+def urls_from_file(list_file: Path):
+    """``list_file`` should be a text file where each line corresponds to a URL to
+    download.
+    """
+    print(f"file: {list_file}")
+    content = list_file.read_text(encoding="utf-8")
+    return [url for url in content.splitlines() if not url.startswith("#")]
+
+
+def download(url: str, dest: Path):
+    with urlopen(url) as f:
+        data = f.read()
+
+    with open(dest, "wb") as f:
+        f.write(data)
+
+    assert Path(dest).exists()
diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py
new file mode 100644
index 0000000000..64b3f1c8d5
--- /dev/null
+++ b/setuptools/tests/config/downloads/preload.py
@@ -0,0 +1,18 @@
+"""This file can be used to preload files needed for testing.
+
+For example you can use::
+
+    cd setuptools/tests/config
+    python -m downloads.preload setupcfg_examples.txt
+
+to make sure the `setup.cfg` examples are downloaded before starting the tests.
+"""
+import sys
+from pathlib import Path
+
+from . import retrieve_file, urls_from_file
+
+
+if __name__ == "__main__":
+    urls = urls_from_file(Path(sys.argv[1]))
+    list(map(retrieve_file, urls))
diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt
new file mode 100644
index 0000000000..5db3565464
--- /dev/null
+++ b/setuptools/tests/config/setupcfg_examples.txt
@@ -0,0 +1,23 @@
+# ====================================================================
+# Some popular packages that use setup.cfg (and others not so popular)
+# Reference: https://hugovk.github.io/top-pypi-packages/
+# ====================================================================
+https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg
+https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
+https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
+https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
+https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
+https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
+https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
+https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
+https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
+https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
+https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
+https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
+https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
+https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg
+https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
+https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
+https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
+https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
+https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
new file mode 100644
index 0000000000..c9c521be16
--- /dev/null
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -0,0 +1,442 @@
+"""Make sure that applying the configuration from pyproject.toml is equivalent to
+applying a similar configuration from setup.cfg
+
+To run these tests offline, please have a look at ``./downloads/preload.py``
+"""
+import io
+import re
+import tarfile
+from inspect import cleandoc
+from pathlib import Path
+from unittest.mock import Mock
+from zipfile import ZipFile
+
+import pytest
+from ini2toml.api import Translator
+
+import setuptools  # noqa ensure monkey patch to metadata
+from setuptools.dist import Distribution
+from setuptools.config import setupcfg, pyprojecttoml
+from setuptools.config import expand
+from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField, _some_attrgetter
+from setuptools.command.egg_info import write_requirements
+from setuptools.warnings import SetuptoolsDeprecationWarning
+
+from .downloads import retrieve_file, urls_from_file
+
+
+HERE = Path(__file__).parent
+EXAMPLES_FILE = "setupcfg_examples.txt"
+
+
+def makedist(path, **attrs):
+    return Distribution({"src_root": path, **attrs})
+
+
+@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
+@pytest.mark.filterwarnings("ignore")
+@pytest.mark.uses_network
+def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
+    monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
+    setupcfg_example = retrieve_file(url)
+    pyproject_example = Path(tmp_path, "pyproject.toml")
+    toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg")
+    pyproject_example.write_text(toml_config)
+
+    dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
+    dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)
+
+    pkg_info_toml = core_metadata(dist_toml)
+    pkg_info_cfg = core_metadata(dist_cfg)
+    assert pkg_info_toml == pkg_info_cfg
+
+    if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
+        assert set(dist_toml.license_files) == set(dist_cfg.license_files)
+
+    if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
+        print(dist_cfg.entry_points)
+        ep_toml = {(k, *sorted(i.replace(" ", "") for i in v))
+                   for k, v in dist_toml.entry_points.items()}
+        ep_cfg = {(k, *sorted(i.replace(" ", "") for i in v))
+                  for k, v in dist_cfg.entry_points.items()}
+        assert ep_toml == ep_cfg
+
+    if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
+        pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
+        pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
+        assert pkg_data_toml == pkg_data_cfg
+
+    if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
+        data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
+        data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
+        assert data_files_toml == data_files_cfg
+
+    assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
+    if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
+        if (
+            "testing" in dist_toml.extras_require
+            and "testing" not in dist_cfg.extras_require
+        ):
+            # ini2toml can automatically convert `tests_require` to `testing` extra
+            dist_toml.extras_require.pop("testing")
+        extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
+        extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
+        assert extra_req_toml == extra_req_cfg
+
+
+PEP621_EXAMPLE = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+description = "Lovely Spam! Wonderful Spam!"
+readme = "README.rst"
+requires-python = ">=3.8"
+license = {file = "LICENSE.txt"}
+keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
+authors = [
+  {email = "hi@pradyunsg.me"},
+  {name = "Tzu-Ping Chung"}
+]
+maintainers = [
+  {name = "Brett Cannon", email = "brett@python.org"},
+  {name = "John X. Ãørçeč", email = "john@utf8.org"},
+  {name = "Γαμα קּ 東", email = "gama@utf8.org"},
+]
+classifiers = [
+  "Development Status :: 4 - Beta",
+  "Programming Language :: Python"
+]
+
+dependencies = [
+  "httpx",
+  "gidgethub[httpx]>4.0.0",
+  "django>2.1; os_name != 'nt'",
+  "django>2.0; os_name == 'nt'"
+]
+
+[project.optional-dependencies]
+test = [
+  "pytest < 5.0.0",
+  "pytest-cov[all]"
+]
+
+[project.urls]
+homepage = "http://example.com"
+documentation = "http://readthedocs.org"
+repository = "http://github.com"
+changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"
+
+[project.scripts]
+spam-cli = "spam:main_cli"
+
+[project.gui-scripts]
+spam-gui = "spam:main_gui"
+
+[project.entry-points."spam.magical"]
+tomatoes = "spam:main_tomatoes"
+"""
+
+PEP621_INTERNATIONAL_EMAIL_EXAMPLE = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+authors = [
+  {email = "hi@pradyunsg.me"},
+  {name = "Tzu-Ping Chung"}
+]
+maintainers = [
+  {name = "Степан Бандера", email = "криївка@оун-упа.укр"},
+]
+"""
+
+PEP621_EXAMPLE_SCRIPT = """
+def main_cli(): pass
+def main_gui(): pass
+def main_tomatoes(): pass
+"""
+
+
+def _pep621_example_project(
+        tmp_path,
+        readme="README.rst",
+        pyproject_text=PEP621_EXAMPLE,
+):
+    pyproject = tmp_path / "pyproject.toml"
+    text = pyproject_text
+    replacements = {'readme = "README.rst"': f'readme = "{readme}"'}
+    for orig, subst in replacements.items():
+        text = text.replace(orig, subst)
+    pyproject.write_text(text, encoding="utf-8")
+
+    (tmp_path / readme).write_text("hello world")
+    (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---")
+    (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT)
+    return pyproject
+
+
+def test_pep621_example(tmp_path):
+    """Make sure the example in PEP 621 works"""
+    pyproject = _pep621_example_project(tmp_path)
+    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+    assert dist.metadata.license == "--- LICENSE stub ---"
+    assert set(dist.metadata.license_files) == {"LICENSE.txt"}
+
+
+@pytest.mark.parametrize(
+    "readme, ctype",
+    [
+        ("Readme.txt", "text/plain"),
+        ("readme.md", "text/markdown"),
+        ("text.rst", "text/x-rst"),
+    ]
+)
+def test_readme_content_type(tmp_path, readme, ctype):
+    pyproject = _pep621_example_project(tmp_path, readme)
+    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+    assert dist.metadata.long_description_content_type == ctype
+
+
+def test_undefined_content_type(tmp_path):
+    pyproject = _pep621_example_project(tmp_path, "README.tex")
+    with pytest.raises(ValueError, match="Undefined content type for README.tex"):
+        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+
+def test_no_explicit_content_type_for_missing_extension(tmp_path):
+    pyproject = _pep621_example_project(tmp_path, "README")
+    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+    assert dist.metadata.long_description_content_type is None
+
+
+@pytest.mark.parametrize(
+    ('pyproject_text', 'expected_maintainers_meta_value'),
+    (
+        pytest.param(
+            PEP621_EXAMPLE,
+            (
+                'Brett Cannon , "John X. Ãørçeč" , '
+                'Γαμα קּ 東 '
+            ),
+            id='non-international-emails',
+        ),
+        pytest.param(
+            PEP621_INTERNATIONAL_EMAIL_EXAMPLE,
+            'Степан Бандера <криївка@оун-упа.укр>',
+            marks=pytest.mark.xfail(
+                reason="CPython's `email.headerregistry.Address` only supports "
+                'RFC 5322, as of Nov 10, 2022 and latest Python 3.11.0',
+                strict=True,
+            ),
+            id='international-email',
+        ),
+    ),
+)
+def test_utf8_maintainer_in_metadata(  # issue-3663
+        expected_maintainers_meta_value,
+        pyproject_text, tmp_path,
+):
+    pyproject = _pep621_example_project(
+        tmp_path, "README", pyproject_text=pyproject_text,
+    )
+    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+    assert dist.metadata.maintainer_email == expected_maintainers_meta_value
+    pkg_file = tmp_path / "PKG-FILE"
+    with open(pkg_file, "w", encoding="utf-8") as fh:
+        dist.metadata.write_pkg_file(fh)
+    content = pkg_file.read_text(encoding="utf-8")
+    assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
+
+
+class TestLicenseFiles:
+    # TODO: After PEP 639 is accepted, we have to move the license-files
+    #       to the `project` table instead of `tool.setuptools`
+
+    def base_pyproject(self, tmp_path, additional_text):
+        pyproject = _pep621_example_project(tmp_path, "README")
+        text = pyproject.read_text(encoding="utf-8")
+
+        # Sanity-check
+        assert 'license = {file = "LICENSE.txt"}' in text
+        assert "[tool.setuptools]" not in text
+
+        text = f"{text}\n{additional_text}\n"
+        pyproject.write_text(text, encoding="utf-8")
+        return pyproject
+
+    def test_both_license_and_license_files_defined(self, tmp_path):
+        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
+        pyproject = self.base_pyproject(tmp_path, setuptools_config)
+
+        (tmp_path / "_FILE.txt").touch()
+        (tmp_path / "_FILE.rst").touch()
+
+        # Would normally match the `license_files` patterns, but we want to exclude it
+        # by being explicit. On the other hand, contents should be added to `license`
+        license = tmp_path / "LICENSE.txt"
+        license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")
+
+        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
+        assert dist.metadata.license == "LicenseRef-Proprietary\n"
+
+    def test_default_patterns(self, tmp_path):
+        setuptools_config = '[tool.setuptools]\nzip-safe = false'
+        # ^ used just to trigger section validation
+        pyproject = self.base_pyproject(tmp_path, setuptools_config)
+
+        license_files = "LICENCE-a.html COPYING-abc.txt AUTHORS-xyz NOTICE,def".split()
+
+        for fname in license_files:
+            (tmp_path / fname).write_text(f"{fname}\n", encoding="utf-8")
+
+        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        assert (tmp_path / "LICENSE.txt").exists()  # from base example
+        assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
+
+
+class TestDeprecatedFields:
+    def test_namespace_packages(self, tmp_path):
+        pyproject = tmp_path / "pyproject.toml"
+        config = """
+        [project]
+        name = "myproj"
+        version = "42"
+        [tool.setuptools]
+        namespace-packages = ["myproj.pkg"]
+        """
+        pyproject.write_text(cleandoc(config), encoding="utf-8")
+        with pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages"):
+            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+
+class TestPresetField:
+    def pyproject(self, tmp_path, dynamic, extra_content=""):
+        content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n"
+        if "version" not in dynamic:
+            content += "version = '42'\n"
+        file = tmp_path / "pyproject.toml"
+        file.write_text(content + extra_content, encoding="utf-8")
+        return file
+
+    @pytest.mark.parametrize(
+        "attr, field, value",
+        [
+            ("install_requires", "dependencies", ["six"]),
+            ("classifiers", "classifiers", ["Private :: Classifier"]),
+        ]
+    )
+    def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
+        """For the time being we just warn if the user pre-set values (e.g. via
+        ``setup.py``) but do not include them in ``dynamic``.
+        """
+        pyproject = self.pyproject(tmp_path, [])
+        dist = makedist(tmp_path, **{attr: value})
+        msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S)
+        with pytest.warns(_WouldIgnoreField, match=msg):
+            dist = pyprojecttoml.apply_configuration(dist, pyproject)
+
+        # TODO: Once support for pyproject.toml config stabilizes attr should be None
+        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
+        assert dist_value == value
+
+    @pytest.mark.parametrize(
+        "attr, field, value",
+        [
+            ("install_requires", "dependencies", []),
+            ("extras_require", "optional-dependencies", {}),
+            ("install_requires", "dependencies", ["six"]),
+            ("classifiers", "classifiers", ["Private :: Classifier"]),
+        ]
+    )
+    def test_listed_in_dynamic(self, tmp_path, attr, field, value):
+        pyproject = self.pyproject(tmp_path, [field])
+        dist = makedist(tmp_path, **{attr: value})
+        dist = pyprojecttoml.apply_configuration(dist, pyproject)
+        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
+        assert dist_value == value
+
+    def test_warning_overwritten_dependencies(self, tmp_path):
+        src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(src, encoding="utf-8")
+        dist = makedist(tmp_path, install_requires=["wheel"])
+        with pytest.warns(match="`install_requires` overwritten"):
+            dist = pyprojecttoml.apply_configuration(dist, pyproject)
+        assert "wheel" not in dist.install_requires
+
+    def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
+        """
+        Internally setuptools converts dependencies with markers to "extras".
+        If ``install_requires`` is given by ``setup.py``, we have to ensure that
+        applying ``optional-dependencies`` does not overwrite the mandatory
+        dependencies with markers (see #3204).
+        """
+        # If setuptools replaces its internal mechanism that uses `requires.txt`,
+        # this test has to be rewritten to adapt accordingly.
+        extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n"
+        pyproject = self.pyproject(tmp_path, ["dependencies"], extra)
+        install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"']
+        dist = makedist(tmp_path, install_requires=install_req)
+        dist = pyprojecttoml.apply_configuration(dist, pyproject)
+        assert "foo" in dist.extras_require
+        assert ':python_version < "3.7"' in dist.extras_require
+        egg_info = dist.get_command_obj("egg_info")
+        write_requirements(egg_info, tmp_path, tmp_path / "requires.txt")
+        reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8")
+        assert "importlib-resources" in reqs
+        assert "bar" in reqs
+
+    @pytest.mark.parametrize(
+        "field,group",
+        [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")]
+    )
+    @pytest.mark.filterwarnings("error")
+    def test_scripts_dont_require_dynamic_entry_points(self, tmp_path, field, group):
+        # Issue 3862
+        pyproject = self.pyproject(tmp_path, [field])
+        dist = makedist(tmp_path, entry_points={group: ["foobar=foobar:main"]})
+        dist = pyprojecttoml.apply_configuration(dist, pyproject)
+        assert group in dist.entry_points
+
+
+class TestMeta:
+    def test_example_file_in_sdist(self, setuptools_sdist):
+        """Meta test to ensure tests can run from sdist"""
+        with tarfile.open(setuptools_sdist) as tar:
+            assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
+
+    def test_example_file_not_in_wheel(self, setuptools_wheel):
+        """Meta test to ensure auxiliary test files are not in wheel"""
+        with ZipFile(setuptools_wheel) as zipfile:
+            assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist())
+
+
+# --- Auxiliary Functions ---
+
+
+def core_metadata(dist) -> str:
+    with io.StringIO() as buffer:
+        dist.metadata.write_pkg_file(buffer)
+        pkg_file_txt = buffer.getvalue()
+
+    skip_prefixes = ()
+    skip_lines = set()
+    # ---- DIFF NORMALISATION ----
+    # PEP 621 is very particular about author/maintainer metadata conversion, so skip
+    skip_prefixes += ("Author:", "Author-email:", "Maintainer:", "Maintainer-email:")
+    # May be redundant with Home-page
+    skip_prefixes += ("Project-URL: Homepage,", "Home-page:")
+    # May be missing in original (relying on default) but backfilled in the TOML
+    skip_prefixes += ("Description-Content-Type:",)
+    # ini2toml can automatically convert `tests_require` to `testing` extra
+    skip_lines.add("Provides-Extra: testing")
+    # Remove empty lines
+    skip_lines.add("")
+
+    result = []
+    for line in pkg_file_txt.splitlines():
+        if line.startswith(skip_prefixes) or line in skip_lines:
+            continue
+        result.append(line + "\n")
+
+    return "".join(result)
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
new file mode 100644
index 0000000000..6af88efd82
--- /dev/null
+++ b/setuptools/tests/config/test_expand.py
@@ -0,0 +1,224 @@
+import os
+from pathlib import Path
+
+import pytest
+
+from distutils.errors import DistutilsOptionError
+from setuptools.config import expand
+from setuptools.discovery import find_package_path
+
+
+def write_files(files, root_dir):
+    for file, content in files.items():
+        path = root_dir / file
+        path.parent.mkdir(exist_ok=True, parents=True)
+        path.write_text(content)
+
+
+def test_glob_relative(tmp_path, monkeypatch):
+    files = {
+        "dir1/dir2/dir3/file1.txt",
+        "dir1/dir2/file2.txt",
+        "dir1/file3.txt",
+        "a.ini",
+        "b.ini",
+        "dir1/c.ini",
+        "dir1/dir2/a.ini",
+    }
+
+    write_files({k: "" for k in files}, tmp_path)
+    patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
+    monkeypatch.chdir(tmp_path)
+    assert set(expand.glob_relative(patterns)) == files
+    # Make sure the same APIs work outside cwd
+    assert set(expand.glob_relative(patterns, tmp_path)) == files
+
+
+def test_read_files(tmp_path, monkeypatch):
+
+    dir_ = tmp_path / "dir_"
+    (tmp_path / "_dir").mkdir(exist_ok=True)
+    (tmp_path / "a.txt").touch()
+    files = {
+        "a.txt": "a",
+        "dir1/b.txt": "b",
+        "dir1/dir2/c.txt": "c"
+    }
+    write_files(files, dir_)
+
+    secrets = Path(str(dir_) + "secrets")
+    secrets.mkdir(exist_ok=True)
+    write_files({"secrets.txt": "secret keys"}, secrets)
+
+    with monkeypatch.context() as m:
+        m.chdir(dir_)
+        assert expand.read_files(list(files)) == "a\nb\nc"
+
+        cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
+        with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
+            expand.read_files(["../a.txt"])
+
+        cannot_access_secrets_msg = r"Cannot access '.*secrets\.txt'"
+        with pytest.raises(DistutilsOptionError, match=cannot_access_secrets_msg):
+            expand.read_files(["../dir_secrets/secrets.txt"])
+
+    # Make sure the same APIs work outside cwd
+    assert expand.read_files(list(files), dir_) == "a\nb\nc"
+    with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
+        expand.read_files(["../a.txt"], dir_)
+
+
+class TestReadAttr:
+    @pytest.mark.parametrize(
+        "example",
+        [
+            # No cookie means UTF-8:
+            b"__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
+            # If a cookie is present, honor it:
+            b"# -*- coding: utf-8 -*-\n__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
+            b"# -*- coding: latin1 -*-\n__version__ = '\xe9'\nraise SystemExit(1)\n",
+        ]
+    )
+    def test_read_attr_encoding_cookie(self, example, tmp_path):
+        (tmp_path / "mod.py").write_bytes(example)
+        assert expand.read_attr('mod.__version__', root_dir=tmp_path) == 'é'
+
+    def test_read_attr(self, tmp_path, monkeypatch):
+        files = {
+            "pkg/__init__.py": "",
+            "pkg/sub/__init__.py": "VERSION = '0.1.1'",
+            "pkg/sub/mod.py": (
+                "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n"
+                "raise SystemExit(1)"
+            ),
+        }
+        write_files(files, tmp_path)
+
+        with monkeypatch.context() as m:
+            m.chdir(tmp_path)
+            # Make sure it can read the attr statically without evaluating the module
+            assert expand.read_attr('pkg.sub.VERSION') == '0.1.1'
+            values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
+
+        assert values['a'] == 0
+        assert values['b'] == {42}
+
+        # Make sure the same APIs work outside cwd
+        assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
+        values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
+        assert values['c'] == (0, 1, 1)
+
+    @pytest.mark.parametrize(
+        "example",
+        [
+            "VERSION: str\nVERSION = '0.1.1'\nraise SystemExit(1)\n",
+            "VERSION: str = '0.1.1'\nraise SystemExit(1)\n",
+        ]
+    )
+    def test_read_annotated_attr(self, tmp_path, example):
+        files = {
+            "pkg/__init__.py": "",
+            "pkg/sub/__init__.py": example,
+        }
+        write_files(files, tmp_path)
+        # Make sure this attribute can be read statically
+        assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
+
+    def test_import_order(self, tmp_path):
+        """
+        Sometimes the import machinery will import the parent package of a nested
+        module, which triggers side-effects and might create problems (see issue #3176)
+
+        ``read_attr`` should bypass these limitations by resolving modules statically
+        (via ast.literal_eval).
+        """
+        files = {
+            "src/pkg/__init__.py": "from .main import func\nfrom .about import version",
+            "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
+            "src/pkg/about.py": "version = '42'",
+        }
+        write_files(files, tmp_path)
+        attr_desc = "pkg.about.version"
+        package_dir = {"": "src"}
+        # `import super_complicated_dep` should not run, otherwise the build fails
+        assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
+
+
+@pytest.mark.parametrize(
+    'package_dir, file, module, return_value',
+    [
+        ({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
+        ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
+        ({}, "single_module.py", "single_module", 70),
+        ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
+    ]
+)
+def test_resolve_class(tmp_path, package_dir, file, module, return_value):
+    files = {file: f"class Custom:\n    def testing(self): return {return_value}"}
+    write_files(files, tmp_path)
+    cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
+    assert cls().testing() == return_value
+
+
+@pytest.mark.parametrize(
+    'args, pkgs',
+    [
+        ({"where": ["."], "namespaces": False}, {"pkg", "other"}),
+        ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
+        ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
+        ({}, {"pkg", "other", "dir1", "dir1.dir2"}),  # default value for `namespaces`
+    ]
+)
+def test_find_packages(tmp_path, args, pkgs):
+    files = {
+        "pkg/__init__.py",
+        "other/__init__.py",
+        "dir1/dir2/__init__.py",
+    }
+    write_files({k: "" for k in files}, tmp_path)
+
+    package_dir = {}
+    kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
+    assert set(expand.find_packages(**kwargs)) == pkgs
+    for pkg in pkgs:
+        pkg_path = find_package_path(pkg, package_dir, tmp_path)
+        assert os.path.exists(pkg_path)
+
+    # Make sure the same APIs work outside cwd
+    where = [
+        str((tmp_path / p).resolve()).replace(os.sep, "/")  # ensure posix-style paths
+        for p in args.pop("where", ["."])
+    ]
+
+    assert set(expand.find_packages(where=where, **args)) == pkgs
+
+
+@pytest.mark.parametrize(
+    "files, where, expected_package_dir",
+    [
+        (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
+        (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
+        (["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}),
+        (["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}),
+        (
+            ["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"],
+            ["src1", "src2"],
+            {"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"},
+        ),
+        (
+            ["src/pkg1/__init__.py", "pkg2/__init__.py"],
+            ["src", "."],
+            {"pkg1": "src/pkg1"},
+        ),
+    ],
+)
+def test_fill_package_dir(tmp_path, files, where, expected_package_dir):
+    write_files({k: "" for k in files}, tmp_path)
+    pkg_dir = {}
+    kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False}
+    pkgs = expand.find_packages(where=where, **kwargs)
+    assert set(pkg_dir.items()) == set(expected_package_dir.items())
+    for pkg in pkgs:
+        pkg_path = find_package_path(pkg, pkg_dir, tmp_path)
+        assert os.path.exists(pkg_path)
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
new file mode 100644
index 0000000000..811328f52b
--- /dev/null
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -0,0 +1,402 @@
+import re
+from configparser import ConfigParser
+from inspect import cleandoc
+
+import pytest
+import tomli_w
+from path import Path as _Path
+
+from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField
+from setuptools.config.pyprojecttoml import (
+    read_configuration,
+    expand_configuration,
+    apply_configuration,
+    validate,
+    _InvalidFile,
+)
+from setuptools.dist import Distribution
+from setuptools.errors import OptionError
+
+
+import setuptools  # noqa -- force distutils.core to be patched
+import distutils.core
+
+EXAMPLE = """
+[project]
+name = "myproj"
+keywords = ["some", "key", "words"]
+dynamic = ["version", "readme"]
+requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dependencies = [
+    'importlib-metadata>=0.12;python_version<"3.8"',
+    'importlib-resources>=1.0;python_version<"3.7"',
+    'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+]
+
+[project.optional-dependencies]
+docs = [
+    "sphinx>=3",
+    "sphinx-argparse>=0.2.5",
+    "sphinx-rtd-theme>=0.4.3",
+]
+testing = [
+    "pytest>=1",
+    "coverage>=3,<5",
+]
+
+[project.scripts]
+exec = "pkg.__main__:exec"
+
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+package-dir = {"" = "src"}
+zip-safe = true
+platforms = ["any"]
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools.cmdclass]
+sdist = "pkg.mod.CustomSdist"
+
+[tool.setuptools.dynamic.version]
+attr = "pkg.__version__.VERSION"
+
+[tool.setuptools.dynamic.readme]
+file = ["README.md"]
+content-type = "text/markdown"
+
+[tool.setuptools.package-data]
+"*" = ["*.txt"]
+
+[tool.setuptools.data-files]
+"data" = ["_files/*.txt"]
+
+[tool.distutils.sdist]
+formats = "gztar"
+
+[tool.distutils.bdist_wheel]
+universal = true
+"""
+
+
+def create_example(path, pkg_root):
+    pyproject = path / "pyproject.toml"
+
+    files = [
+        f"{pkg_root}/pkg/__init__.py",
+        "_files/file.txt",
+    ]
+    if pkg_root != ".":  # flat-layout will raise error for multi-package dist
+        # Ensure namespaces are discovered
+        files.append(f"{pkg_root}/other/nested/__init__.py")
+
+    for file in files:
+        (path / file).parent.mkdir(exist_ok=True, parents=True)
+        (path / file).touch()
+
+    pyproject.write_text(EXAMPLE)
+    (path / "README.md").write_text("hello world")
+    (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass")
+    (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)")
+    (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')")
+
+
+def verify_example(config, path, pkg_root):
+    pyproject = path / "pyproject.toml"
+    pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
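+    # ^-- re-serialise the (possibly modified) config, so that
+    #     `read_configuration` below sees exactly the same content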
+    expanded = expand_configuration(config, path)
+    expanded_project = expanded["project"]
+    assert read_configuration(pyproject, expand=True) == expanded
+    assert expanded_project["version"] == "3.10"
+    assert expanded_project["readme"]["text"] == "hello world"
+    assert "packages" in expanded["tool"]["setuptools"]
+    if pkg_root == ".":
+        # Auto-discovery will raise an error for a multi-package dist
+        assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
+    else:
+        assert set(expanded["tool"]["setuptools"]["packages"]) == {
+            "pkg",
+            "other",
+            "other.nested",
+        }
+    assert expanded["tool"]["setuptools"]["include-package-data"] is True
+    assert "" in expanded["tool"]["setuptools"]["package-data"]
+    assert "*" not in expanded["tool"]["setuptools"]["package-data"]
+    assert expanded["tool"]["setuptools"]["data-files"] == [
+        ("data", ["_files/file.txt"])
+    ]
+
+
+def test_read_configuration(tmp_path):
+    create_example(tmp_path, "src")
+    pyproject = tmp_path / "pyproject.toml"
+
+    config = read_configuration(pyproject, expand=False)
+    assert config["project"].get("version") is None
+    assert config["project"].get("readme") is None
+
+    verify_example(config, tmp_path, "src")
+
+
+@pytest.mark.parametrize(
+    "pkg_root, opts",
+    [
+        (".", {}),
+        ("src", {}),
+        ("lib", {"packages": {"find": {"where": ["lib"]}}}),
+    ],
+)
+def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
+    create_example(tmp_path, pkg_root)
+
+    pyproject = tmp_path / "pyproject.toml"
+
+    config = read_configuration(pyproject, expand=False)
+    assert config["project"].get("version") is None
+    assert config["project"].get("readme") is None
+    config["tool"]["setuptools"].pop("packages", None)
+    config["tool"]["setuptools"].pop("package-dir", None)
+
+    config["tool"]["setuptools"].update(opts)
+    verify_example(config, tmp_path, pkg_root)
+
+
+ENTRY_POINTS = {
+    "console_scripts": {"a": "mod.a:func"},
+    "gui_scripts": {"b": "mod.b:func"},
+    "other": {"c": "mod.c:func [extra]"},
+}
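+# When serialised via ConfigParser (see ``write_entry_points`` below), the dict
+# above becomes an INI-style ``entry-points.txt`` file, e.g.:
+#
+#   [console_scripts]
+#   a = mod.a:func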
+
+
+class TestEntryPoints:
+    def write_entry_points(self, tmp_path):
+        entry_points = ConfigParser()
+        entry_points.read_dict(ENTRY_POINTS)
+        with open(tmp_path / "entry-points.txt", "w") as f:
+            entry_points.write(f)
+
+    def pyproject(self, dynamic=None):
+        project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]}
+        tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}
+        return {"project": project, "tool": {"setuptools": tool}}
+
+    def test_all_listed_in_dynamic(self, tmp_path):
+        self.write_entry_points(tmp_path)
+        expanded = expand_configuration(self.pyproject(), tmp_path)
+        expanded_project = expanded["project"]
+        assert len(expanded_project["scripts"]) == 1
+        assert expanded_project["scripts"]["a"] == "mod.a:func"
+        assert len(expanded_project["gui-scripts"]) == 1
+        assert expanded_project["gui-scripts"]["b"] == "mod.b:func"
+        assert len(expanded_project["entry-points"]) == 1
+        assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]"
+
+    @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts"))
+    def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic):
+        self.write_entry_points(tmp_path)
+        dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic}
+
+        msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}"
+        with pytest.warns(_WouldIgnoreField, match=re.compile(msg, re.S)):
+            expanded = expand_configuration(self.pyproject(dynamic), tmp_path)
+
+        expanded_project = expanded["project"]
+        assert dynamic < set(expanded_project)
+        assert len(expanded_project["entry-points"]) == 1
+        # TODO: Test the following when pyproject.toml support stabilizes:
+        # >>> assert missing_dynamic not in expanded_project
+
+
+class TestClassifiers:
+    def test_dynamic(self, tmp_path):
+        # Let's create a project example that has dynamic classifiers
+        # coming from a txt file.
+        create_example(tmp_path, "src")
+        classifiers = """\
+        Framework :: Flask
+        Programming Language :: Haskell
+        """
+        (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers))
+
+        pyproject = tmp_path / "pyproject.toml"
+        config = read_configuration(pyproject, expand=False)
+        dynamic = config["project"]["dynamic"]
+        config["project"]["dynamic"] = list({*dynamic, "classifiers"})
+        dynamic_config = config["tool"]["setuptools"]["dynamic"]
+        dynamic_config["classifiers"] = {"file": "classifiers.txt"}
+
+        # When the configuration is expanded,
+        # each line of the file should be a different classifier.
+        validate(config, pyproject)
+        expanded = expand_configuration(config, tmp_path)
+
+        assert set(expanded["project"]["classifiers"]) == {
+            "Framework :: Flask",
+            "Programming Language :: Haskell",
+        }
+
+    def test_dynamic_without_config(self, tmp_path):
+        config = """
+        [project]
+        name = "myproj"
+        version = '42'
+        dynamic = ["classifiers"]
+        """
+
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(cleandoc(config))
+        with pytest.raises(OptionError, match="No configuration .* .classifiers."):
+            read_configuration(pyproject)
+
+    def test_dynamic_readme_from_setup_script_args(self, tmp_path):
+        config = """
+        [project]
+        name = "myproj"
+        version = '42'
+        dynamic = ["readme"]
+        """
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(cleandoc(config))
+        dist = Distribution(attrs={"long_description": "42"})
+        # No error should occur because of missing `readme`
+        dist = apply_configuration(dist, pyproject)
+        assert dist.metadata.long_description == "42"
+
+    def test_dynamic_without_file(self, tmp_path):
+        config = """
+        [project]
+        name = "myproj"
+        version = '42'
+        dynamic = ["classifiers"]
+
+        [tool.setuptools.dynamic]
+        classifiers = {file = ["classifiers.txt"]}
+        """
+
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(cleandoc(config))
+        with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
+            expanded = read_configuration(pyproject)
+        assert "classifiers" not in expanded["project"]
+
+
+@pytest.mark.parametrize(
+    "example",
+    (
+        """
+        [project]
+        name = "myproj"
+        version = "1.2"
+
+        [my-tool.that-disrespect.pep518]
+        value = 42
+        """,
+    ),
+)
+def test_ignore_unrelated_config(tmp_path, example):
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(cleandoc(example))
+
+    # Make sure no error is raised due to 3rd party configs in pyproject.toml
+    assert read_configuration(pyproject) is not None
+
+
+@pytest.mark.parametrize(
+    "example, error_msg",
+    [
+        (
+            """
+            [project]
+            name = "myproj"
+            version = "1.2"
+            requires = ['pywin32; platform_system=="Windows"']
+            """,
+            "configuration error: .project. must not contain ..requires.. properties",
+        ),
+    ],
+)
+def test_invalid_example(tmp_path, example, error_msg):
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(cleandoc(example))
+
+    pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S)
+    with pytest.raises(ValueError, match=pattern):
+        read_configuration(pyproject)
+
+
+@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
+def test_empty(tmp_path, config):
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(config)
+
+    # Make sure no error is raised
+    assert read_configuration(pyproject) == {}
+
+
+@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
+def test_include_package_data_by_default(tmp_path, config):
+    """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
+    default.
+    """
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(config)
+
+    config = read_configuration(pyproject)
+    assert config["tool"]["setuptools"]["include-package-data"] is True
+
+
+def test_include_package_data_in_setuppy(tmp_path):
+    """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
+    ``setup.py``.
+
+    See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
+    """
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n")
+    setuppy = tmp_path / "setup.py"
+    setuppy.write_text("__import__('setuptools').setup(include_package_data=False)")
+
+    with _Path(tmp_path):
+        dist = distutils.core.run_setup("setup.py", {}, stop_after="config")
+
+    assert dist.get_name() == "myproj"
+    assert dist.get_version() == "42"
+    assert dist.include_package_data is False
+
+
+class TestSkipBadConfig:
+    @pytest.mark.parametrize(
+        "setup_attrs",
+        [
+            {"name": "myproj"},
+            {"install_requires": ["does-not-exist"]},
+        ],
+    )
+    @pytest.mark.parametrize(
+        "pyproject_content",
+        [
+            "[project]\nrequires-python = '>=3.7'\n",
+            "[project]\nversion = '42'\nrequires-python = '>=3.7'\n",
+            "[project]\nname='othername'\nrequires-python = '>=3.7'\n",
+        ],
+    )
+    def test_popular_config(self, tmp_path, pyproject_content, setup_attrs):
+        # See pypa/setuptools#3199 and pypa/cibuildwheel#1064
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(pyproject_content)
+        dist = Distribution(attrs=setup_attrs)
+
+        prev_name = dist.get_name()
+        prev_deps = dist.install_requires
+
+        with pytest.warns(_InvalidFile, match=r"DO NOT include.*\[project\].* table"):
+            dist = apply_configuration(dist, pyproject)
+
+        assert dist.get_name() != "othername"
+        assert dist.get_name() == prev_name
+        assert dist.python_requires is None
+        assert set(dist.install_requires) == set(prev_deps)
diff --git a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
new file mode 100644
index 0000000000..5687cf1a2c
--- /dev/null
+++ b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
@@ -0,0 +1,82 @@
+import pytest
+
+from setuptools.config.pyprojecttoml import apply_configuration
+from setuptools.dist import Distribution
+from setuptools.tests.textwrap import DALS
+
+
+def test_dynamic_dependencies(tmp_path):
+    (tmp_path / "requirements.txt").write_text("six\n  # comment\n")
+    pyproject = (tmp_path / "pyproject.toml")
+    pyproject.write_text(DALS("""
+    [project]
+    name = "myproj"
+    version = "1.0"
+    dynamic = ["dependencies"]
+
+    [build-system]
+    requires = ["setuptools", "wheel"]
+    build-backend = "setuptools.build_meta"
+
+    [tool.setuptools.dynamic.dependencies]
+    file = ["requirements.txt"]
+    """))
+    dist = Distribution()
+    dist = apply_configuration(dist, pyproject)
+    assert dist.install_requires == ["six"]
+
+
+def test_dynamic_optional_dependencies(tmp_path):
+    (tmp_path / "requirements-docs.txt").write_text("sphinx\n  # comment\n")
+    pyproject = (tmp_path / "pyproject.toml")
+    pyproject.write_text(DALS("""
+    [project]
+    name = "myproj"
+    version = "1.0"
+    dynamic = ["optional-dependencies"]
+
+    [tool.setuptools.dynamic.optional-dependencies.docs]
+    file = ["requirements-docs.txt"]
+
+    [build-system]
+    requires = ["setuptools", "wheel"]
+    build-backend = "setuptools.build_meta"
+    """))
+    dist = Distribution()
+    dist = apply_configuration(dist, pyproject)
+    assert dist.extras_require == {"docs": ["sphinx"]}
+
+
+def test_mixed_dynamic_optional_dependencies(tmp_path):
+    """
+    Test that, if PEP 621 were loosened to allow mixing dynamic and static
+    configuration for fields that contain sub-fields (groups),
+    things would work out.
+    """
+    (tmp_path / "requirements-images.txt").write_text("pillow~=42.0\n  # comment\n")
+    pyproject = (tmp_path / "pyproject.toml")
+    pyproject.write_text(DALS("""
+    [project]
+    name = "myproj"
+    version = "1.0"
+    dynamic = ["optional-dependencies"]
+
+    [project.optional-dependencies]
+    docs = ["sphinx"]
+
+    [tool.setuptools.dynamic.optional-dependencies.images]
+    file = ["requirements-images.txt"]
+
+    [build-system]
+    requires = ["setuptools", "wheel"]
+    build-backend = "setuptools.build_meta"
+    """))
+    # Test that the mix-and-match doesn't currently validate.
+    with pytest.raises(ValueError, match="project.optional-dependencies"):
+        apply_configuration(Distribution(), pyproject)
+
+    # Explicitly disable the validation and try again, to see that the mix-and-match
+    # result would be correct.
+    dist = Distribution()
+    dist = apply_configuration(dist, pyproject, ignore_option_errors=True)
+    assert dist.extras_require == {"docs": ["sphinx"], "images": ["pillow~=42.0"]}
diff --git a/setuptools/tests/test_config.py b/setuptools/tests/config/test_setupcfg.py
similarity index 84%
rename from setuptools/tests/test_config.py
rename to setuptools/tests/config/test_setupcfg.py
index 005742e4be..69b75a89fb 100644
--- a/setuptools/tests/test_config.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -1,21 +1,22 @@
-import types
-import sys
-
-import contextlib
 import configparser
+import contextlib
+import inspect
+from pathlib import Path
+from unittest.mock import Mock, patch
 
 import pytest
 
 from distutils.errors import DistutilsOptionError, DistutilsFileError
-from mock import patch
 from setuptools.dist import Distribution, _Distribution
-from setuptools.config import ConfigHandler, read_configuration
-from distutils.core import Command
-from .textwrap import DALS
+from setuptools.config.setupcfg import ConfigHandler, read_configuration
+from setuptools.extern.packaging.requirements import InvalidRequirement
+from setuptools.warnings import SetuptoolsDeprecationWarning
+from ..textwrap import DALS
 
 
 class ErrConfigHandler(ConfigHandler):
     """Erroneous handler. Fails to implement required methods."""
+    section_prefix = "**err**"
 
 
 def make_package_dir(name, base_dir, ns=False):
@@ -70,7 +71,7 @@ def get_dist(tmpdir, kwargs_initial=None, parse=True):
 def test_parsers_implemented():
 
     with pytest.raises(NotImplementedError):
-        handler = ErrConfigHandler(None, {})
+        handler = ErrConfigHandler(None, {}, False, Mock())
         handler.parsers
 
 
@@ -186,9 +187,12 @@ def test_file_mixed(self, tmpdir):
 
     def test_file_sandboxed(self, tmpdir):
 
-        fake_env(tmpdir, '[metadata]\n' 'long_description = file: ../../README\n')
+        tmpdir.ensure("README")
+        project = tmpdir.join('depth1', 'depth2')
+        project.ensure(dir=True)
+        fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n')
 
-        with get_dist(tmpdir, parse=False) as dist:
+        with get_dist(project, parse=False) as dist:
             with pytest.raises(DistutilsOptionError):
                 dist.parse_config_files()  # file: out of sandbox
 
@@ -407,7 +411,7 @@ def test_deprecated_config_handlers(self, tmpdir):
             'requires = some, requirement\n',
         )
 
-        with pytest.deprecated_call():
+        with pytest.warns(SetuptoolsDeprecationWarning, match="requires"):
             with get_dist(tmpdir) as dist:
                 metadata = dist.metadata
 
@@ -464,12 +468,8 @@ def test_warn_dash_deprecation(self, tmpdir):
             'author-email = test@test.com\n'
             'maintainer_email = foo@foo.com\n',
         )
-        msg = (
-            "Usage of dash-separated 'author-email' will not be supported "
-            "in future versions. "
-            "Please use the underscore name 'author_email' instead"
-        )
-        with pytest.warns(UserWarning, match=msg):
+        msg = "Usage of dash-separated 'author-email' will not be supported"
+        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
             with get_dist(tmpdir) as dist:
                 metadata = dist.metadata
 
@@ -482,12 +482,8 @@ def test_make_option_lowercase(self, tmpdir):
         fake_env(
             tmpdir, '[metadata]\n' 'Name = foo\n' 'description = Some description\n'
         )
-        msg = (
-            "Usage of uppercase key 'Name' in 'metadata' will be deprecated in "
-            "future versions. "
-            "Please use lowercase 'name' instead"
-        )
-        with pytest.warns(UserWarning, match=msg):
+        msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
+        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
             with get_dist(tmpdir) as dist:
                 metadata = dist.metadata
 
@@ -516,7 +512,8 @@ def test_basic(self, tmpdir):
             'python_requires = >=1.0, !=2.8\n'
             'py_modules = module1, module2\n',
         )
-        with get_dist(tmpdir) as dist:
+        deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
+        with deprec, get_dist(tmpdir) as dist:
             assert dist.zip_safe
             assert dist.include_package_data
             assert dist.package_dir == {'': 'src', 'b': 'c'}
@@ -570,7 +567,8 @@ def test_multiline(self, tmpdir):
             '  http://some.com/here/1\n'
             '  http://some.com/there/2\n',
         )
-        with get_dist(tmpdir) as dist:
+        deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
+        with deprec, get_dist(tmpdir) as dist:
             assert dist.package_dir == {'': 'src', 'b': 'c'}
             assert dist.packages == ['pack_a', 'pack_b.subpack']
             assert dist.namespace_packages == ['pack1', 'pack2']
@@ -711,6 +709,72 @@ def test_extras_require(self, tmpdir):
             }
             assert dist.metadata.provides_extras == set(['pdf', 'rest'])
 
+    @pytest.mark.parametrize(
+        "config",
+        [
+            "[options.extras_require]\nfoo = bar;python_version<'3'",
+            "[options.extras_require]\nfoo = bar;os_name=='linux'",
+            "[options.extras_require]\nfoo = bar;python_version<'3'\n",
+            "[options.extras_require]\nfoo = bar;os_name=='linux'\n",
+            "[options]\ninstall_requires = bar;python_version<'3'",
+            "[options]\ninstall_requires = bar;os_name=='linux'",
+            "[options]\ninstall_requires = bar;python_version<'3'\n",
+            "[options]\ninstall_requires = bar;os_name=='linux'\n",
+        ],
+    )
+    def test_raises_accidental_env_marker_misconfig(self, config, tmpdir):
+        fake_env(tmpdir, config)
+        match = (
+            r"One of the parsed requirements in `(install_requires|extras_require.+)` "
+            "looks like a valid environment marker.*"
+        )
+        with pytest.raises(InvalidRequirement, match=match):
+            with get_dist(tmpdir) as _:
+                pass
+
+    @pytest.mark.parametrize(
+        "config",
+        [
+            "[options.extras_require]\nfoo = bar;python_version<3",
+            "[options.extras_require]\nfoo = bar;python_version<3\n",
+            "[options]\ninstall_requires = bar;python_version<3",
+            "[options]\ninstall_requires = bar;python_version<3\n",
+        ],
+    )
+    def test_warn_accidental_env_marker_misconfig(self, config, tmpdir):
+        fake_env(tmpdir, config)
+        match = (
+            r"One of the parsed requirements in `(install_requires|extras_require.+)` "
+            "looks like a valid environment marker.*"
+        )
+        with pytest.warns(SetuptoolsDeprecationWarning, match=match):
+            with get_dist(tmpdir) as _:
+                pass
+
+    @pytest.mark.parametrize(
+        "config",
+        [
+            "[options.extras_require]\nfoo =\n    bar;python_version<'3'",
+            "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy",
+            "[options.extras_require]\nfoo =\n    bar;python_version<'3'\n",
+            "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n",
+            "[options.extras_require]\nfoo =\n    bar\n    python_version<3\n",
+            "[options]\ninstall_requires =\n    bar;python_version<'3'",
+            "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy",
+            "[options]\ninstall_requires =\n    bar;python_version<'3'\n",
+            "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n",
+            "[options]\ninstall_requires =\n    bar\n    python_version<3\n",
+        ],
+    )
+    @pytest.mark.filterwarnings("error::setuptools.SetuptoolsDeprecationWarning")
+    def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
+        fake_env(tmpdir, config)
+        num_warnings = len(recwarn)
+        with get_dist(tmpdir) as _:
+            pass
+        # The examples are valid, no warnings shown
+        assert len(recwarn) == num_warnings
+
     def test_dash_preserved_extras_require(self, tmpdir):
         fake_env(tmpdir, '[options.extras_require]\n' 'foo-a = foo\n' 'foo_b = test\n')
 
@@ -859,22 +923,43 @@ def test_python_requires_invalid(self, tmpdir):
                 dist.parse_config_files()
 
     def test_cmdclass(self, tmpdir):
-        class CustomCmd(Command):
-            pass
-
-        m = types.ModuleType('custom_build', 'test package')
+        module_path = Path(tmpdir, "src/custom_build.py")  # auto discovery for src
+        module_path.parent.mkdir(parents=True, exist_ok=True)
+        module_path.write_text(
+            "from distutils.core import Command\n"
+            "class CustomCmd(Command): pass\n"
+        )
 
-        m.__dict__['CustomCmd'] = CustomCmd
+        setup_cfg = """
+            [options]
+            cmdclass =
+                customcmd = custom_build.CustomCmd
+        """
+        fake_env(tmpdir, inspect.cleandoc(setup_cfg))
 
-        sys.modules['custom_build'] = m
+        with get_dist(tmpdir) as dist:
+            cmdclass = dist.cmdclass['customcmd']
+            assert cmdclass.__name__ == "CustomCmd"
+            assert cmdclass.__module__ == "custom_build"
+            assert module_path.samefile(inspect.getfile(cmdclass))
 
+    def test_requirements_file(self, tmpdir):
         fake_env(
             tmpdir,
-            '[options]\n' 'cmdclass =\n' '    customcmd = custom_build.CustomCmd\n',
+            DALS("""
+            [options]
+            install_requires = file:requirements.txt
+            [options.extras_require]
+            colors = file:requirements-extra.txt
+            """)
         )
 
+        tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n')
+        tmpdir.join('requirements-extra.txt').write('colorama')
+
         with get_dist(tmpdir) as dist:
-            assert dist.cmdclass == {'customcmd': CustomCmd}
+            assert dist.install_requires == ['docutils>=0.3']
+            assert dist.extras_require == {'colors': ['colorama']}
 
 
 saved_dist_init = _Distribution.__init__
diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py
index 5894882435..112cdf4b28 100644
--- a/setuptools/tests/contexts.py
+++ b/setuptools/tests/contexts.py
@@ -6,7 +6,6 @@
 import site
 import io
 
-import pkg_resources
 from filelock import FileLock
 
 
@@ -28,11 +27,7 @@ def environment(**replacements):
     In a context, patch the environment with replacements. Pass None values
     to clear the values.
     """
-    saved = dict(
-        (key, os.environ[key])
-        for key in replacements
-        if key in os.environ
-    )
+    saved = dict((key, os.environ[key]) for key in replacements if key in os.environ)
 
     # remove values that are null
     remove = (key for (key, value) in replacements.items() if value is None)
@@ -81,6 +76,8 @@ def save_user_site_setting():
 
 @contextlib.contextmanager
 def save_pkg_resources_state():
+    import pkg_resources
+
     pr_state = pkg_resources.__getstate__()
     # also save sys.path
     sys_path = sys.path[:]
@@ -123,3 +120,26 @@ def session_locked_tmp_dir(request, tmp_path_factory, name):
         # ^-- prevent multiple workers to access the directory at once
         locked_dir.mkdir(exist_ok=True, parents=True)
         yield locked_dir
+
+
+@contextlib.contextmanager
+def save_paths():
+    """Make sure ``sys.path``, ``sys.meta_path`` and ``sys.path_hooks`` are preserved"""
+    prev = sys.path[:], sys.meta_path[:], sys.path_hooks[:]
+
+    try:
+        yield
+    finally:
+        sys.path, sys.meta_path, sys.path_hooks = prev
+
+
+@contextlib.contextmanager
+def save_sys_modules():
+    """Make sure initial ``sys.modules`` is preserved"""
+    prev_modules = sys.modules
+
+    try:
+        sys.modules = sys.modules.copy()
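+        # ^-- a shallow copy: modules imported inside the context are
+        #     discarded on exit, previously imported ones keep working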
+        yield
+    finally:
+        sys.modules = prev_modules
diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
index a0c0ec6e70..bcf29601ce 100644
--- a/setuptools/tests/environment.py
+++ b/setuptools/tests/environment.py
@@ -18,6 +18,19 @@ class VirtualEnv(jaraco.envs.VirtualEnv):
     def run(self, cmd, *args, **kwargs):
         cmd = [self.exe(cmd[0])] + cmd[1:]
         kwargs = {"cwd": self.root, **kwargs}  # Allow overriding
+        # In some environments (e.g. downstream distro packaging), where:
+        # - tox isn't used to run tests and
+        # - PYTHONPATH is set to point to a specific setuptools codebase and
+        # - no custom env is explicitly set by a test
+        # PYTHONPATH will leak into the spawned processes.
+        # In that case, tests look for modules in the wrong place (on PYTHONPATH).
+        # Unless the test explicitly sets its own env, pass a copy of the existing
+        # environment with PYTHONPATH removed to the subprocesses.
+        if "env" not in kwargs:
+            env = dict(os.environ)
+            if "PYTHONPATH" in env:
+                del env["PYTHONPATH"]
+            kwargs["env"] = env
         return subprocess.check_output(cmd, *args, **kwargs)
 
 
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 7599e65540..f1cfc66c81 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -1,6 +1,8 @@
+import os
 import contextlib
 import sys
 import subprocess
+from pathlib import Path
 
 import pytest
 import path
@@ -64,6 +66,9 @@ def sample_project(tmp_path):
 
 @pytest.fixture(scope="session")
 def setuptools_sdist(tmp_path_factory, request):
+    if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"):
+        return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve()
+
     with contexts.session_locked_tmp_dir(
             request, tmp_path_factory, "sdist_build") as tmp:
         dist = next(tmp.glob("*.tar.gz"), None)
@@ -79,6 +84,9 @@ def setuptools_sdist(tmp_path_factory, request):
 
 @pytest.fixture(scope="session")
 def setuptools_wheel(tmp_path_factory, request):
+    if os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"):
+        return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve()
+
     with contexts.session_locked_tmp_dir(
             request, tmp_path_factory, "wheel_build") as tmp:
         dist = next(tmp.glob("*.whl"), None)
@@ -97,8 +105,21 @@ def venv(tmp_path, setuptools_wheel):
     """Virtual env with the version of setuptools under test installed"""
     env = environment.VirtualEnv()
     env.root = path.Path(tmp_path / 'venv')
+    env.create_opts = ['--no-setuptools', '--wheel=bundle']
+    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
     env.req = str(setuptools_wheel)
-    return env.create()
+    # In some environments (e.g. downstream distro packaging),
+    # where tox isn't used to run tests and PYTHONPATH is set to point to
+    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
+    # processes.
+    # env.create() should install the just created setuptools
+    # wheel, but it doesn't if it finds another existing matching setuptools
+    # installation present on PYTHONPATH:
+    # `setuptools is already installed with the same version as the provided
+    # wheel. Use --force-reinstall to force an installation of the wheel.`
+    # This prevents leaking PYTHONPATH to the created environment.
+    with contexts.environment(PYTHONPATH=None):
+        return env.create()
 
 
 @pytest.fixture
@@ -106,7 +127,7 @@ def venv_without_setuptools(tmp_path):
     """Virtual env without any version of setuptools installed"""
     env = environment.VirtualEnv()
     env.root = path.Path(tmp_path / 'venv_without_setuptools')
-    env.create_opts = ['--no-setuptools']
+    env.create_opts = ['--no-setuptools', '--no-wheel']
     env.ensure_env()
     return env
 
diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py
index 43f43902e3..24c02be042 100644
--- a/setuptools/tests/integration/helpers.py
+++ b/setuptools/tests/integration/helpers.py
@@ -8,6 +8,7 @@
 import subprocess
 import tarfile
 from zipfile import ZipFile
+from pathlib import Path
 
 
 def run(cmd, env=None):
@@ -59,3 +60,16 @@ def get_content(self, zip_or_tar_info):
                 raise ValueError(msg)
             return str(content.read(), "utf-8")
         return str(self._obj.read(zip_or_tar_info), "utf-8")
+
+
+def get_sdist_members(sdist_path):
+    with tarfile.open(sdist_path, "r:gz") as tar:
+        files = [Path(f) for f in tar.getnames()]
+    # remove root folder
+    relative_files = ("/".join(f.parts[1:]) for f in files)
+    return {f for f in relative_files if f}
+
+
+def get_wheel_members(wheel_path):
+    with ZipFile(wheel_path) as zipfile:
+        return set(zipfile.namelist())
diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index 86cc42359d..b44e32fcb8 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -27,7 +27,7 @@
 
 pytestmark = pytest.mark.integration
 
-LATEST, = list(Enum("v", "LATEST"))
+LATEST, = Enum("v", "LATEST")
 """Default version to be checked"""
 # There are positive and negative aspects of checking the latest version of the
 # packages.
@@ -53,7 +53,7 @@
     ("brotli", LATEST),  # not in the list but used by urllib3
 
     # When adding packages to this list, make sure they expose a `__version__`
-    # attribute, or modify the tests bellow
+    # attribute, or modify the tests below
 ]
 
 
@@ -72,11 +72,11 @@
 # means it will download the previous stable version of setuptools.
 # `pip` flags can avoid that (the version of setuptools under test
 # should be the one to be used)
-SDIST_OPTIONS = (
+INSTALL_OPTIONS = (
     "--ignore-installed",
     "--no-build-isolation",
-    # We don't need "--no-binary :all:" since we specify the path to the sdist.
-    # It also helps with performance, since dependencies can come from wheels.
+    # Omit "--no-binary :all:" the sdist is supplied directly.
+    # Allows dependencies as wheels.
 )
 # The downside of `--no-build-isolation` is that pip will not download build
 # dependencies. The test script will have to also handle that.
@@ -112,6 +112,7 @@ def _debug_info():
 
 
 @pytest.mark.parametrize('package, version', EXAMPLES)
+@pytest.mark.uses_network
 def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
     venv_pip = (venv_python, "-m", "pip")
     sdist = retrieve_sdist(package, version, tmp_path)
@@ -124,7 +125,7 @@ def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel
     # Use a virtualenv to simulate PEP 517 isolation
     # but install fresh setuptools wheel to ensure the version under development
     run([*venv_pip, "install", "-I", setuptools_wheel])
-    run([*venv_pip, "install", *SDIST_OPTIONS, sdist])
+    run([*venv_pip, "install", *INSTALL_OPTIONS, sdist])
 
     # Execute a simple script to make sure the package was installed correctly
     script = f"import {package}; print(getattr({package}, '__version__', 0))"
@@ -164,17 +165,9 @@ def retrieve_pypi_sdist_metadata(package, version):
         raise ValueError(f"Release for {package} {version} was yanked")
 
     version = metadata["info"]["version"]
-    release = metadata["releases"][version]
-    dists = [d for d in release if d["packagetype"] == "sdist"]
-    if len(dists) == 0:
-        raise ValueError(f"No sdist found for {package} {version}")
-
-    for dist in dists:
-        if dist["filename"].endswith(".tar.gz"):
-            return dist
-
-    # Not all packages are publishing tar.gz
-    return dist
+    release = metadata["releases"][version] if version is LATEST else metadata["urls"]
+    sdist, = filter(lambda d: d["packagetype"] == "sdist", release)
+    return sdist
 
 
 def download(url, dest, md5_digest):
@@ -192,7 +185,7 @@ def download(url, dest, md5_digest):
 def build_deps(package, sdist_file):
     """Find out what are the build dependencies for a package.
 
-    We need to "manually" install them, since pip will not install build
+    "Manually" install them, since pip will not install build
     deps with `--no-build-isolation`.
     """
     import tomli as toml
@@ -201,9 +194,7 @@ def build_deps(package, sdist_file):
     # testenv without tomli
 
     archive = Archive(sdist_file)
-    pyproject = _read_pyproject(archive)
-
-    info = toml.loads(pyproject)
+    info = toml.loads(_read_pyproject(archive))
     deps = info.get("build-system", {}).get("requires", [])
     deps += EXTRA_BUILD_DEPS.get(package, [])
     # Remove setuptools from requirements (and deduplicate)
@@ -212,7 +203,9 @@ def build_deps(package, sdist_file):
 
 
 def _read_pyproject(archive):
-    for member in archive:
-        if os.path.basename(archive.get_name(member)) == "pyproject.toml":
-            return archive.get_content(member)
-    return ""
+    contents = (
+        archive.get_content(member)
+        for member in archive
+        if os.path.basename(archive.get_name(member)) == "pyproject.toml"
+    )
+    return next(contents, "")
diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py
index 245cf8ea38..34e916f50a 100644
--- a/setuptools/tests/namespaces.py
+++ b/setuptools/tests/namespaces.py
@@ -28,6 +28,29 @@ def build_namespace_package(tmpdir, name):
     return src_dir
 
 
+def build_pep420_namespace_package(tmpdir, name):
+    src_dir = tmpdir / name
+    src_dir.mkdir()
+    pyproject = src_dir / "pyproject.toml"
+    namespace, sep, rest = name.rpartition(".")
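+    # e.g. name="myns.pkgA" => namespace="myns", rest="pkgA"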
+    script = f"""\
+        [build-system]
+        requires = ["setuptools"]
+        build-backend = "setuptools.build_meta"
+
+        [project]
+        name = "{name}"
+        version = "3.14159"
+        """
+    pyproject.write_text(textwrap.dedent(script), encoding='utf-8')
+    ns_pkg_dir = src_dir / namespace.replace(".", "/")
+    ns_pkg_dir.mkdir(parents=True)
+    pkg_mod = ns_pkg_dir / (rest + ".py")
+    some_functionality = f"name = {rest!r}"
+    pkg_mod.write_text(some_functionality, encoding='utf-8')
+    return src_dir
+
+
 def make_site_dir(target):
     """
     Add a sitecustomize.py module in target to cause
diff --git a/setuptools/tests/script-with-bom.py b/setuptools/tests/script-with-bom.py
index 22dee0d2a3..93d28f1600 100644
--- a/setuptools/tests/script-with-bom.py
+++ b/setuptools/tests/script-with-bom.py
@@ -1,3 +1 @@
-# -*- coding: utf-8 -*-
-
-result = 'passed'
+result = 'passed'
diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py
index 1a900c6766..1b69c41858 100644
--- a/setuptools/tests/test_bdist_deprecations.py
+++ b/setuptools/tests/test_bdist_deprecations.py
@@ -1,7 +1,7 @@
 """develop tests
 """
-import mock
 import sys
+from unittest import mock
 
 import pytest
 
diff --git a/setuptools/tests/test_build.py b/setuptools/tests/test_build.py
new file mode 100644
index 0000000000..cefb3d343c
--- /dev/null
+++ b/setuptools/tests/test_build.py
@@ -0,0 +1,63 @@
+from contextlib import contextmanager
+from setuptools import Command, SetuptoolsDeprecationWarning
+from setuptools.dist import Distribution
+from setuptools.command.build import build
+from distutils.command.build import build as distutils_build
+
+import pytest
+
+
+def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
+    """
+    Check that the setuptools Distribution uses the
+    setuptools-specific build object.
+    """
+
+    dist = Distribution(dict(
+        script_name='setup.py',
+        script_args=['build'],
+        packages=[],
+        package_data={'': ['path/*']},
+    ))
+    assert isinstance(dist.get_command_obj("build"), build)
+
+
+@contextmanager
+def _restore_sub_commands():
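+    """Ensure ``distutils_build.sub_commands`` is restored after the test
+    (objects created with ``@contextmanager`` can also be used as decorators).
+    """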
+    orig = distutils_build.sub_commands[:]
+    try:
+        yield
+    finally:
+        distutils_build.sub_commands = orig
+
+
+class Subcommand(Command):
+    """Dummy command to be used in tests"""
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        raise NotImplementedError("just to check if the command runs")
+
+
+@_restore_sub_commands()
+def test_subcommand_in_distutils(tmpdir_cwd):
+    """
+    Ensure that sub commands registered in ``distutils`` run,
+    after instructing the users to migrate to ``setuptools``.
+    """
+    dist = Distribution(dict(
+        packages=[],
+        cmdclass={'subcommand': Subcommand},
+    ))
+    distutils_build.sub_commands.append(('subcommand', None))
+
+    warning_msg = "please use .setuptools.command.build."
+    with pytest.warns(SetuptoolsDeprecationWarning, match=warning_msg):
+        # For backward compatibility, the subcommand should run anyway:
+        with pytest.raises(NotImplementedError, match="the command runs"):
+            dist.run_command("build")
diff --git a/setuptools/tests/test_build_clib.py b/setuptools/tests/test_build_clib.py
index 48bea2b43d..2d9273cdca 100644
--- a/setuptools/tests/test_build_clib.py
+++ b/setuptools/tests/test_build_clib.py
@@ -1,6 +1,8 @@
+from unittest import mock
+
 import pytest
 
-import mock
+import random
 from distutils.errors import DistutilsSetupError
 from setuptools.command.build_clib import build_clib
 from setuptools.dist import Distribution
@@ -55,3 +57,30 @@ def test_build_libraries(self, mock_newer):
         cmd.build_libraries(libs)
         assert cmd.compiler.compile.call_count == 1
         assert cmd.compiler.create_static_lib.call_count == 1
+
+    @mock.patch(
+        'setuptools.command.build_clib.newer_pairwise_group')
+    def test_build_libraries_reproducible(self, mock_newer):
+        dist = Distribution()
+        cmd = build_clib(dist)
+
+        # Mock the compiler and the dependency check, so the arguments
+        # passed to `newer_pairwise_group` can be inspected
+        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
+        mock_newer.return_value = ([], [])
+
+        original_sources = ['a-example.c', 'example.c']
+        sources = original_sources
+
+        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
+        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]
+
+        cmd.build_libraries(libs)
+        computed_call_args = mock_newer.call_args[0]
+
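+        # Re-shuffle until the source order actually differs from the original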
+        while sources == original_sources:
+            sources = random.sample(original_sources, len(original_sources))
+        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]
+
+        cmd.build_libraries(libs)
+        assert computed_call_args == mock_newer.call_args[0]
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 3177a2cdd6..62ba925e6d 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -2,16 +2,20 @@
 import sys
 import distutils.command.build_ext as orig
 from distutils.sysconfig import get_config_var
+from importlib.util import cache_from_source as _compiled_file_name
 
 from jaraco import path
 
 from setuptools.command.build_ext import build_ext, get_abi3_suffix
 from setuptools.dist import Distribution
 from setuptools.extension import Extension
+from setuptools.errors import CompileError
 
 from . import environment
 from .textwrap import DALS
 
+import pytest
+
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
@@ -83,6 +87,141 @@ def test_ext_suffix_override(self):
         finally:
             del os.environ['SETUPTOOLS_EXT_SUFFIX']
 
+    def dist_with_example(self):
+        files = {
+            "src": {"mypkg": {"subpkg": {"ext2.c": ""}}},
+            "c-extensions": {"ext1": {"main.c": ""}},
+        }
+
+        ext1 = Extension("mypkg.ext1", ["c-extensions/ext1/main.c"])
+        ext2 = Extension("mypkg.subpkg.ext2", ["src/mypkg/subpkg/ext2.c"])
+        ext3 = Extension("ext3", ["c-extension/ext3.c"])
+
+        path.build(files)
+        dist = Distribution({
+            "script_name": "%test%",
+            "ext_modules": [ext1, ext2, ext3],
+            "package_dir": {"": "src"},
+        })
+        return dist
+
+    def test_get_outputs(self, tmpdir_cwd, monkeypatch):
+        monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3')  # make test OS-independent
+        monkeypatch.setattr('setuptools.command.build_ext.use_stubs', False)
+        dist = self.dist_with_example()
+
+        # Regular build: get_outputs not empty, but get_output_mappings is empty
+        build_ext = dist.get_command_obj("build_ext")
+        build_ext.editable_mode = False
+        build_ext.ensure_finalized()
+        build_lib = build_ext.build_lib.replace(os.sep, "/")
+        outputs = [x.replace(os.sep, "/") for x in build_ext.get_outputs()]
+        assert outputs == [
+            f"{build_lib}/ext3.mp3",
+            f"{build_lib}/mypkg/ext1.mp3",
+            f"{build_lib}/mypkg/subpkg/ext2.mp3",
+        ]
+        assert build_ext.get_output_mapping() == {}
+
+        # Editable build: get_output_mappings should contain everything in get_outputs
+        dist.reinitialize_command("build_ext")
+        build_ext.editable_mode = True
+        build_ext.ensure_finalized()
+        mapping = {
+            k.replace(os.sep, "/"): v.replace(os.sep, "/")
+            for k, v in build_ext.get_output_mapping().items()
+        }
+        assert mapping == {
+            f"{build_lib}/ext3.mp3": "src/ext3.mp3",
+            f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
+            f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
+        }
+
+    def test_get_output_mapping_with_stub(self, tmpdir_cwd, monkeypatch):
+        monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3')  # make test OS-independent
+        monkeypatch.setattr('setuptools.command.build_ext.use_stubs', True)
+        dist = self.dist_with_example()
+
+        # Editable build should create compiled stubs (.pyc files only, no .py)
+        build_ext = dist.get_command_obj("build_ext")
+        build_ext.editable_mode = True
+        build_ext.ensure_finalized()
+        for ext in build_ext.extensions:
+            monkeypatch.setattr(ext, "_needs_stub", True)
+
+        build_lib = build_ext.build_lib.replace(os.sep, "/")
+        mapping = {
+            k.replace(os.sep, "/"): v.replace(os.sep, "/")
+            for k, v in build_ext.get_output_mapping().items()
+        }
+
+        def C(file):
+            """Make it possible to do comparisons and tests in a OS-independent way"""
+            return _compiled_file_name(file).replace(os.sep, "/")
+
+        assert mapping == {
+            C(f"{build_lib}/ext3.py"): C("src/ext3.py"),
+            f"{build_lib}/ext3.mp3": "src/ext3.mp3",
+            C(f"{build_lib}/mypkg/ext1.py"): C("src/mypkg/ext1.py"),
+            f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
+            C(f"{build_lib}/mypkg/subpkg/ext2.py"): C("src/mypkg/subpkg/ext2.py"),
+            f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
+        }
+
+        # Ensure only the compiled stubs are present, not the raw .py stub
+        assert f"{build_lib}/mypkg/ext1.py" not in mapping
+        assert f"{build_lib}/mypkg/subpkg/ext2.py" not in mapping
+
+        # Visualize what the cached stub files look like
+        example_stub = C(f"{build_lib}/mypkg/ext1.py")
+        assert example_stub in mapping
+        assert example_stub.startswith(f"{build_lib}/mypkg/__pycache__/ext1")
+        assert example_stub.endswith(".pyc")
+
+
+class TestBuildExtInplace:
+    def get_build_ext_cmd(self, optional: bool, **opts):
+        files = {
+            "eggs.c": "#include missingheader.h\n",
+            ".build": {"lib": {}, "tmp": {}},
+        }
+        path.build(files)
+        extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
+        dist = Distribution(dict(ext_modules=[extension]))
+        dist.script_name = 'setup.py'
+        cmd = build_ext(dist)
+        vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
+        cmd.ensure_finalized()
+        return cmd
+
+    def get_log_messages(self, caplog, capsys):
+        """
+        Historically, distutils "logged" by printing to sys.std*.
+        Later versions adopted the logging framework. Grab
+        messages regardless of how they were captured.
+        """
+        std = capsys.readouterr()
+        return std.out.splitlines() + std.err.splitlines() + caplog.messages
+
+    def test_optional(self, tmpdir_cwd, caplog, capsys):
+        """
+        If an optional extension fails to build, setuptools should show the
+        error in the logs but not abort the build
+        """
+        cmd = self.get_build_ext_cmd(optional=True, inplace=True)
+        cmd.run()
+        assert any(
+            'build_ext: building extension "spam.eggs" failed' in msg
+            for msg in self.get_log_messages(caplog, capsys)
+        )
+        # No compile error exception should be raised
+
+    def test_non_optional(self, tmpdir_cwd):
+        # Non-optional extensions should raise an exception
+        cmd = self.get_build_ext_cmd(optional=False, inplace=True)
+        with pytest.raises(CompileError):
+            cmd.run()
+
 
 def test_build_ext_config_handling(tmpdir_cwd):
     files = {
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 76f560e745..4db4de65de 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -7,17 +7,28 @@
 import contextlib
 from concurrent import futures
 import re
+from zipfile import ZipFile
+from pathlib import Path
 
 import pytest
 from jaraco import path
 
 from .textwrap import DALS
 
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+
 
 TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180"))  # in seconds
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
 
+pytestmark = pytest.mark.skipif(
+    sys.platform == "win32" and IS_PYPY,
+    reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
+    "is flaky and problematic"
+)
+
+
 class BuildBackendBase:
     def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'):
         self.cwd = cwd
@@ -33,7 +44,7 @@ def __init__(self, *args, **kwargs):
         self.pool = futures.ProcessPoolExecutor(max_workers=1)
 
     def __getattr__(self, name):
-        """Handles aribrary function invocations on the build backend."""
+        """Handles arbitrary function invocations on the build backend."""
 
         def method(*args, **kw):
             root = os.path.abspath(self.cwd)
@@ -46,7 +57,7 @@ def method(*args, **kw):
                 self.pool.shutdown(wait=False)  # doesn't stop already running processes
                 self._kill(pid)
                 pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
-            except (futures.process.BrokenProcessPool, MemoryError):
+            except (futures.process.BrokenProcessPool, MemoryError, OSError):
                 if IS_PYPY:
                     pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
                 raise
@@ -68,7 +79,7 @@ def __init__(self, *args, **kwargs):
          self.backend_obj) = self.backend_name.partition(':')
 
     def __call__(self, name, *args, **kw):
-        """Handles aribrary function invocations on the build backend."""
+        """Handles arbitrary function invocations on the build backend."""
         os.chdir(self.cwd)
         os.environ.update(self.env)
         mod = importlib.import_module(self.backend_name)
@@ -82,7 +93,7 @@ def __call__(self, name, *args, **kw):
 
 
 defns = [
-    {
+    {  # simple setup.py script
         'setup.py': DALS("""
             __import__('setuptools').setup(
                 name='foo',
@@ -96,7 +107,7 @@ def run():
                 print('hello')
             """),
     },
-    {
+    {  # setup.py that relies on __name__
         'setup.py': DALS("""
             assert __name__ == '__main__'
             __import__('setuptools').setup(
@@ -111,7 +122,7 @@ def run():
                 print('hello')
             """),
     },
-    {
+    {  # setup.py script that runs arbitrary code
         'setup.py': DALS("""
             variable = True
             def function():
@@ -129,7 +140,45 @@ def run():
                 print('hello')
             """),
     },
-    {
+    {  # setup.py script that constructs temp files to be included in the distribution
+        'setup.py': DALS("""
+            # Some packages construct files on the fly, include them in the package,
+            # and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
+            # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
+            # to obtain a distribution object first, and then run the distutils
+            # commands later, because these files will be removed in the meantime.
+
+            with open('world.py', 'w') as f:
+                f.write('x = 42')
+
+            try:
+                __import__('setuptools').setup(
+                    name='foo',
+                    version='0.0.0',
+                    py_modules=['world'],
+                    setup_requires=['six'],
+                )
+            finally:
+                # Some packages will clean temporary files
+                __import__('os').unlink('world.py')
+            """),
+    },
+    {  # setup.cfg only
+        'setup.cfg': DALS("""
+        [metadata]
+        name = foo
+        version = 0.0.0
+
+        [options]
+        py_modules=hello
+        setup_requires=six
+        """),
+        'hello.py': DALS("""
+        def run():
+            print('hello')
+        """)
+    },
+    {  # setup.cfg and setup.py
         'setup.cfg': DALS("""
         [metadata]
         name = foo
@@ -139,6 +188,7 @@ def run():
         py_modules=hello
         setup_requires=six
         """),
+        'setup.py': "__import__('setuptools').setup()",
         'hello.py': DALS("""
         def run():
             print('hello')
@@ -174,7 +224,20 @@ def test_build_wheel(self, build_backend):
         os.makedirs(dist_dir)
         wheel_name = build_backend.build_wheel(dist_dir)
 
-        assert os.path.isfile(os.path.join(dist_dir, wheel_name))
+        wheel_file = os.path.join(dist_dir, wheel_name)
+        assert os.path.isfile(wheel_file)
+
+        # Temporary files should be removed
+        assert not os.path.isfile('world.py')
+
+        with ZipFile(wheel_file) as zipfile:
+            wheel_contents = set(zipfile.namelist())
+
+        # Each one of the examples has a single module
+        # that should be included in the distribution
+        python_scripts = (f for f in wheel_contents if f.endswith('.py'))
+        modules = [f for f in python_scripts if not f.endswith('setup.py')]
+        assert len(modules) == 1
 
     @pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
     def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
@@ -223,6 +286,190 @@ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
         assert third_result == second_result
         assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
 
+    @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
+    def test_build_with_pyproject_config(self, tmpdir, setup_script):
+        files = {
+            'pyproject.toml': DALS("""
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "foo"
+                license = {text = "MIT"}
+                description = "This is a Python package"
+                dynamic = ["version", "readme"]
+                classifiers = [
+                    "Development Status :: 5 - Production/Stable",
+                    "Intended Audience :: Developers"
+                ]
+                urls = {Homepage = "http://github.com"}
+                dependencies = [
+                    "appdirs",
+                ]
+
+                [project.optional-dependencies]
+                all = [
+                    "tomli>=1",
+                    "pyscaffold>=4,<5",
+                    'importlib; python_version == "2.6"',
+                ]
+
+                [project.scripts]
+                foo = "foo.cli:main"
+
+                [tool.setuptools]
+                zip-safe = false
+                package-dir = {"" = "src"}
+                packages = {find = {where = ["src"]}}
+                license-files = ["LICENSE*"]
+
+                [tool.setuptools.dynamic]
+                version = {attr = "foo.__version__"}
+                readme = {file = "README.rst"}
+
+                [tool.distutils.sdist]
+                formats = "gztar"
+
+                [tool.distutils.bdist_wheel]
+                universal = true
+                """),
+            "MANIFEST.in": DALS("""
+                global-include *.py *.txt
+                global-exclude *.py[cod]
+                """),
+            "README.rst": "This is a ``README``",
+            "LICENSE.txt": "---- placeholder MIT license ----",
+            "src": {
+                "foo": {
+                    "__init__.py": "__version__ = '0.1'",
+                    "cli.py": "def main(): print('hello world')",
+                    "data.txt": "def main(): print('hello world')",
+                }
+            }
+        }
+        if setup_script:
+            files["setup.py"] = setup_script
+
+        build_backend = self.get_build_backend()
+        with tmpdir.as_cwd():
+            path.build(files)
+            sdist_path = build_backend.build_sdist("temp")
+            wheel_file = build_backend.build_wheel("temp")
+
+        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+            sdist_contents = set(tar.getnames())
+
+        with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+            wheel_contents = set(zipfile.namelist())
+            metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
+            license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
+            epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
+
+        assert sdist_contents - {"foo-0.1/setup.py"} == {
+            'foo-0.1',
+            'foo-0.1/LICENSE.txt',
+            'foo-0.1/MANIFEST.in',
+            'foo-0.1/PKG-INFO',
+            'foo-0.1/README.rst',
+            'foo-0.1/pyproject.toml',
+            'foo-0.1/setup.cfg',
+            'foo-0.1/src',
+            'foo-0.1/src/foo',
+            'foo-0.1/src/foo/__init__.py',
+            'foo-0.1/src/foo/cli.py',
+            'foo-0.1/src/foo/data.txt',
+            'foo-0.1/src/foo.egg-info',
+            'foo-0.1/src/foo.egg-info/PKG-INFO',
+            'foo-0.1/src/foo.egg-info/SOURCES.txt',
+            'foo-0.1/src/foo.egg-info/dependency_links.txt',
+            'foo-0.1/src/foo.egg-info/entry_points.txt',
+            'foo-0.1/src/foo.egg-info/requires.txt',
+            'foo-0.1/src/foo.egg-info/top_level.txt',
+            'foo-0.1/src/foo.egg-info/not-zip-safe',
+        }
+        assert wheel_contents == {
+            "foo/__init__.py",
+            "foo/cli.py",
+            "foo/data.txt",  # include_package_data defaults to True
+            "foo-0.1.dist-info/LICENSE.txt",
+            "foo-0.1.dist-info/METADATA",
+            "foo-0.1.dist-info/WHEEL",
+            "foo-0.1.dist-info/entry_points.txt",
+            "foo-0.1.dist-info/top_level.txt",
+            "foo-0.1.dist-info/RECORD",
+        }
+        assert license == "---- placeholder MIT license ----"
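+        # The parenthesised requirement spelling below (e.g. "tomli (>=1)") matches
+        # the METADATA formatting emitted by this generation of setuptools/wheel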
+        for line in (
+            "Summary: This is a Python package",
+            "License: MIT",
+            "Classifier: Intended Audience :: Developers",
+            "Requires-Dist: appdirs",
+            "Requires-Dist: tomli (>=1) ; extra == 'all'",
+            "Requires-Dist: importlib ; (python_version == \"2.6\") and extra == 'all'"
+        ):
+            assert line in metadata
+
+        assert metadata.strip().endswith("This is a ``README``")
+        assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
+
+    def test_static_metadata_in_pyproject_config(self, tmpdir):
+        # Make sure static metadata in pyproject.toml is not overwritten by setup.py
+        # as required by PEP 621
+        files = {
+            'pyproject.toml': DALS("""
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "foo"
+                description = "This is a Python package"
+                version = "42"
+                dependencies = ["six"]
+                """),
+            'hello.py': DALS("""
+                def run():
+                    print('hello')
+                """),
+            'setup.py': DALS("""
+                __import__('setuptools').setup(
+                    name='bar',
+                    version='13',
+                )
+                """),
+        }
+        build_backend = self.get_build_backend()
+        with tmpdir.as_cwd():
+            path.build(files)
+            sdist_path = build_backend.build_sdist("temp")
+            wheel_file = build_backend.build_wheel("temp")
+
+        assert (tmpdir / "temp/foo-42.tar.gz").exists()
+        assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
+        assert not (tmpdir / "temp/bar-13.tar.gz").exists()
+        assert not (tmpdir / "temp/bar-42.tar.gz").exists()
+        assert not (tmpdir / "temp/foo-13.tar.gz").exists()
+        assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
+        assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
+        assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
+
+        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+            pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
+            members = tar.getnames()
+            assert "bar-13/PKG-INFO" not in members
+
+        with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+            metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
+            members = zipfile.namelist()
+            assert "bar-13.dist-info/METADATA" not in members
+
+        for file in pkg_info, metadata:
+            for line in ("Name: foo", "Version: 42"):
+                assert line in file
+            for line in ("Name: bar", "Version: 13"):
+                assert line not in file
+
     def test_build_sdist(self, build_backend):
         dist_dir = os.path.abspath('pip-sdist')
         os.makedirs(dist_dir)
@@ -238,6 +485,23 @@ def test_prepare_metadata_for_build_wheel(self, build_backend):
 
         assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
 
+    def test_prepare_metadata_inplace(self, build_backend):
+        """
+        Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
+        See issue #3523.
+        """
+        for pre_existing in [
+            ".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
+            ".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
+            ".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
+            ".venv/python3.10/site-packages/click-8.1.3.dist-info",
+            "venv/python3.10/site-packages/distlib-0.3.5.dist-info",
+            "env/python3.10/site-packages/docutils-0.19.dist-info",
+        ]:
+            os.makedirs(pre_existing, exist_ok=True)
+        dist_info = build_backend.prepare_metadata_for_build_wheel(".")
+        assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
+
     def test_build_sdist_explicit_dist(self, build_backend):
         # explicitly specifying the dist folder should work
         # the folder sdist_directory and the ``--dist-dir`` can be the same
@@ -365,6 +629,71 @@ def test_build_sdist_relative_path_import(self, tmpdir_cwd):
         with pytest.raises(ImportError, match="^No module named 'hello'$"):
             build_backend.build_sdist("temp")
 
+    _simple_pyproject_example = {
+        "pyproject.toml": DALS("""
+            [project]
+            name = "proj"
+            version = "42"
+            """),
+        "src": {
+            "proj": {"__init__.py": ""}
+        }
+    }
+
+    def _assert_link_tree(self, parent_dir):
+        """All files in the directory should be either links or hard links"""
+        files = list(Path(parent_dir).glob("**/*"))
+        assert files  # Should not be empty
+        for file in files:
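+            # a hard-linked file shares its inode with the original, so its
+            # st_nlink count should be greater than 1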
+            assert file.is_symlink() or os.stat(file).st_nlink > 1
+
+    @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
+    # Since the backend is running via a process pool, on some operating systems
+    # we may have trouble making assertions based on warnings/stdout/stderr...
+    # So the best we can do is to ignore them for the time being.
+    def test_editable_with_global_option_still_works(self, tmpdir_cwd):
+        """The usage of --global-option is now discouraged in favour of --build-option.
+        This is required to make better sense of the provided escape hatch and to align with
+        previous pip behaviour. See pypa/setuptools#1928.
+        """
+        path.build({**self._simple_pyproject_example, '_meta': {}})
+        build_backend = self.get_build_backend()
+        assert not Path("build").exists()
+
+        cfg = {"--global-option": ["--mode", "strict"]}
+        build_backend.prepare_metadata_for_build_editable("_meta", cfg)
+        build_backend.build_editable("temp", cfg, "_meta")
+
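+        # Strict mode is expected to materialise an auxiliary tree of file links
+        # under ``build/__editable__.*``, which is what we inspect here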
+        self._assert_link_tree(next(Path("build").glob("__editable__.*")))
+
+    def test_editable_without_config_settings(self, tmpdir_cwd):
+        """
+        Sanity check to ensure tests with --mode=strict are different from the ones
+        without --mode.
+
+        --mode=strict should create a local directory with a package tree.
+        The directory should not get created otherwise.
+        """
+        path.build(self._simple_pyproject_example)
+        build_backend = self.get_build_backend()
+        assert not Path("build").exists()
+        build_backend.build_editable("temp")
+        assert not Path("build").exists()
+
+    @pytest.mark.parametrize(
+        "config_settings", [
+            {"--build-option": ["--mode", "strict"]},
+            {"editable-mode": "strict"},
+        ]
+    )
+    def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
+        path.build({**self._simple_pyproject_example, '_meta': {}})
+        assert not Path("build").exists()
+        build_backend = self.get_build_backend()
+        build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
+        build_backend.build_editable("temp", config_settings, "_meta")
+        self._assert_link_tree(next(Path("build").glob("__editable__.*")))
+
     @pytest.mark.parametrize('setup_literal, requirements', [
         ("'foo'", ['foo']),
         ("['foo']", ['foo']),
@@ -416,6 +745,30 @@ def run():
 
         assert expected == sorted(actual)
 
+    def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
+        # Make sure patches introduced to retrieve setup_requires don't accidentally
+        # activate auto-discovery and cause problems due to the incomplete set of
+        # attributes passed to MinimalDistribution
+        files = {
+            'pyproject.toml': DALS("""
+                [project]
+                name = "proj"
+                version = "42"
+            """),
+            "setup.py": DALS("""
+                __import__('setuptools').setup(
+                    setup_requires=["foo"],
+                    py_modules = ["hello", "world"]
+                )
+            """),
+            'hello.py': "'hello'",
+            'world.py': "'world'",
+        }
+        path.build(files)
+        build_backend = self.get_build_backend()
+        setup_requires = build_backend.get_requires_for_build_wheel()
+        assert setup_requires == ["wheel", "foo"]
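+        # ("wheel" is prepended by the build backend itself in this version)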
+
     def test_dont_install_setup_requires(self, tmpdir_cwd):
         files = {
             'setup.py': DALS("""
@@ -467,6 +820,24 @@ def test_sys_argv_passthrough(self, tmpdir_cwd):
         with pytest.raises(AssertionError):
             build_backend.build_sdist("temp")
 
+    _setup_py_file_abspath = {
+        'setup.py': DALS("""
+            import os
+            assert os.path.isabs(__file__)
+            __import__('setuptools').setup(
+                name='foo',
+                version='0.0.0',
+                py_modules=['hello'],
+                setup_requires=['six'],
+            )
+            """)
+    }
+
+    def test_setup_py_file_abspath(self, tmpdir_cwd):
+        path.build(self._setup_py_file_abspath)
+        build_backend = self.get_build_backend()
+        build_backend.build_sdist("temp")
+
     @pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
     def test_build_with_empty_setuppy(self, build_backend, build_hook):
         files = {'setup.py': ''}
@@ -494,3 +865,27 @@ def test_sys_argv_passthrough(self, tmpdir_cwd):
 
         build_backend = self.get_build_backend()
         build_backend.build_sdist("temp")
+
+
+def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
+    pyproject = """
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "myproj"
+    version = "42"
+    """
+    path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
+
+    # First: sanity check
+    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
+    output = str(venv.run(cmd, cwd=tmpdir), "utf-8").lower()
+    assert "running setup.py develop for myproj" not in output
+    assert "created wheel for myproj" in output
+
+    # Then: real test
+    env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
+    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
+    output = str(venv.run(cmd, cwd=tmpdir, env=env), "utf-8").lower()
+    assert "running setup.py develop for myproj" in output
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index 19c8b780b8..77738f23fa 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -1,11 +1,17 @@
 import os
 import stat
 import shutil
+from pathlib import Path
+from unittest.mock import Mock
 
 import pytest
+import jaraco.path
 
+from setuptools import SetuptoolsDeprecationWarning
 from setuptools.dist import Distribution
 
+from .textwrap import DALS
+
 
 def test_directories_in_package_data_glob(tmpdir_cwd):
     """
@@ -25,6 +31,29 @@ def test_directories_in_package_data_glob(tmpdir_cwd):
     dist.run_commands()
 
 
+def test_recursive_in_package_data_glob(tmpdir_cwd):
+    """
+    Files matching recursive globs (**) in package_data should
+    be included in the package data.
+
+    #1806
+    """
+    dist = Distribution(dict(
+        script_name='setup.py',
+        script_args=['build_py'],
+        packages=[''],
+        package_data={'': ['path/**/data']},
+    ))
+    os.makedirs('path/subpath/subsubpath')
+    open('path/subpath/subsubpath/data', 'w').close()
+
+    dist.parse_command_line()
+    dist.run_commands()
+
+    assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), \
+        "File is not included"
+
+
 def test_read_only(tmpdir_cwd):
     """
     Ensure read-only flag is not preserved in copy
@@ -79,3 +108,196 @@ def test_executable_data(tmpdir_cwd):
 
     assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, \
         "Script is not executable"
+
+
+EXAMPLE_WITH_MANIFEST = {
+    "setup.cfg": DALS("""
+        [metadata]
+        name = mypkg
+        version = 42
+
+        [options]
+        include_package_data = True
+        packages = find:
+
+        [options.packages.find]
+        exclude = *.tests*
+        """),
+    "mypkg": {
+        "__init__.py": "",
+        "resource_file.txt": "",
+        "tests": {
+            "__init__.py": "",
+            "test_mypkg.py": "",
+            "test_file.txt": "",
+        }
+    },
+    "MANIFEST.in": DALS("""
+        global-include *.py *.txt
+        global-exclude *.py[cod]
+        prune dist
+        prune build
+        prune *.egg-info
+        """)
+}
+
+
+def test_excluded_subpackages(tmpdir_cwd):
+    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
+    dist = Distribution({"script_name": "%PEP 517%"})
+    dist.parse_config_files()
+
+    build_py = dist.get_command_obj("build_py")
+    msg = r"Python recognizes 'mypkg\.tests' as an importable package"
+    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+        # TODO: To fix #3260 we need some transition period to deprecate the
+        # existing behavior of `include_package_data`. After the transition, we
+        # should remove the warning and fix the behaviour.
+        build_py.finalize_options()
+        build_py.run()
+
+    build_dir = Path(dist.get_command_obj("build_py").build_lib)
+    assert (build_dir / "mypkg/__init__.py").exists()
+    assert (build_dir / "mypkg/resource_file.txt").exists()
+
+    # Setuptools is configured to ignore `mypkg.tests`, therefore the following
+    # files/dirs should not be included in the distribution.
+    for f in [
+        "mypkg/tests/__init__.py",
+        "mypkg/tests/test_mypkg.py",
+        "mypkg/tests/test_file.txt",
+        "mypkg/tests",
+    ]:
+        with pytest.raises(AssertionError):
+            # TODO: Enforce the following assertion once #3260 is fixed
+            # (remove context manager and the following xfail).
+            assert not (build_dir / f).exists()
+
+    pytest.xfail("#3260")
+
+
+@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
+def test_existing_egg_info(tmpdir_cwd, monkeypatch):
+    """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
+    attempt to run egg_info again.
+    """
+    # == Pre-condition ==
+    # Generate an egg-info dir
+    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
+    dist = Distribution({"script_name": "%PEP 517%"})
+    dist.parse_config_files()
+    assert dist.include_package_data
+
+    egg_info = dist.get_command_obj("egg_info")
+    dist.run_command("egg_info")
+    egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
+    assert egg_info_dir.is_dir()
+
+    # == Setup ==
+    build_py = dist.get_command_obj("build_py")
+    build_py.finalize_options()
+    egg_info = dist.get_command_obj("egg_info")
+    egg_info_run = Mock(side_effect=egg_info.run)
+    monkeypatch.setattr(egg_info, "run", egg_info_run)
+
+    # == Remove caches ==
+    # egg_info is called when build_py looks for data_files, which gets cached.
+    # We need to ensure it is not cached yet, otherwise it may affect the tests
+    build_py.__dict__.pop('data_files', None)
+    dist.reinitialize_command(egg_info)
+
+    # == Sanity check ==
+    # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
+    build_py.existing_egg_info_dir = None
+    build_py.run()
+    egg_info_run.assert_called()
+
+    # == Remove caches ==
+    egg_info_run.reset_mock()
+    build_py.__dict__.pop('data_files', None)
+    dist.reinitialize_command(egg_info)
+
+    # == Actual test ==
+    # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
+    build_py.existing_egg_info_dir = egg_info_dir
+    build_py.run()
+    egg_info_run.assert_not_called()
+    assert build_py.data_files
+
+    # Make sure the list of outputs is actually OK
+    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
+    assert outputs
+    example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
+    assert example in outputs
+
+
+EXAMPLE_ARBITRARY_MAPPING = {
+    "pyproject.toml": DALS("""
+        [project]
+        name = "mypkg"
+        version = "42"
+
+        [tool.setuptools]
+        packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
+
+        [tool.setuptools.package-dir]
+        "" = "src"
+        "mypkg.sub2" = "src/mypkg/_sub2"
+        "mypkg.sub2.nested" = "other"
+        """),
+    "src": {
+        "mypkg": {
+            "__init__.py": "",
+            "resource_file.txt": "",
+            "sub1": {
+                "__init__.py": "",
+                "mod1.py": "",
+            },
+            "_sub2": {
+                "mod2.py": "",
+            },
+        },
+    },
+    "other": {
+        "__init__.py": "",
+        "mod3.py": "",
+    },
+    "MANIFEST.in": DALS("""
+        global-include *.py *.txt
+        global-exclude *.py[cod]
+        """)
+}
+
+
+def test_get_outputs(tmpdir_cwd):
+    jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
+    dist = Distribution({"script_name": "%test%"})
+    dist.parse_config_files()
+
+    build_py = dist.get_command_obj("build_py")
+    build_py.editable_mode = True
+    build_py.ensure_finalized()
+    build_lib = build_py.build_lib.replace(os.sep, "/")
+    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
+    assert outputs == {
+        f"{build_lib}/mypkg/__init__.py",
+        f"{build_lib}/mypkg/resource_file.txt",
+        f"{build_lib}/mypkg/sub1/__init__.py",
+        f"{build_lib}/mypkg/sub1/mod1.py",
+        f"{build_lib}/mypkg/sub2/mod2.py",
+        f"{build_lib}/mypkg/sub2/nested/__init__.py",
+        f"{build_lib}/mypkg/sub2/nested/mod3.py",
+    }
+    mapping = {
+        k.replace(os.sep, "/"): v.replace(os.sep, "/")
+        for k, v in build_py.get_output_mapping().items()
+    }
+    assert mapping == {
+        f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
+        f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
+        f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
+        f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
+        f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
+        f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
+        f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
+    }
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
new file mode 100644
index 0000000000..f65b00b61f
--- /dev/null
+++ b/setuptools/tests/test_config_discovery.py
@@ -0,0 +1,637 @@
+import os
+import sys
+from configparser import ConfigParser
+from itertools import product
+
+from setuptools.command.sdist import sdist
+from setuptools.dist import Distribution
+from setuptools.discovery import find_package_path, find_parent_package
+from setuptools.errors import PackageDiscoveryError
+
+import setuptools  # noqa -- force distutils.core to be patched
+import distutils.core
+
+import pytest
+import jaraco.path
+from path import Path as _Path
+
+from .contexts import quiet
+from .integration.helpers import get_sdist_members, get_wheel_members, run
+from .textwrap import DALS
+
+
+class TestFindParentPackage:
+    def test_single_package(self, tmp_path):
+        # find_parent_package should find a non-namespace parent package
+        (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True)
+        (tmp_path / "src/namespace/pkg/nested/__init__.py").touch()
+        (tmp_path / "src/namespace/pkg/__init__.py").touch()
+        packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
+        assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg"
+
+    def test_multiple_toplevel(self, tmp_path):
+        # find_parent_package should return None if the given list of packages
+        # does not have a single parent package
+        multiple = ["pkg", "pkg1", "pkg2"]
+        for name in multiple:
+            (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True)
+            (tmp_path / f"src/{name}/__init__.py").touch()
+        assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
+
+
+class TestDiscoverPackagesAndPyModules:
+    """Make sure discovered values for ``packages`` and ``py_modules`` work
+    similarly to explicit configuration for the simple scenarios.
+    """
+    OPTIONS = {
+        # Different options according to the circumstance being tested
+        "explicit-src": {
+            "package_dir": {"": "src"},
+            "packages": ["pkg"]
+        },
+        "variation-lib": {
+            "package_dir": {"": "lib"},  # variation of the source-layout
+        },
+        "explicit-flat": {
+            "packages": ["pkg"]
+        },
+        "explicit-single_module": {
+            "py_modules": ["pkg"]
+        },
+        "explicit-namespace": {
+            "packages": ["ns", "ns.pkg"]
+        },
+        "automatic-src": {},
+        "automatic-flat": {},
+        "automatic-single_module": {},
+        "automatic-namespace": {}
+    }
+    FILES = {
+        "src": ["src/pkg/__init__.py", "src/pkg/main.py"],
+        "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
+        "flat": ["pkg/__init__.py", "pkg/main.py"],
+        "single_module": ["pkg.py"],
+        "namespace": ["ns/pkg/__init__.py"]
+    }
+
+    def _get_info(self, circumstance):
+        _, _, layout = circumstance.partition("-")
+        files = self.FILES[layout]
+        options = self.OPTIONS[circumstance]
+        return files, options
+
+    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+    def test_sdist_filelist(self, tmp_path, circumstance):
+        files, options = self._get_info(circumstance)
+        _populate_project_dir(tmp_path, files, options)
+
+        _, cmd = _run_sdist_programmatically(tmp_path, options)
+
+        manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
+        for file in files:
+            assert any(f.endswith(file) for f in manifest)
+
+    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+    def test_project(self, tmp_path, circumstance):
+        files, options = self._get_info(circumstance)
+        _populate_project_dir(tmp_path, files, options)
+
+        # Simulate a pre-existing `build` directory
+        (tmp_path / "build").mkdir()
+        (tmp_path / "build/lib").mkdir()
+        (tmp_path / "build/bdist.linux-x86_64").mkdir()
+        (tmp_path / "build/bdist.linux-x86_64/file.py").touch()
+        (tmp_path / "build/lib/__init__.py").touch()
+        (tmp_path / "build/lib/file.py").touch()
+        (tmp_path / "dist").mkdir()
+        (tmp_path / "dist/file.py").touch()
+
+        _run_build(tmp_path)
+
+        sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+        print("~~~~~ sdist_members ~~~~~")
+        print('\n'.join(sdist_files))
+        assert sdist_files >= set(files)
+
+        wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+        print("~~~~~ wheel_members ~~~~~")
+        print('\n'.join(wheel_files))
+        orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
+        assert wheel_files >= orig_files
+
+        # Make sure build files are not included by mistake
+        for file in wheel_files:
+            assert "build" not in files
+            assert "dist" not in files
+
+    PURPOSEFULLY_EMPTY = {
+        "setup.cfg": DALS(
+            """
+            [metadata]
+            name = myproj
+            version = 0.0.0
+
+            [options]
+            {param} =
+            """
+        ),
+        "setup.py": DALS(
+            """
+            __import__('setuptools').setup(
+                name="myproj",
+                version="0.0.0",
+                {param}=[]
+            )
+            """
+        ),
+        "pyproject.toml": DALS(
+            """
+            [build-system]
+            requires = []
+            build-backend = 'setuptools.build_meta'
+
+            [project]
+            name = "myproj"
+            version = "0.0.0"
+
+            [tool.setuptools]
+            {param} = []
+            """
+        ),
+        "template-pyproject.toml": DALS(
+            """
+            [build-system]
+            requires = []
+            build-backend = 'setuptools.build_meta'
+            """
+        )
+    }
+
+    @pytest.mark.parametrize(
+        "config_file, param, circumstance",
+        product(
+            ["setup.cfg", "setup.py", "pyproject.toml"],
+            ["packages", "py_modules"],
+            FILES.keys()
+        )
+    )
+    def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
+        files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
+        _populate_project_dir(tmp_path, files, {})
+
+        if config_file == "pyproject.toml":
+            template_param = param.replace("_", "-")
+        else:
+            # Make sure build works with or without setup.cfg
+            pyproject = self.PURPOSEFULLY_EMPTY["template-pyproject.toml"]
+            (tmp_path / "pyproject.toml").write_text(pyproject)
+            template_param = param
+
+        config = self.PURPOSEFULLY_EMPTY[config_file].format(param=template_param)
+        (tmp_path / config_file).write_text(config)
+
+        dist = _get_dist(tmp_path, {})
+        # When either ``packages`` or ``py_modules`` is explicitly set to an
+        # empty list, then there should be no discovery
+        assert getattr(dist, param) == []
+        other = {"py_modules": "packages", "packages": "py_modules"}[param]
+        assert getattr(dist, other) is None
+
+    @pytest.mark.parametrize(
+        "extra_files, pkgs",
+        [
+            (["venv/bin/simulate_venv"], {"pkg"}),
+            (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
+            (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
+            (
+                # Type stubs can also be namespaced
+                ["namespace-stubs/pkg/__init__.pyi"],
+                {"pkg", "namespace-stubs", "namespace-stubs.pkg"},
+            ),
+            (
+                # Only the top-level package can have `-stubs`; nested ones are ignored
+                ["namespace-stubs/pkg-stubs/__init__.pyi"],
+                {"pkg", "namespace-stubs"}
+            ),
+            (["_hidden/file.py"], {"pkg"}),
+            (["news/finalize.py"], {"pkg"}),
+        ]
+    )
+    def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
+        files = self.FILES["flat"] + extra_files
+        _populate_project_dir(tmp_path, files, {})
+        dist = _get_dist(tmp_path, {})
+        assert set(dist.packages) == pkgs
+
+    @pytest.mark.parametrize(
+        "extra_files",
+        [
+            ["other/__init__.py"],
+            ["other/finalize.py"],
+        ]
+    )
+    def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
+        files = self.FILES["flat"] + extra_files
+        _populate_project_dir(tmp_path, files, {})
+        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+            _get_dist(tmp_path, {})
+
+    def test_flat_layout_with_single_module(self, tmp_path):
+        files = self.FILES["single_module"] + ["invalid-module-name.py"]
+        _populate_project_dir(tmp_path, files, {})
+        dist = _get_dist(tmp_path, {})
+        assert set(dist.py_modules) == {"pkg"}
+
+    def test_flat_layout_with_multiple_modules(self, tmp_path):
+        files = self.FILES["single_module"] + ["valid_module_name.py"]
+        _populate_project_dir(tmp_path, files, {})
+        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+            _get_dist(tmp_path, {})
+
+    def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
+        """Regression for issue 3692"""
+        from setuptools import build_meta
+
+        pyproject = '[project]\nname = "test"\nversion = "1"'
+        (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
+        (tmp_path / "foo.py").touch()
+        with jaraco.path.DirectoryStack().context(tmp_path):
+            build_meta.build_wheel(".")
+        # Ensure py_modules are found
+        wheel_files = get_wheel_members(next(tmp_path.glob("*.whl")))
+        assert "foo.py" in wheel_files
+
+
+class TestNoConfig:
+    DEFAULT_VERSION = "0.0.0"  # Default version given by setuptools
+
+    EXAMPLES = {
+        "pkg1": ["src/pkg1.py"],
+        "pkg2": ["src/pkg2/__init__.py"],
+        "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
+        "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
+        "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
+        "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
+    }
+
+    @pytest.mark.parametrize("example", EXAMPLES.keys())
+    def test_discover_name(self, tmp_path, example):
+        _populate_project_dir(tmp_path, self.EXAMPLES[example], {})
+        dist = _get_dist(tmp_path, {})
+        assert dist.get_name() == example
+
+    def test_build_with_discovered_name(self, tmp_path):
+        files = ["src/ns/nested/pkg/__init__.py"]
+        _populate_project_dir(tmp_path, files, {})
+        _run_build(tmp_path, "--sdist")
+        # Expected distribution file
+        dist_file = tmp_path / f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz"
+        assert dist_file.is_file()
+
+
+class TestWithAttrDirective:
+    @pytest.mark.parametrize(
+        "folder, opts",
+        [
+            ("src", {}),
+            ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
+        ]
+    )
+    def test_setupcfg_metadata(self, tmp_path, folder, opts):
+        files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
+        _populate_project_dir(tmp_path, files, opts)
+        (tmp_path / folder / "pkg/__init__.py").write_text("version = 42")
+        (tmp_path / "setup.cfg").write_text(
+            "[metadata]\nversion = attr: pkg.version\n"
+            + (tmp_path / "setup.cfg").read_text()
+        )
+
+        dist = _get_dist(tmp_path, {})
+        assert dist.get_name() == "pkg"
+        assert dist.get_version() == "42"
+        assert dist.package_dir
+        package_path = find_package_path("pkg", dist.package_dir, tmp_path)
+        assert os.path.exists(package_path)
+        assert folder in _Path(package_path).parts()
+
+        _run_build(tmp_path, "--sdist")
+        dist_file = tmp_path / "dist/pkg-42.tar.gz"
+        assert dist_file.is_file()
+
+    def test_pyproject_metadata(self, tmp_path):
+        _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
+        (tmp_path / "src/pkg/__init__.py").write_text("version = 42")
+        (tmp_path / "pyproject.toml").write_text(
+            "[project]\nname = 'pkg'\ndynamic = ['version']\n"
+            "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
+        )
+        dist = _get_dist(tmp_path, {})
+        assert dist.get_version() == "42"
+        assert dist.package_dir == {"": "src"}
+
+
+class TestWithCExtension:
+    def _simulate_package_with_extension(self, tmp_path):
+        # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
+        files = [
+            "benchmarks/file.py",
+            "docs/Makefile",
+            "docs/requirements.txt",
+            "docs/source/conf.py",
+            "proj/header.h",
+            "proj/file.py",
+            "py/proj.cpp",
+            "py/other.cpp",
+            "py/file.py",
+            "py/py.typed",
+            "py/tests/test_proj.py",
+            "README.rst",
+        ]
+        _populate_project_dir(tmp_path, files, {})
+
+        setup_script = """
+            from setuptools import Extension, setup
+
+            ext_modules = [
+                Extension(
+                    "proj",
+                    ["py/proj.cpp", "py/other.cpp"],
+                    include_dirs=["."],
+                    language="c++",
+                ),
+            ]
+            setup(ext_modules=ext_modules)
+        """
+        (tmp_path / "setup.py").write_text(DALS(setup_script))
+
+    def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
+        """Ensure that auto-discovery is not triggered when the project is based on
+        C-extensions only, for backward compatibility.
+        """
+        self._simulate_package_with_extension(tmp_path)
+
+        pyproject = """
+            [build-system]
+            requires = []
+            build-backend = 'setuptools.build_meta'
+        """
+        (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
+
+        setupcfg = """
+            [metadata]
+            name = proj
+            version = 42
+        """
+        (tmp_path / "setup.cfg").write_text(DALS(setupcfg))
+
+        dist = _get_dist(tmp_path, {})
+        assert dist.get_name() == "proj"
+        assert dist.get_version() == "42"
+        assert dist.py_modules is None
+        assert dist.packages is None
+        assert len(dist.ext_modules) == 1
+        assert dist.ext_modules[0].name == "proj"
+
+    def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
+        """When opting-in to pyproject.toml metadata, auto-discovery will be active if
+        the package lists C-extensions, but does not configure py-modules or packages.
+
+        This way we ensure users with complex package layouts that would lead to the
+        discovery of multiple top-level modules/packages see errors and are forced to
+        explicitly set ``packages`` or ``py-modules``.
+        """
+        self._simulate_package_with_extension(tmp_path)
+
+        pyproject = """
+            [project]
+            name = 'proj'
+            version = '42'
+        """
+        (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
+        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+            _get_dist(tmp_path, {})
+
+
+class TestWithPackageData:
+    def _simulate_package_with_data_files(self, tmp_path, src_root):
+        files = [
+            f"{src_root}/proj/__init__.py",
+            f"{src_root}/proj/file1.txt",
+            f"{src_root}/proj/nested/file2.txt",
+        ]
+        _populate_project_dir(tmp_path, files, {})
+
+        manifest = """
+            global-include *.py *.txt
+        """
+        (tmp_path / "MANIFEST.in").write_text(DALS(manifest))
+
+    EXAMPLE_SETUPCFG = """
+    [metadata]
+    name = proj
+    version = 42
+
+    [options]
+    include_package_data = True
+    """
+    EXAMPLE_PYPROJECT = """
+    [project]
+    name = "proj"
+    version = "42"
+    """
+
+    PYPROJECT_PACKAGE_DIR = """
+    [tool.setuptools]
+    package-dir = {"" = "src"}
+    """
+
+    @pytest.mark.parametrize(
+        "src_root, files",
+        [
+            (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
+            (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
+            ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
+            ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
+            (
+                "src",
+                {
+                    "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS(
+                        """
+                        packages = find:
+                        package_dir =
+                            =src
+
+                        [options.packages.find]
+                        where = src
+                        """
+                    )
+                }
+            ),
+            (
+                "src",
+                {
+                    "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS(
+                        """
+                        [tool.setuptools]
+                        package-dir = {"" = "src"}
+                        """
+                    )
+                },
+            ),
+        ]
+    )
+    def test_include_package_data(self, tmp_path, src_root, files):
+        """
+        Make sure auto-discovery does not affect ``include_package_data``.
+        See issue #3196.
+        """
+        jaraco.path.build(files, prefix=str(tmp_path))
+        self._simulate_package_with_data_files(tmp_path, src_root)
+
+        expected = {
+            os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
+            os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
+        }
+
+        _run_build(tmp_path)
+
+        sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+        print("~~~~~ sdist_members ~~~~~")
+        print('\n'.join(sdist_files))
+        assert sdist_files >= expected
+
+        wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+        print("~~~~~ wheel_members ~~~~~")
+        print('\n'.join(wheel_files))
+        orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected}
+        assert wheel_files >= orig_files
+
+
+def test_compatible_with_numpy_configuration(tmp_path):
+    files = [
+        "dir1/__init__.py",
+        "dir2/__init__.py",
+        "file.py",
+    ]
+    _populate_project_dir(tmp_path, files, {})
+    dist = Distribution({})
+    dist.configuration = object()
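+    # the mere presence of a ``configuration`` attribute (as used by
+    # numpy.distutils-based setup scripts) should disable auto-discovery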
+    dist.set_defaults()
+    assert dist.py_modules is None
+    assert dist.packages is None
+
+
+def test_name_discovery_doesnt_break_cli(tmpdir_cwd):
+    jaraco.path.build({"pkg.py": ""})
+    dist = Distribution({})
+    dist.script_args = ["--name"]
+    dist.set_defaults()
+    dist.parse_command_line()  # <-- no exception should be raised here.
+    assert dist.get_name() == "pkg"
+
+
+def test_preserve_explicit_name_with_dynamic_version(tmpdir_cwd, monkeypatch):
+    """According to #3545 it seems that ``name`` discovery is running,
+    even when the project already explicitly sets it.
+    This seems to be related to parsing of dynamic versions (via ``attr`` directive),
+    which requires the auto-discovery of ``package_dir``.
+    """
+    files = {
+        "src": {
+            "pkg": {"__init__.py": "__version__ = 42\n"},
+        },
+        "pyproject.toml": DALS("""
+            [project]
+            name = "myproj"  # purposefully different from package name
+            dynamic = ["version"]
+            [tool.setuptools.dynamic]
+            version = {"attr" = "pkg.__version__"}
+            """)
+    }
+    jaraco.path.build(files)
+    dist = Distribution({})
+    orig_analyse_name = dist.set_defaults.analyse_name
+
+    def spy_analyse_name():
+        # We can check that name discovery did not override the explicit name
+        # by ensuring the original name remains instead of the package name.
+        orig_analyse_name()
+        assert dist.get_name() == "myproj"
+
+    monkeypatch.setattr(dist.set_defaults, "analyse_name", spy_analyse_name)
+    dist.parse_config_files()
+    assert dist.get_version() == "42"
+    assert set(dist.packages) == {"pkg"}
+
+
+def _populate_project_dir(root, files, options):
+    # NOTE: Currently pypa/build will refuse to build the project if no
+    # `pyproject.toml` or `setup.py` is found. So it is impossible to do
+    # completely "config-less" projects.
+    (root / "setup.py").write_text("import setuptools\nsetuptools.setup()")
+    (root / "README.md").write_text("# Example Package")
+    (root / "LICENSE").write_text("Copyright (c) 2018")
+    _write_setupcfg(root, options)
+    paths = (root / f for f in files)
+    for path in paths:
+        path.parent.mkdir(exist_ok=True, parents=True)
+        path.touch()
+
+
+def _write_setupcfg(root, options):
+    if not options:
+        print("~~~~~ **NO** setup.cfg ~~~~~")
+        return
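+    # Render ``options`` as the [options] section of a setup.cfg file, e.g. a
+    # list value such as {"packages": ["pkg1", "pkg2"]} becomes "packages = pkg1, pkg2"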
+    setupcfg = ConfigParser()
+    setupcfg.add_section("options")
+    for key, value in options.items():
+        if key == "packages.find":
+            setupcfg.add_section(f"options.{key}")
+            setupcfg[f"options.{key}"].update(value)
+        elif isinstance(value, list):
+            setupcfg["options"][key] = ", ".join(value)
+        elif isinstance(value, dict):
+            str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items())
+            setupcfg["options"][key] = "\n" + str_value
+        else:
+            setupcfg["options"][key] = str(value)
+    with open(root / "setup.cfg", "w") as f:
+        setupcfg.write(f)
+    print("~~~~~ setup.cfg ~~~~~")
+    print((root / "setup.cfg").read_text())
+
+
+def _run_build(path, *flags):
+    cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
+    return run(cmd, env={'DISTUTILS_DEBUG': ''})
+
+
+def _get_dist(dist_path, attrs):
+    root = "/".join(os.path.split(dist_path))  # POSIX-style
+
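+    # Prefer running an existing setup.py (so that any attrs passed to ``setup()``
+    # are honoured); otherwise construct a bare Distribution from ``attrs``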
+    script = dist_path / 'setup.py'
+    if script.exists():
+        with _Path(dist_path):
+            dist = distutils.core.run_setup("setup.py", {}, stop_after="init")
+    else:
+        dist = Distribution(attrs)
+
+    dist.src_root = root
+    dist.script_name = "setup.py"
+    with _Path(dist_path):
+        dist.parse_config_files()
+
+    dist.set_defaults()
+    return dist
+
+
+def _run_sdist_programmatically(dist_path, attrs):
+    dist = _get_dist(dist_path, attrs)
+    cmd = sdist(dist)
+    cmd.ensure_finalized()
+    assert cmd.distribution.packages or cmd.distribution.py_modules
+
+    with quiet(), _Path(dist_path):
+        cmd.run()
+
+    return dist, cmd
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index c52072ac1e..0dd60342ba 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -5,12 +5,10 @@
 import sys
 import subprocess
 import platform
-import pathlib
 
 from setuptools.command import test
 
 import pytest
-import pip_run.launch
 
 from setuptools.command.develop import develop
 from setuptools.dist import Distribution
@@ -165,45 +163,3 @@ def test_namespace_package_importable(self, tmpdir):
         ]
         with test.test.paths_on_pythonpath([str(target)]):
             subprocess.check_call(pkg_resources_imp)
-
-    @pytest.mark.xfail(
-        platform.python_implementation() == 'PyPy',
-        reason="Workaround fails on PyPy (why?)",
-    )
-    def test_editable_prefix(self, tmp_path, sample_project):
-        """
-        Editable install to a prefix should be discoverable.
-        """
-        prefix = tmp_path / 'prefix'
-
-        # figure out where pip will likely install the package
-        site_packages = prefix / next(
-            pathlib.Path(path).relative_to(sys.prefix)
-            for path in sys.path
-            if 'site-packages' in path and path.startswith(sys.prefix)
-        )
-        site_packages.mkdir(parents=True)
-
-        # install workaround
-        pip_run.launch.inject_sitecustomize(str(site_packages))
-
-        env = dict(os.environ, PYTHONPATH=str(site_packages))
-        cmd = [
-            sys.executable,
-            '-m',
-            'pip',
-            'install',
-            '--editable',
-            str(sample_project),
-            '--prefix',
-            str(prefix),
-            '--no-build-isolation',
-        ]
-        subprocess.check_call(cmd, env=env)
-
-        # now run 'sample' with the prefix on the PYTHONPATH
-        bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
-        exe = prefix / bin / 'sample'
-        if sys.version_info < (3, 8) and platform.system() == 'Windows':
-            exe = str(exe)
-        subprocess.check_call([exe], env=env)
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 4980f2c3ce..e7d2f5ca25 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -2,6 +2,7 @@
 import collections
 import re
 import functools
+import os
 import urllib.request
 import urllib.parse
 from distutils.errors import DistutilsSetupError
@@ -18,6 +19,7 @@
 
 from .textwrap import DALS
 from .test_easy_install import make_nspkg_sdist
+from .test_find_packages import ensure_files
 
 import pytest
 
@@ -69,16 +71,19 @@ def test_dist__get_unpatched_deprecated():
     pytest.warns(DistDeprecationWarning, _get_unpatched, [""])
 
 
+EXAMPLE_BASE_INFO = dict(
+    name="package",
+    version="0.0.1",
+    author="Foo Bar",
+    author_email="foo@bar.net",
+    long_description="Long\ndescription",
+    description="Short description",
+    keywords=["one", "two"],
+)
+
+
 def __read_test_cases():
-    base = dict(
-        name="package",
-        version="0.0.1",
-        author="Foo Bar",
-        author_email="foo@bar.net",
-        long_description="Long\ndescription",
-        description="Short description",
-        keywords=["one", "two"],
-    )
+    base = EXAMPLE_BASE_INFO
 
     params = functools.partial(dict, base)
 
@@ -379,3 +384,126 @@ def test_rfc822_unescape(content, result):
 def test_metadata_name():
     with pytest.raises(DistutilsSetupError, match='missing.*name'):
         Distribution()._validate_metadata()
+
+
+@pytest.mark.parametrize(
+    "dist_name, py_module",
+    [
+        ("my.pkg", "my_pkg"),
+        ("my-pkg", "my_pkg"),
+        ("my_pkg", "my_pkg"),
+        ("pkg", "pkg"),
+    ]
+)
+def test_dist_default_py_modules(tmp_path, dist_name, py_module):
+    (tmp_path / f"{py_module}.py").touch()
+
+    (tmp_path / "setup.py").touch()
+    (tmp_path / "noxfile.py").touch()
+    # ^-- make sure common tool files are ignored
+
+    attrs = {
+        **EXAMPLE_BASE_INFO,
+        "name": dist_name,
+        "src_root": str(tmp_path)
+    }
+    # Find `py_modules` corresponding to dist_name if not given
+    dist = Distribution(attrs)
+    dist.set_defaults()
+    assert dist.py_modules == [py_module]
+    # When `py_modules` is given, don't do anything
+    dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]})
+    dist.set_defaults()
+    assert dist.py_modules == ["explicity_py_module"]
+    # When `packages` is given, don't do anything
+    dist = Distribution({**attrs, "packages": ["explicity_package"]})
+    dist.set_defaults()
+    assert not dist.py_modules
+
+
+@pytest.mark.parametrize(
+    "dist_name, package_dir, package_files, packages",
+    [
+        ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+        ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+        ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
+        (
+            "my_pkg",
+            None,
+            ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
+            ["my_pkg", "my_pkg2"]
+        ),
+        (
+            "my_pkg",
+            {"pkg": "lib", "pkg2": "lib2"},
+            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+            ["pkg", "pkg.nested", "pkg2"]
+        ),
+    ]
+)
+def test_dist_default_packages(
+    tmp_path, dist_name, package_dir, package_files, packages
+):
+    ensure_files(tmp_path, package_files)
+
+    (tmp_path / "setup.py").touch()
+    (tmp_path / "noxfile.py").touch()
+    # ^-- should not be included by default
+
+    attrs = {
+        **EXAMPLE_BASE_INFO,
+        "name": dist_name,
+        "src_root": str(tmp_path),
+        "package_dir": package_dir
+    }
+    # Find `packages` either corresponding to dist_name or inside src
+    dist = Distribution(attrs)
+    dist.set_defaults()
+    assert not dist.py_modules
+    assert set(dist.packages) == set(packages)
+    # When `py_modules` is given, don't do anything
+    dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
+    dist.set_defaults()
+    assert not dist.packages
+    assert set(dist.py_modules) == {"explicit_py_module"}
+    # When `packages` is given, don't do anything
+    dist = Distribution({**attrs, "packages": ["explicit_package"]})
+    dist.set_defaults()
+    assert not dist.py_modules
+    assert set(dist.packages) == {"explicit_package"}
+
+
+@pytest.mark.parametrize(
+    "dist_name, package_dir, package_files",
+    [
+        ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
+        ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
+        ("my_pkg", None, ["my_pkg.py"]),
+        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
+        ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
+        (
+            "my_pkg",
+            {"my_pkg": "lib", "my_pkg.lib2": "lib2"},
+            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+        ),
+        # Should not try to guess a name from multiple py_modules/packages
+        ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
+        ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
+    ]
+)
+def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
+    """Make sure dist.name is discovered from packages/py_modules"""
+    ensure_files(tmp_path, package_files)
+    attrs = {
+        **EXAMPLE_BASE_INFO,
+        "src_root": "/".join(os.path.split(tmp_path)),  # POSIX-style
+        "package_dir": package_dir
+    }
+    del attrs["name"]
+
+    dist = Distribution(attrs)
+    dist.set_defaults()
+    assert dist.py_modules or dist.packages
+    assert dist.get_name() == dist_name
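
The tests added above pin down the auto-discovery performed by `Distribution.set_defaults()`. A minimal sketch of the same API outside pytest, reusing exactly the attributes the tests pass; the scratch directory `proj` and its single module are assumptions for illustration:

    from pathlib import Path

    from setuptools.dist import Distribution

    root = Path("proj")               # hypothetical scratch project
    root.mkdir(exist_ok=True)
    (root / "my_pkg.py").touch()      # module matching the normalized dist name

    attrs = {"name": "my-pkg", "version": "0.0.1", "src_root": str(root)}
    dist = Distribution(attrs)
    dist.set_defaults()               # fills py_modules/packages when not given
    assert dist.py_modules == ["my_pkg"]
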
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index 29fbd09dbe..2ed790762f 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -1,27 +1,36 @@
 """Test .dist-info style distributions.
 """
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+from functools import partial
 
 import pytest
 
 import pkg_resources
+from setuptools.archive_util import unpack_archive
 from .textwrap import DALS
 
 
+read = partial(pathlib.Path.read_text, encoding="utf-8")
+
+
 class TestDistInfo:
 
-    metadata_base = DALS("""
+    metadata_base = DALS(
+        """
         Metadata-Version: 1.2
         Requires-Dist: splort (==4)
         Provides-Extra: baz
         Requires-Dist: quux (>=1.1); extra == 'baz'
-        """)
+        """
+    )
 
     @classmethod
     def build_metadata(cls, **kwargs):
-        lines = (
-            '{key}: {value}\n'.format(**locals())
-            for key, value in kwargs.items()
-        )
+        lines = ('{key}: {value}\n'.format(**locals()) for key, value in kwargs.items())
         return cls.metadata_base + ''.join(lines)
 
     @pytest.fixture
@@ -49,8 +58,7 @@ def metadata(self, tmpdir):
 
     def test_distinfo(self, metadata):
         dists = dict(
-            (d.project_name, d)
-            for d in pkg_resources.find_distributions(metadata)
+            (d.project_name, d) for d in pkg_resources.find_distributions(metadata)
         )
 
         assert len(dists) == 2, dists
@@ -72,3 +80,131 @@ def test_conditional_dependencies(self, metadata):
                 pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'),
             ]
             assert d.extras == ['baz']
+
+    def test_invalid_version(self, tmp_path):
+        """
+        Supplying an invalid version crashes dist_info.
+        """
+        config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n"
+        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
+        msg = re.compile("invalid version", re.M | re.I)
+        proc = run_command_inner("dist_info", cwd=tmp_path, check=False)
+        assert proc.returncode
+        assert msg.search(proc.stdout)
+        assert not list(tmp_path.glob("*.dist-info"))
+
+    def test_tag_arguments(self, tmp_path):
+        config = """
+        [metadata]
+        name=proj
+        version=42
+        [egg_info]
+        tag_date=1
+        tag_build=.post
+        """
+        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
+
+        print(run_command("dist_info", "--no-date", cwd=tmp_path))
+        dist_info = next(tmp_path.glob("*.dist-info"))
+        assert dist_info.name.startswith("proj-42")
+        shutil.rmtree(dist_info)
+
+        print(run_command("dist_info", "--tag-build", ".a", cwd=tmp_path))
+        dist_info = next(tmp_path.glob("*.dist-info"))
+        assert dist_info.name.startswith("proj-42a")
+
+    @pytest.mark.parametrize("keep_egg_info", (False, True))
+    def test_output_dir(self, tmp_path, keep_egg_info):
+        config = "[metadata]\nname=proj\nversion=42\n"
+        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
+        out = tmp_path / "__out"
+        out.mkdir()
+        opts = ["--keep-egg-info"] if keep_egg_info else []
+        run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
+        assert len(list(out.glob("*.dist-info"))) == 1
+        assert len(list(tmp_path.glob("*.dist-info"))) == 0
+        expected_egg_info = 1 if keep_egg_info else 0
+        assert len(list(out.glob("*.egg-info"))) == expected_egg_info
+        assert len(list(tmp_path.glob("*.egg-info"))) == 0
+        assert len(list(out.glob("*.__bkp__"))) == 0
+        assert len(list(tmp_path.glob("*.__bkp__"))) == 0
+
+
+class TestWheelCompatibility:
+    """Make sure the .dist-info directory produced with the ``dist_info`` command
+    is the same as the one produced by ``bdist_wheel``.
+    """
+
+    SETUPCFG = DALS(
+        """
+    [metadata]
+    name = {name}
+    version = {version}
+
+    [options]
+    install_requires =
+        foo>=12; sys_platform != "linux"
+
+    [options.extras_require]
+    test = pytest
+
+    [options.entry_points]
+    console_scripts =
+        executable-name = my_package.module:function
+    discover =
+        myproj = my_package.other_module:function
+    """
+    )
+
+    EGG_INFO_OPTS = [
+        # Related: #3088 #2872
+        ("", ""),
+        (".post", "[egg_info]\ntag_build = post\n"),
+        (".post", "[egg_info]\ntag_build = .post\n"),
+        (".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"),
+        (".dev", "[egg_info]\ntag_build = .dev\n"),
+        (".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"),
+        ("a1", "[egg_info]\ntag_build = .a1\n"),
+        ("+local", "[egg_info]\ntag_build = +local\n"),
+    ]
+
+    @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
+    @pytest.mark.parametrize("version", ["0.42.13"])
+    @pytest.mark.parametrize("suffix, cfg", EGG_INFO_OPTS)
+    def test_dist_info_is_the_same_as_in_wheel(
+        self, name, version, tmp_path, suffix, cfg
+    ):
+        config = self.SETUPCFG.format(name=name, version=version) + cfg
+
+        for i in "dir_wheel", "dir_dist":
+            (tmp_path / i).mkdir()
+            (tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8")
+
+        run_command("bdist_wheel", cwd=tmp_path / "dir_wheel")
+        wheel = next(tmp_path.glob("dir_wheel/dist/*.whl"))
+        unpack_archive(wheel, tmp_path / "unpack")
+        wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info"))
+
+        run_command("dist_info", cwd=tmp_path / "dir_dist")
+        dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
+
+        assert dist_info.name == wheel_dist_info.name
+        assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
+        for file in "METADATA", "entry_points.txt":
+            assert read(dist_info / file) == read(wheel_dist_info / file)
+
+
+def run_command_inner(*cmd, **kwargs):
+    opts = {
+        "stderr": subprocess.STDOUT,
+        "stdout": subprocess.PIPE,
+        "text": True,
+        'check': True,
+        **kwargs,
+    }
+    cmd = [sys.executable, "-c", "__import__('setuptools').setup()", *map(str, cmd)]
+    return subprocess.run(cmd, **opts)
+
+
+def run_command(*args, **kwargs):
+    return run_command_inner(*args, **kwargs).stdout
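
The `run_command_inner` helper works without a setup.py file by passing the stub `-c "__import__('setuptools').setup()"` to the interpreter. The same invocation can be reproduced outside the test suite; the `proj` directory containing a setup.cfg is an assumption for illustration:

    import subprocess
    import sys

    cmd = [sys.executable, "-c", "__import__('setuptools').setup()", "dist_info"]
    proc = subprocess.run(
        cmd,
        cwd="proj",  # hypothetical directory with a [metadata] section in setup.cfg
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        check=True,
    )
    print(proc.stdout)
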
diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py
index df8f35419c..3f07e9a1da 100644
--- a/setuptools/tests/test_distutils_adoption.py
+++ b/setuptools/tests/test_distutils_adoption.py
@@ -49,6 +49,13 @@ def count_meta_path(venv, env=None):
     return int(popen_text(venv.run)(cmd, env=win_sr(env)))
 
 
+skip_without_stdlib_distutils = pytest.mark.skipif(
+    sys.version_info >= (3, 12),
+    reason='stdlib distutils is removed from Python 3.12+',
+)
+
+
+@skip_without_stdlib_distutils
 def test_distutils_stdlib(venv):
     """
     Ensure stdlib distutils is used when appropriate.
@@ -119,9 +126,9 @@ def test_distutils_has_origin():
 @pytest.mark.parametrize(
     "distutils_version, imported_module",
     [
-        ("stdlib", "dir_util"),
-        ("stdlib", "file_util"),
-        ("stdlib", "archive_util"),
+        pytest.param("stdlib", "dir_util", marks=skip_without_stdlib_distutils),
+        pytest.param("stdlib", "file_util", marks=skip_without_stdlib_distutils),
+        pytest.param("stdlib", "archive_util", marks=skip_without_stdlib_distutils),
         ("local", "dir_util"),
         ("local", "file_util"),
         ("local", "archive_util"),
@@ -138,19 +145,22 @@ def test_modules_are_not_duplicated_on_import(
 
 
 ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r"""
-# Similar to ENSURE_IMPORTS_ARE_NOT_DUPLICATED
+import types
 import distutils.dist as dist
 from distutils import log
-
-assert dist.log == log, (
-    f"\n{dist.log}\n!=\n{log}"
-)
-
+if isinstance(dist.log, types.ModuleType):
+    assert dist.log == log, f"\n{dist.log}\n!=\n{log}"
 print("success")
 """
 
 
-@pytest.mark.parametrize("distutils_version", "local stdlib".split())
+@pytest.mark.parametrize(
+    "distutils_version",
+    [
+        "local",
+        pytest.param("stdlib", marks=skip_without_stdlib_distutils),
+    ]
+)
 def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv):
     env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
     cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED]
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index e27465110f..b2551dbce9 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -12,13 +12,14 @@
 import distutils.errors
 import io
 import zipfile
-import mock
 import time
 import re
 import subprocess
 import pathlib
 import warnings
 from collections import namedtuple
+from pathlib import Path
+from unittest import mock
 
 import pytest
 from jaraco import path
@@ -27,7 +28,9 @@
 from setuptools.sandbox import run_setup
 import setuptools.command.easy_install as ei
 from setuptools.command.easy_install import (
-    EasyInstallDeprecationWarning, ScriptWriter, PthDistributions,
+    EasyInstallDeprecationWarning,
+    ScriptWriter,
+    PthDistributions,
     WindowsScriptWriter,
 )
 from setuptools.dist import Distribution
@@ -61,11 +64,13 @@ def as_requirement(self):
         return 'spec'
 
 
-SETUP_PY = DALS("""
+SETUP_PY = DALS(
+    """
     from setuptools import setup
 
     setup()
-    """)
+    """
+)
 
 
 class TestEasyInstallTest:
@@ -78,8 +83,7 @@ def test_get_script_args(self):
         assert "'spec'" in script
         assert "'console_scripts'" in script
         assert "'name'" in script
-        assert re.search(
-            '^# EASY-INSTALL-ENTRY-SCRIPT', script, flags=re.MULTILINE)
+        assert re.search('^# EASY-INSTALL-ENTRY-SCRIPT', script, flags=re.MULTILINE)
 
     def test_no_find_links(self):
         # new option '--no-find-links', that blocks find-links added at
@@ -123,6 +127,7 @@ def test_all_site_dirs(self, monkeypatch):
 
         def mock_gsp():
             return [path]
+
         monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False)
         assert path in ei.get_site_dirs()
 
@@ -135,7 +140,8 @@ def sdist_unicode(self, tmpdir):
         files = [
             (
                 'setup.py',
-                DALS("""
+                DALS(
+                    """
                     import setuptools
                     setuptools.setup(
                         name="setuptools-test-unicode",
@@ -143,7 +149,8 @@ def sdist_unicode(self, tmpdir):
                         packages=["mypkg"],
                         include_package_data=True,
                     )
-                    """),
+                    """
+                ),
             ),
             (
                 'mypkg/__init__.py',
@@ -165,8 +172,7 @@ def sdist_unicode(self, tmpdir):
         return str(sdist)
 
     @fail_on_ascii
-    def test_unicode_filename_in_sdist(
-            self, sdist_unicode, tmpdir, monkeypatch):
+    def test_unicode_filename_in_sdist(self, sdist_unicode, tmpdir, monkeypatch):
         """
         The install command should execute correctly even if
         the package has unicode filenames.
@@ -187,7 +193,8 @@ def sdist_unicode_in_script(self, tmpdir):
         files = [
             (
                 "setup.py",
-                DALS("""
+                DALS(
+                    """
                     import setuptools
                     setuptools.setup(
                         name="setuptools-test-unicode",
@@ -196,7 +203,8 @@ def sdist_unicode_in_script(self, tmpdir):
                         include_package_data=True,
                         scripts=['mypkg/unicode_in_script'],
                     )
-                    """),
+                    """
+                ),
             ),
             ("mypkg/__init__.py", ""),
             (
@@ -208,7 +216,8 @@ def sdist_unicode_in_script(self, tmpdir):
 
                     non_python_fn() {
                     }
-                """),
+                """
+                ),
             ),
         ]
         sdist_name = "setuptools-test-unicode-script-1.0.zip"
@@ -223,7 +232,8 @@ def sdist_unicode_in_script(self, tmpdir):
 
     @fail_on_ascii
     def test_unicode_content_in_sdist(
-            self, sdist_unicode_in_script, tmpdir, monkeypatch):
+        self, sdist_unicode_in_script, tmpdir, monkeypatch
+    ):
         """
         The install command should execute correctly even if
         the package has unicode in scripts.
@@ -240,21 +250,25 @@ def sdist_script(self, tmpdir):
         files = [
             (
                 'setup.py',
-                DALS("""
+                DALS(
+                    """
                     import setuptools
                     setuptools.setup(
                         name="setuptools-test-script",
                         version="1.0",
                         scripts=["mypkg_script"],
                     )
-                    """),
+                    """
+                ),
             ),
             (
                 'mypkg_script',
-                DALS("""
+                DALS(
+                    """
                      #/usr/bin/python
                      print('mypkg_script')
-                     """),
+                     """
+                ),
             ),
         ]
         sdist_name = 'setuptools-test-script-1.0.zip'
@@ -262,8 +276,9 @@ def sdist_script(self, tmpdir):
         make_sdist(sdist, files)
         return sdist
 
-    @pytest.mark.skipif(not sys.platform.startswith('linux'),
-                        reason="Test can only be run on Linux")
+    @pytest.mark.skipif(
+        not sys.platform.startswith('linux'), reason="Test can only be run on Linux"
+    )
     def test_script_install(self, sdist_script, tmpdir, monkeypatch):
         """
         Check scripts are installed.
@@ -312,7 +327,12 @@ def test_add_from_site_is_ignored(self):
         location = '/test/location/does-not-have-to-exist'
         # PthDistributions expects all locations to be normalized
         location = pkg_resources.normalize_path(location)
-        pth = PthDistributions('does-not_exist', [location, ])
+        pth = PthDistributions(
+            'does-not_exist',
+            [
+                location,
+            ],
+        )
         assert not pth.dirty
         pth.add(PRDistribution(location))
         assert not pth.dirty
@@ -364,7 +384,6 @@ def setup_context(tmpdir):
 @pytest.mark.usefixtures("user_override")
 @pytest.mark.usefixtures("setup_context")
 class TestUserInstallTest:
-
     # prevent check that site-packages is writable. easy_install
     # shouldn't be writing to system site-packages during finalize
     # options, but while it does, bypass the behavior.
@@ -442,6 +461,7 @@ def user_install_setup_context(self, *args, **kwargs):
         """
         with self.orig_context(*args, **kwargs):
             import setuptools.command.easy_install as ei
+
             ei.__file__ = site.USER_SITE
             yield
 
@@ -483,8 +503,77 @@ def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
         run_setup('setup.py', ['bdist_egg'])
 
 
-class TestSetupRequires:
+class TestInstallRequires:
+    def test_setup_install_includes_dependencies(self, tmp_path, mock_index):
+        """
+        When ``python setup.py install`` is called directly, it will use easy_install
+        to fetch dependencies.
+        """
+        # TODO: Remove these tests once `setup.py install` is completely removed
+        project_root = tmp_path / "project"
+        project_root.mkdir(exist_ok=True)
+        install_root = tmp_path / "install"
+        install_root.mkdir(exist_ok=True)
+
+        self.create_project(project_root)
+        cmd = [
+            sys.executable,
+            '-c',
+            '__import__("setuptools").setup()',
+            'install',
+            '--install-base',
+            str(install_root),
+            '--install-lib',
+            str(install_root),
+            '--install-headers',
+            str(install_root),
+            '--install-scripts',
+            str(install_root),
+            '--install-data',
+            str(install_root),
+            '--install-purelib',
+            str(install_root),
+            '--install-platlib',
+            str(install_root),
+        ]
+        env = {**os.environ, "__EASYINSTALL_INDEX": mock_index.url}
+        cp = subprocess.run(
+            cmd,
+            cwd=str(project_root),
+            env=env,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            text=True,
+        )
+        assert cp.returncode != 0
+        try:
+            assert '/does-not-exist/' in {r.path for r in mock_index.requests}
+            assert next(
+                line
+                for line in cp.stdout.splitlines()
+                if "not find suitable distribution for" in line
+                and "does-not-exist" in line
+            )
+        except Exception:
+            if "failed to get random numbers" in cp.stdout:
+                pytest.xfail(f"{sys.platform} failure - {cp.stdout}")
+            raise
+
+    def create_project(self, root):
+        config = """
+        [metadata]
+        name = project
+        version = 42
+
+        [options]
+        install_requires = does-not-exist
+        py_modules = mod
+        """
+        (root / 'setup.cfg').write_text(DALS(config), encoding="utf-8")
+        (root / 'mod.py').touch()
+
 
+class TestSetupRequires:
     def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch):
         """
         When easy_install installs a source distribution which specifies
@@ -501,11 +590,14 @@ def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch):
                     with contexts.environment(PYTHONPATH=temp_install_dir):
                         cmd = [
                             sys.executable,
-                            '-m', 'setup',
+                            '-c',
+                            '__import__("setuptools").setup()',
                             'easy_install',
-                            '--index-url', mock_index.url,
+                            '--index-url',
+                            mock_index.url,
                             '--exclude-scripts',
-                            '--install-dir', temp_install_dir,
+                            '--install-dir',
+                            temp_install_dir,
                             dist_file,
                         ]
                         subprocess.Popen(cmd).wait()
@@ -521,17 +613,25 @@ def create_sdist():
         """
         with contexts.tempdir() as dir:
             dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
-            make_sdist(dist_path, [
-                ('setup.py', DALS("""
+            make_sdist(
+                dist_path,
+                [
+                    (
+                        'setup.py',
+                        DALS(
+                            """
                     import setuptools
                     setuptools.setup(
                         name="setuptools-test-fetcher",
                         version="1.0",
                         setup_requires = ['does-not-exist'],
                     )
-                """)),
-                ('setup.cfg', ''),
-            ])
+                """
+                        ),
+                    ),
+                    ('setup.cfg', ''),
+                ],
+            )
             yield dist_path
 
     use_setup_cfg = (
@@ -552,14 +652,16 @@ def test_setup_requires_overrides_version_conflict(self, use_setup_cfg):
         requirement is already on the path.
         """
 
-        fake_dist = PRDistribution('does-not-matter', project_name='foobar',
-                                   version='0.0')
+        fake_dist = PRDistribution(
+            'does-not-matter', project_name='foobar', version='0.0'
+        )
         working_set.add(fake_dist)
 
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
                 test_pkg = create_setup_requires_package(
-                    temp_dir, use_setup_cfg=use_setup_cfg)
+                    temp_dir, use_setup_cfg=use_setup_cfg
+                )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 with contexts.quiet() as (stdout, stderr):
                     # Don't even need to install the package, just
@@ -590,11 +692,13 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                     tf.extractall(foobar_1_dir)
                 sys.path.insert(1, foobar_1_dir)
 
-                dist = PRDistribution(foobar_1_dir, project_name='foo.bar',
-                                      version='0.1')
+                dist = PRDistribution(
+                    foobar_1_dir, project_name='foo.bar', version='0.1'
+                )
                 working_set.add(dist)
 
-                template = DALS("""\
+                template = DALS(
+                    """\
                     import foo  # Even with foo imported first the
                                 # setup_requires package should override
                     import setuptools
@@ -606,11 +710,17 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
 
                     if 'foo.bar-0.2' not in foo.__path__[0]:
                         print('FAIL')
-                """)
+                """
+                )
 
                 test_pkg = create_setup_requires_package(
-                    temp_dir, 'foo.bar', '0.2', make_nspkg_sdist, template,
-                    use_setup_cfg=use_setup_cfg)
+                    temp_dir,
+                    'foo.bar',
+                    '0.2',
+                    make_nspkg_sdist,
+                    template,
+                    use_setup_cfg=use_setup_cfg,
+                )
 
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
 
@@ -622,7 +732,8 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                     except pkg_resources.VersionConflict:
                         self.fail(
                             'Installing setup.py requirements '
-                            'caused a VersionConflict')
+                            'caused a VersionConflict'
+                        )
 
                 assert 'FAIL' not in stdout.getvalue()
                 lines = stdout.readlines()
@@ -632,27 +743,38 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
     @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
     def test_setup_requires_with_attr_version(self, use_setup_cfg):
         def make_dependency_sdist(dist_path, distname, version):
-            files = [(
-                'setup.py',
-                DALS("""
+            files = [
+                (
+                    'setup.py',
+                    DALS(
+                        """
                     import setuptools
                     setuptools.setup(
                         name={name!r},
                         version={version!r},
                         py_modules=[{name!r}],
                     )
-                    """.format(name=distname, version=version)),
-            ), (
-                distname + '.py',
-                DALS("""
+                    """.format(
+                            name=distname, version=version
+                        )
+                    ),
+                ),
+                (
+                    distname + '.py',
+                    DALS(
+                        """
                     version = 42
-                    """),
-            )]
+                    """
+                    ),
+                ),
+            ]
             make_sdist(dist_path, files)
+
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
                 test_pkg = create_setup_requires_package(
-                    temp_dir, setup_attrs=dict(version='attr: foobar.version'),
+                    temp_dir,
+                    setup_attrs=dict(version='attr: foobar.version'),
                     make_package=make_dependency_sdist,
                     use_setup_cfg=use_setup_cfg + ('version',),
                 )
@@ -671,15 +793,21 @@ def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch):
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
                 test_pkg = create_setup_requires_package(
-                    temp_dir, 'python-xlib', '0.19',
-                    setup_attrs=dict(dependency_links=[]))
+                    temp_dir,
+                    'python-xlib',
+                    '0.19',
+                    setup_attrs=dict(dependency_links=[]),
+                )
                 test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
                 with open(test_setup_cfg, 'w') as fp:
-                    fp.write(DALS(
-                        '''
+                    fp.write(
+                        DALS(
+                            '''
                         [easy_install]
                         index_url = https://pypi.org/legacy/
-                        '''))
+                        '''
+                        )
+                    )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 with pytest.raises(distutils.errors.DistutilsError):
                     run_setup(test_setup_py, [str('--version')])
@@ -698,25 +826,30 @@ def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
                 test_pkg = create_setup_requires_package(
                     temp_dir,
                     # Ignored (overridden by setup_attrs)
-                    'python-xlib', '0.19',
-                    setup_attrs=dict(
-                        setup_requires='dependency @ %s' % dep_url))
+                    'python-xlib',
+                    '0.19',
+                    setup_attrs=dict(setup_requires='dependency @ %s' % dep_url),
+                )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 run_setup(test_setup_py, [str('--version')])
         assert len(mock_index.requests) == 0
 
     def test_setup_requires_with_allow_hosts(self, mock_index):
-        ''' The `allow-hosts` option in not supported anymore. '''
+        '''The `allow-hosts` option is not supported anymore.'''
         files = {
             'test_pkg': {
-                'setup.py': DALS('''
+                'setup.py': DALS(
+                    '''
                     from setuptools import setup
                     setup(setup_requires='python-xlib')
-                    '''),
-                'setup.cfg': DALS('''
+                    '''
+                ),
+                'setup.cfg': DALS(
+                    '''
                     [easy_install]
                     allow_hosts = *
-                    '''),
+                    '''
+                ),
             }
         }
         with contexts.save_pkg_resources_state():
@@ -728,7 +861,7 @@ def test_setup_requires_with_allow_hosts(self, mock_index):
         assert len(mock_index.requests) == 0
 
     def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
-        ''' Check `python_requires` is honored. '''
+        '''Check `python_requires` is honored.'''
         monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
         monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
         monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
@@ -737,59 +870,65 @@ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
         dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
         dep_1_0_python_requires = '>=2.7'
         make_python_requires_sdist(
-            str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
+            str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires
+        )
         dep_2_0_sdist = 'dep-2.0.tar.gz'
         dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
-        dep_2_0_python_requires = '!=' + '.'.join(
-            map(str, sys.version_info[:2])) + '.*'
+        dep_2_0_python_requires = '!=' + '.'.join(map(str, sys.version_info[:2])) + '.*'
         make_python_requires_sdist(
-            str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
+            str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires
+        )
         index = tmpdir / 'index.html'
-        index.write_text(DALS(
-            '''
+        index.write_text(
+            DALS(
+                '''
             <!DOCTYPE html>
             <html><head><title>Links for dep</title></head>
             <body>
                 <h1>Links for dep</h1>
-                <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
-                <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
+            <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
+            <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
             </body>
             </html>
-            ''').format( # noqa
+            '''
+            ).format( # noqa
             dep_1_0_url=dep_1_0_url,
             dep_1_0_sdist=dep_1_0_sdist,
             dep_1_0_python_requires=dep_1_0_python_requires,
             dep_2_0_url=dep_2_0_url,
             dep_2_0_sdist=dep_2_0_sdist,
             dep_2_0_python_requires=dep_2_0_python_requires,
-            ), 'utf-8')
+            ),
+            'utf-8',
+        )
         index_url = path_to_url(str(index))
         with contexts.save_pkg_resources_state():
             test_pkg = create_setup_requires_package(
                 str(tmpdir),
-                'python-xlib', '0.19',  # Ignored (overridden by setup_attrs).
-                setup_attrs=dict(
-                    setup_requires='dep', dependency_links=[index_url]))
+                'python-xlib',
+                '0.19',  # Ignored (overridden by setup_attrs).
+                setup_attrs=dict(setup_requires='dep', dependency_links=[index_url]),
+            )
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             run_setup(test_setup_py, [str('--version')])
-        eggs = list(map(str, pkg_resources.find_distributions(
-            os.path.join(test_pkg, '.eggs'))))
+        eggs = list(
+            map(str, pkg_resources.find_distributions(os.path.join(test_pkg, '.eggs')))
+        )
         assert eggs == ['dep 1.0']
 
-    @pytest.mark.parametrize(
-        'with_dependency_links_in_setup_py',
-        (False, True))
+    @pytest.mark.parametrize('with_dependency_links_in_setup_py', (False, True))
     def test_setup_requires_with_find_links_in_setup_cfg(
-            self, monkeypatch,
-            with_dependency_links_in_setup_py):
+        self, monkeypatch, with_dependency_links_in_setup_py
+    ):
         monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
         monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
                 make_trivial_sdist(
-                    os.path.join(temp_dir, 'python-xlib-42.tar.gz'),
-                    'python-xlib',
-                    '42')
+                    os.path.join(temp_dir, 'python-xlib-42.tar.gz'), 'python-xlib', '42'
+                )
                 test_pkg = os.path.join(temp_dir, 'test_pkg')
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
@@ -799,62 +938,76 @@ def test_setup_requires_with_find_links_in_setup_cfg(
                         dependency_links = [os.path.join(temp_dir, 'links')]
                     else:
                         dependency_links = []
-                    fp.write(DALS(
-                        '''
+                    fp.write(
+                        DALS(
+                            '''
                         from setuptools import installer, setup
                         setup(setup_requires='python-xlib==42',
                               dependency_links={dependency_links!r})
-                        ''').format(
-                            dependency_links=dependency_links))
-                with open(test_setup_cfg, 'w') as fp:
-                    fp.write(DALS(
-                        '''
+                        '''
+                        ).format(dependency_links=dependency_links)
+                    )
+                with open(test_setup_cfg, 'w') as fp:
+                    fp.write(
+                        DALS(
+                            '''
                         [easy_install]
                         index_url = {index_url}
                         find_links = {find_links}
-                        ''').format(index_url=os.path.join(temp_dir, 'index'),
-                                    find_links=temp_dir))
+                        '''
+                        ).format(
+                            index_url=os.path.join(temp_dir, 'index'),
+                            find_links=temp_dir,
+                        )
+                    )
                 run_setup(test_setup_py, [str('--version')])
 
-    def test_setup_requires_with_transitive_extra_dependency(
-            self, monkeypatch):
-        # Use case: installing a package with a build dependency on
-        # an already installed `dep[extra]`, which in turn depends
-        # on `extra_dep` (whose is not already installed).
+    def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch):
+        '''
+        Use case: installing a package with a build dependency on
+        an already installed `dep[extra]`, which in turn depends
+        on `extra_dep` (which is not already installed).
+        '''
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
                 # Create source distribution for `extra_dep`.
                 make_trivial_sdist(
-                    os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
-                    'extra_dep', '1.0')
+                    os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), 'extra_dep', '1.0'
+                )
                 # Create source tree for `dep`.
                 dep_pkg = os.path.join(temp_dir, 'dep')
                 os.mkdir(dep_pkg)
-                path.build({
-                    'setup.py':
-                    DALS("""
+                path.build(
+                    {
+                        'setup.py': DALS(
+                            """
                     import setuptools
                     setuptools.setup(
                         name='dep', version='2.0',
                         extras_require={'extra': ['extra_dep']},
                     )
-                    """),
-                    'setup.cfg': '',
-                }, prefix=dep_pkg)
+                    """
+                        ),
+                        'setup.cfg': '',
+                    },
+                    prefix=dep_pkg,
+                )
                 # "Install" dep.
-                run_setup(
-                    os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
+                run_setup(os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
                 working_set.add_entry(dep_pkg)
                 # Create source tree for test package.
                 test_pkg = os.path.join(temp_dir, 'test_pkg')
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 os.mkdir(test_pkg)
                 with open(test_setup_py, 'w') as fp:
-                    fp.write(DALS(
-                        '''
+                    fp.write(
+                        DALS(
+                            '''
                         from setuptools import installer, setup
                         setup(setup_requires='dep[extra]')
-                        '''))
+                        '''
+                        )
+                    )
                 # Check...
                 monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir))
                 monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
                 monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
@@ -862,6 +1015,90 @@ def test_setup_requires_with_transitive_extra_dependency(
                 monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
                 run_setup(test_setup_py, [str('--version')])
 
+    def test_setup_requires_with_distutils_command_dep(self, monkeypatch):
+        '''
+        Use case: ensure build requirements' extras
+        are properly installed and activated.
+        '''
+        with contexts.save_pkg_resources_state():
+            with contexts.tempdir() as temp_dir:
+                # Create source distribution for `extra_dep`.
+                make_sdist(
+                    os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
+                    [
+                        (
+                            'setup.py',
+                            DALS(
+                                """
+                    import setuptools
+                    setuptools.setup(
+                        name='extra_dep',
+                        version='1.0',
+                        py_modules=['extra_dep'],
+                    )
+                    """
+                            ),
+                        ),
+                        ('setup.cfg', ''),
+                        ('extra_dep.py', ''),
+                    ],
+                )
+                # Create source tree for `epdep`.
+                dep_pkg = os.path.join(temp_dir, 'epdep')
+                os.mkdir(dep_pkg)
+                path.build(
+                    {
+                        'setup.py': DALS(
+                            """
+                    import setuptools
+                    setuptools.setup(
+                        name='dep', version='2.0',
+                        py_modules=['epcmd'],
+                        extras_require={'extra': ['extra_dep']},
+                        entry_points='''
+                                     [distutils.commands]
+                                     epcmd = epcmd:epcmd [extra]
+                                     ''',
+                    )
+                    """
+                        ),
+                        'setup.cfg': '',
+                        'epcmd.py': DALS(
+                            """
+                    from distutils.command.build_py import build_py
+
+                    import extra_dep
+
+                    class epcmd(build_py):
+                        pass
+                    """
+                        ),
+                    },
+                    prefix=dep_pkg,
+                )
+                # "Install" dep.
+                run_setup(os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
+                working_set.add_entry(dep_pkg)
+                # Create source tree for test package.
+                test_pkg = os.path.join(temp_dir, 'test_pkg')
+                test_setup_py = os.path.join(test_pkg, 'setup.py')
+                os.mkdir(test_pkg)
+                with open(test_setup_py, 'w') as fp:
+                    fp.write(
+                        DALS(
+                            '''
+                        from setuptools import installer, setup
+                        setup(setup_requires='dep[extra]')
+                        '''
+                        )
+                    )
+                # Check...
+                monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir))
+                monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
+                monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+                monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+                run_setup(test_setup_py, ['epcmd'])
+
 
 def make_trivial_sdist(dist_path, distname, version):
     """
@@ -869,17 +1106,25 @@ def make_trivial_sdist(dist_path, distname, version):
     setup.py.
""" - make_sdist(dist_path, [ - ('setup.py', - DALS("""\ + make_sdist( + dist_path, + [ + ( + 'setup.py', + DALS( + """\ import setuptools setuptools.setup( name=%r, version=%r ) - """ % (distname, version))), - ('setup.cfg', ''), - ]) + """ + % (distname, version) + ), + ), + ('setup.cfg', ''), + ], + ) def make_nspkg_sdist(dist_path, distname, version): @@ -894,7 +1139,8 @@ def make_nspkg_sdist(dist_path, distname, version): packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)] - setup_py = DALS("""\ + setup_py = DALS( + """\ import setuptools setuptools.setup( name=%r, @@ -902,12 +1148,13 @@ def make_nspkg_sdist(dist_path, distname, version): packages=%r, namespace_packages=[%r] ) - """ % (distname, version, packages, nspackage)) + """ + % (distname, version, packages, nspackage) + ) init = "__import__('pkg_resources').declare_namespace(__name__)" - files = [('setup.py', setup_py), - (os.path.join(nspackage, '__init__.py'), init)] + files = [('setup.py', setup_py), (os.path.join(nspackage, '__init__.py'), init)] for package in packages[1:]: filename = os.path.join(*(package.split('.') + ['__init__.py'])) files.append((filename, '')) @@ -916,21 +1163,27 @@ def make_nspkg_sdist(dist_path, distname, version): def make_python_requires_sdist(dist_path, distname, version, python_requires): - make_sdist(dist_path, [ - ( - 'setup.py', - DALS("""\ + make_sdist( + dist_path, + [ + ( + 'setup.py', + DALS( + """\ import setuptools setuptools.setup( name={name!r}, version={version!r}, python_requires={python_requires!r}, ) - """).format( - name=distname, version=version, - python_requires=python_requires)), - ('setup.cfg', ''), - ]) + """ + ).format( + name=distname, version=version, python_requires=python_requires + ), + ), + ('setup.cfg', ''), + ], + ) def make_sdist(dist_path, files): @@ -950,10 +1203,15 @@ def make_sdist(dist_path, files): dist.addfile(file_info, fileobj=file_bytes) -def create_setup_requires_package(path, distname='foobar', version='0.1', - make_package=make_trivial_sdist, - setup_py_template=None, setup_attrs={}, - use_setup_cfg=()): +def create_setup_requires_package( + path, + distname='foobar', + version='0.1', + make_package=make_trivial_sdist, + setup_py_template=None, + setup_attrs={}, + use_setup_cfg=(), +): """Creates a source tree under path for a trivial test package that has a single requirement in setup_requires--a tarball for that requirement is also created and added to the dependency_links argument. 
@@ -964,9 +1222,10 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
     """
     test_setup_attrs = {
-        'name': 'test_pkg', 'version': '0.0',
+        'name': 'test_pkg',
+        'version': '0.0',
         'setup_requires': ['%s==%s' % (distname, version)],
-        'dependency_links': [os.path.abspath(path)]
+        'dependency_links': [os.path.abspath(path)],
     }
     test_setup_attrs.update(setup_attrs)
 
@@ -1004,10 +1263,12 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
 
     # setup.py
     if setup_py_template is None:
-        setup_py_template = DALS("""\
+        setup_py_template = DALS(
+            """\
             import setuptools
             setuptools.setup(**%r)
-        """)
+        """
+        )
 
     with open(os.path.join(test_pkg, 'setup.py'), 'w') as f:
         f.write(setup_py_template % test_setup_attrs)
@@ -1019,7 +1280,7 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
 
 @pytest.mark.skipif(
     sys.platform.startswith('java') and ei.is_sh(sys.executable),
-    reason="Test cannot run under java when executable is sh"
+    reason="Test cannot run under java when executable is sh",
 )
 class TestScriptHeader:
     non_ascii_exe = '/Users/José/bin/python'
@@ -1031,22 +1292,21 @@ def test_get_script_header(self):
         assert actual == expected
 
     def test_get_script_header_args(self):
-        expected = '#!%s -x\n' % ei.nt_quote_arg(
-            os.path.normpath(sys.executable))
+        expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
         actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x')
         assert actual == expected
 
     def test_get_script_header_non_ascii_exe(self):
         actual = ei.ScriptWriter.get_header(
-            '#!/usr/bin/python',
-            executable=self.non_ascii_exe)
+            '#!/usr/bin/python', executable=self.non_ascii_exe
+        )
         expected = str('#!%s -x\n') % self.non_ascii_exe
         assert actual == expected
 
     def test_get_script_header_exe_with_spaces(self):
         actual = ei.ScriptWriter.get_header(
-            '#!/usr/bin/python',
-            executable='"' + self.exe_with_spaces + '"')
+            '#!/usr/bin/python', executable='"' + self.exe_with_spaces + '"'
+        )
         expected = '#!"%s"\n' % self.exe_with_spaces
         assert actual == expected
 
@@ -1144,3 +1404,52 @@ def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
     assert cmd.config_vars['py_version'] == '3.10.1'
     assert cmd.config_vars['py_version_short'] == '3.10'
     assert cmd.config_vars['py_version_nodot'] == '310'
+
+
+def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path):
+    '''`setup.py develop` should honor `--user` even under build isolation'''
+
+    # == Arrange ==
+    # Pretend that build isolation was enabled
+    # e.g pip sets the environment variable PYTHONNOUSERSITE=1
+    monkeypatch.setattr('site.ENABLE_USER_SITE', False)
+
+    # Patching $HOME for 2 reasons:
+    # 1. setuptools/command/easy_install.py:create_home_path
+    #    tries creating directories in $HOME
+    #    given `self.config_vars['DESTDIRS'] = "/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload"``  # noqa: E501
+    #    it will `makedirs("/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")``  # noqa: E501
+    # 2. We are going to force `site` to update site.USER_BASE and site.USER_SITE
+    #    To point inside our new home
+    monkeypatch.setenv('HOME', str(tmp_path / '.home'))
+    monkeypatch.setenv('USERPROFILE', str(tmp_path / '.home'))
+    monkeypatch.setenv('APPDATA', str(tmp_path / '.home'))
+    monkeypatch.setattr('site.USER_BASE', None)
+    monkeypatch.setattr('site.USER_SITE', None)
+    user_site = Path(site.getusersitepackages())
+    user_site.mkdir(parents=True, exist_ok=True)
+
+    sys_prefix = tmp_path / '.sys_prefix'
+    sys_prefix.mkdir(parents=True, exist_ok=True)
+    monkeypatch.setattr('sys.prefix', str(sys_prefix))
+
+    setup_script = (
+        "__import__('setuptools').setup(name='aproj', version=42, packages=[])\n"
+    )
+    (tmp_path / "setup.py").write_text(setup_script, encoding="utf-8")
+
+    # == Sanity check ==
+    assert list(sys_prefix.glob("*")) == []
+    assert list(user_site.glob("*")) == []
+
+    # == Act ==
+    run_setup('setup.py', ['develop', '--user'])
+
+    # == Assert ==
+    # Should not install to sys.prefix
+    assert list(sys_prefix.glob("*")) == []
+    # Should install to user site
+    installed = {f.name for f in user_site.glob("*")}
+    # sometimes easy-install.pth is created and sometimes not
+    installed = installed - {"easy-install.pth"}
+    assert installed == {'aproj.egg-link'}
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
new file mode 100644
index 0000000000..ac574c0eb8
--- /dev/null
+++ b/setuptools/tests/test_editable_install.py
@@ -0,0 +1,1011 @@
+import os
+import stat
+import sys
+import subprocess
+import platform
+from copy import deepcopy
+from importlib import import_module
+from importlib.machinery import EXTENSION_SUFFIXES
+from pathlib import Path
+from textwrap import dedent
+from unittest.mock import Mock
+from uuid import uuid4
+
+import jaraco.envs
+import jaraco.path
+import pip_run.launch
+import pytest
+from path import Path as _Path
+
+from . import contexts, namespaces
+
+from setuptools._importlib import resources as importlib_resources
+from setuptools.command.editable_wheel import (
+    _DebuggingTips,
+    _LinkTree,
+    _find_virtual_namespaces,
+    _find_namespaces,
+    _find_package_roots,
+    _finder_template,
+    editable_wheel,
+)
+from setuptools.dist import Distribution
+from setuptools.extension import Extension
+
+
+@pytest.fixture(params=["strict", "lenient"])
+def editable_opts(request):
+    if request.param == "strict":
+        return ["--config-settings", "editable-mode=strict"]
+    return []
+
+
+EXAMPLE = {
+    'pyproject.toml': dedent("""\
+        [build-system]
+        requires = ["setuptools"]
+        build-backend = "setuptools.build_meta"
+
+        [project]
+        name = "mypkg"
+        version = "3.14159"
+        license = {text = "MIT"}
+        description = "This is a Python package"
+        dynamic = ["readme"]
+        classifiers = [
+            "Development Status :: 5 - Production/Stable",
+            "Intended Audience :: Developers"
+        ]
+        urls = {Homepage = "http://github.com"}
+        dependencies = ['importlib-metadata; python_version<"3.8"']
+
+        [tool.setuptools]
+        package-dir = {"" = "src"}
+        packages = {find = {where = ["src"]}}
+        license-files = ["LICENSE*"]
+
+        [tool.setuptools.dynamic]
+        readme = {file = "README.rst"}
+
+        [tool.distutils.egg_info]
+        tag-build = ".post0"
+        """),
+    "MANIFEST.in": dedent("""\
+        global-include *.py *.txt
+        global-exclude *.py[cod]
+        prune dist
+        prune build
+        """).strip(),
+    "README.rst": "This is a ``README``",
+    "LICENSE.txt": "---- placeholder MIT license ----",
+    "src": {
+        "mypkg": {
+            "__init__.py": dedent("""\
+                import sys
+
+                if sys.version_info[:2] >= (3, 8):
+                    from importlib.metadata import PackageNotFoundError, version
+                else:
+                    from importlib_metadata import PackageNotFoundError, version
+
+                try:
+                    __version__ = version(__name__)
+                except PackageNotFoundError:
+                    __version__ = "unknown"
+                """),
+            "__main__.py": dedent("""\
+                from importlib.resources import read_text
+                from . import __version__, __name__ as parent
+                from .mod import x
+
+                data = read_text(parent, "data.txt")
+                print(__version__, data, x)
+                """),
+            "mod.py": "x = ''",
+            "data.txt": "Hello World",
+        }
+    }
+}
+
+
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+
+
+@pytest.mark.parametrize(
+    "files",
+    [
+        {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB},  # type: ignore
+        EXAMPLE,  # No setup.py script
+    ]
+)
+def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
+    project = tmp_path / "mypkg"
+    project.mkdir()
+    jaraco.path.build(files, prefix=project)
+
+    cmd = [venv.exe(), "-m", "pip", "install",
+           "--no-build-isolation",  # required to force current version of setuptools
+           "-e", str(project), *editable_opts]
+    print(str(subprocess.check_output(cmd), "utf-8"))
+
+    cmd = [venv.exe(), "-m", "mypkg"]
+    assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World"
+
+    (project / "src/mypkg/data.txt").write_text("foobar")
+    (project / "src/mypkg/mod.py").write_text("x = 42")
+    assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42"
+
+
+def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
+    files = {
+        "mypkg": {
+            "pyproject.toml": dedent("""\
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "mypkg"
+                version = "3.14159"
+
+                [tool.setuptools]
+                packages = ["pkg"]
+                py-modules = ["mod"]
+                """),
+            "pkg": {"__init__.py": "a = 4"},
+            "mod.py": "b = 2",
+        },
+    }
+    jaraco.path.build(files, prefix=tmp_path)
+    project = tmp_path / "mypkg"
+
+    cmd = [venv.exe(), "-m", "pip", "install",
+           "--no-build-isolation",  # required to force current version of setuptools
+           "-e", str(project), *editable_opts]
+    print(str(subprocess.check_output(cmd), "utf-8"))
+    cmd = [venv.exe(), "-c", "import pkg, mod; print(pkg.a, mod.b)"]
+    assert subprocess.check_output(cmd).strip() == b"4 2"
+
+
+def test_editable_with_single_module(tmp_path, venv, editable_opts):
+    files = {
+        "mypkg": {
+            "pyproject.toml": dedent("""\
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "mod"
+                version = "3.14159"
+
+                [tool.setuptools]
+                py-modules = ["mod"]
+                """),
+            "mod.py": "b = 2",
+        },
+    }
+    jaraco.path.build(files, prefix=tmp_path)
+    project = tmp_path / "mypkg"
+
+    cmd = [venv.exe(), "-m", "pip", "install",
+           "--no-build-isolation",  # required to force current version of setuptools
+           "-e", str(project), *editable_opts]
+    print(str(subprocess.check_output(cmd), "utf-8"))
+    cmd = [venv.exe(), "-c", "import mod; print(mod.b)"]
+    assert subprocess.check_output(cmd).strip() == b"2"
+
+
+class TestLegacyNamespaces:
+    """Ported from test_develop"""
+
+    def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
+        """
+        Installing two packages sharing the same namespace, one installed
+        naturally using pip or `--single-version-externally-managed`
+        and the other installed in editable mode should leave the namespace
+        intact and both packages reachable by import.
+ """ + build_system = """\ + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + """ + pkg_A = namespaces.build_namespace_package(tmp_path, 'myns.pkgA') + pkg_B = namespaces.build_namespace_package(tmp_path, 'myns.pkgB') + (pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8") + (pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8") + # use pip to install to the target directory + opts = editable_opts[:] + opts.append("--no-build-isolation") # force current version of setuptools + venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts]) + venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts]) + venv.run(["python", "-c", "import myns.pkgA; import myns.pkgB"]) + # additionally ensure that pkg_resources import works + venv.run(["python", "-c", "import pkg_resources"]) + + +class TestPep420Namespaces: + def test_namespace_package_importable(self, venv, tmp_path, editable_opts): + """ + Installing two packages sharing the same namespace, one installed + normally using pip and the other installed in editable mode + should allow importing both packages. + """ + pkg_A = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgA') + pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB') + # use pip to install to the target directory + opts = editable_opts[:] + opts.append("--no-build-isolation") # force current version of setuptools + venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts]) + venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts]) + venv.run(["python", "-c", "import myns.n.pkgA; import myns.n.pkgB"]) + + def test_namespace_created_via_package_dir(self, venv, tmp_path, editable_opts): + """Currently users can create a namespace by tweaking `package_dir`""" + files = { + "pkgA": { + "pyproject.toml": dedent("""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "pkgA" + version = "3.14159" + + [tool.setuptools] + package-dir = {"myns.n.pkgA" = "src"} + """), + "src": {"__init__.py": "a = 1"}, + }, + } + jaraco.path.build(files, prefix=tmp_path) + pkg_A = tmp_path / "pkgA" + pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB') + pkg_C = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgC') + + # use pip to install to the target directory + opts = editable_opts[:] + opts.append("--no-build-isolation") # force current version of setuptools + venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts]) + venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts]) + venv.run(["python", "-m", "pip", "install", "-e", str(pkg_C), *opts]) + venv.run(["python", "-c", "from myns.n import pkgA, pkgB, pkgC"]) + + def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path): + """Sometimes users might specify an ``include`` pattern that ignores parent + packages. In a normal installation this would ignore all modules inside the + parent packages, and make them namespaces (reported in issue #3504), + so the editable mode should preserve this behaviour. 
+ """ + files = { + "pkgA": { + "pyproject.toml": dedent("""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "pkgA" + version = "3.14159" + + [tool.setuptools] + packages.find.include = ["mypkg.*"] + """), + "mypkg": { + "__init__.py": "", + "other.py": "b = 1", + "n": { + "__init__.py": "", + "pkgA.py": "a = 1", + }, + }, + "MANIFEST.in": EXAMPLE["MANIFEST.in"], + }, + } + jaraco.path.build(files, prefix=tmp_path) + pkg_A = tmp_path / "pkgA" + + # use pip to install to the target directory + opts = ["--no-build-isolation"] # force current version of setuptools + venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts]) + out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"]) + assert str(out, "utf-8").strip() == "1" + cmd = """\ + try: + import mypkg.other + except ImportError: + print("mypkg.other not defined") + """ + out = venv.run(["python", "-c", dedent(cmd)]) + assert "mypkg.other not defined" in str(out, "utf-8") + + +# Moved here from test_develop: +@pytest.mark.xfail( + platform.python_implementation() == 'PyPy', + reason="Workaround fails on PyPy (why?)", +) +def test_editable_with_prefix(tmp_path, sample_project, editable_opts): + """ + Editable install to a prefix should be discoverable. + """ + prefix = tmp_path / 'prefix' + + # figure out where pip will likely install the package + site_packages = prefix / next( + Path(path).relative_to(sys.prefix) + for path in sys.path + if 'site-packages' in path and path.startswith(sys.prefix) + ) + site_packages.mkdir(parents=True) + + # install workaround + pip_run.launch.inject_sitecustomize(site_packages) + + env = dict(os.environ, PYTHONPATH=str(site_packages)) + cmd = [ + sys.executable, + '-m', + 'pip', + 'install', + '--editable', + str(sample_project), + '--prefix', + str(prefix), + '--no-build-isolation', + *editable_opts, + ] + subprocess.check_call(cmd, env=env) + + # now run 'sample' with the prefix on the PYTHONPATH + bin = 'Scripts' if platform.system() == 'Windows' else 'bin' + exe = prefix / bin / 'sample' + if sys.version_info < (3, 8) and platform.system() == 'Windows': + exe = str(exe) + subprocess.check_call([exe], env=env) + + +class TestFinderTemplate: + """This test focus in getting a particular implementation detail right. + If at some point in time the implementation is changed for something different, + this test can be modified or even excluded. 
+ """ + def install_finder(self, finder): + loc = {} + exec(finder, loc, loc) + loc["install"]() + + def test_packages(self, tmp_path): + files = { + "src1": { + "pkg1": { + "__init__.py": "", + "subpkg": {"mod1.py": "a = 42"}, + }, + }, + "src2": {"mod2.py": "a = 43"}, + } + jaraco.path.build(files, prefix=tmp_path) + + mapping = { + "pkg1": str(tmp_path / "src1/pkg1"), + "mod2": str(tmp_path / "src2/mod2") + } + template = _finder_template(str(uuid4()), mapping, {}) + + with contexts.save_paths(), contexts.save_sys_modules(): + for mod in ("pkg1", "pkg1.subpkg", "pkg1.subpkg.mod1", "mod2"): + sys.modules.pop(mod, None) + + self.install_finder(template) + mod1 = import_module("pkg1.subpkg.mod1") + mod2 = import_module("mod2") + subpkg = import_module("pkg1.subpkg") + + assert mod1.a == 42 + assert mod2.a == 43 + expected = str((tmp_path / "src1/pkg1/subpkg").resolve()) + assert_path(subpkg, expected) + + def test_namespace(self, tmp_path): + files = {"pkg": {"__init__.py": "a = 13", "text.txt": "abc"}} + jaraco.path.build(files, prefix=tmp_path) + + mapping = {"ns.othername": str(tmp_path / "pkg")} + namespaces = {"ns": []} + + template = _finder_template(str(uuid4()), mapping, namespaces) + with contexts.save_paths(), contexts.save_sys_modules(): + for mod in ("ns", "ns.othername"): + sys.modules.pop(mod, None) + + self.install_finder(template) + pkg = import_module("ns.othername") + text = importlib_resources.files(pkg) / "text.txt" + + expected = str((tmp_path / "pkg").resolve()) + assert_path(pkg, expected) + assert pkg.a == 13 + + # Make sure resources can also be found + assert text.read_text(encoding="utf-8") == "abc" + + def test_combine_namespaces(self, tmp_path): + files = { + "src1": {"ns": {"pkg1": {"__init__.py": "a = 13"}}}, + "src2": {"ns": {"mod2.py": "b = 37"}}, + } + jaraco.path.build(files, prefix=tmp_path) + + mapping = { + "ns.pkgA": str(tmp_path / "src1/ns/pkg1"), + "ns": str(tmp_path / "src2/ns"), + } + namespaces_ = {"ns": [str(tmp_path / "src1"), str(tmp_path / "src2")]} + template = _finder_template(str(uuid4()), mapping, namespaces_) + + with contexts.save_paths(), contexts.save_sys_modules(): + for mod in ("ns", "ns.pkgA", "ns.mod2"): + sys.modules.pop(mod, None) + + self.install_finder(template) + pkgA = import_module("ns.pkgA") + mod2 = import_module("ns.mod2") + + expected = str((tmp_path / "src1/ns/pkg1").resolve()) + assert_path(pkgA, expected) + assert pkgA.a == 13 + assert mod2.b == 37 + + def test_dynamic_path_computation(self, tmp_path): + # Follows the example in PEP 420 + files = { + "project1": {"parent": {"child": {"one.py": "x = 1"}}}, + "project2": {"parent": {"child": {"two.py": "x = 2"}}}, + "project3": {"parent": {"child": {"three.py": "x = 3"}}}, + } + jaraco.path.build(files, prefix=tmp_path) + mapping = {} + namespaces_ = {"parent": [str(tmp_path / "project1/parent")]} + template = _finder_template(str(uuid4()), mapping, namespaces_) + + mods = (f"parent.child.{name}" for name in ("one", "two", "three")) + with contexts.save_paths(), contexts.save_sys_modules(): + for mod in ("parent", "parent.child", "parent.child", *mods): + sys.modules.pop(mod, None) + + self.install_finder(template) + + one = import_module("parent.child.one") + assert one.x == 1 + + with pytest.raises(ImportError): + import_module("parent.child.two") + + sys.path.append(str(tmp_path / "project2")) + two = import_module("parent.child.two") + assert two.x == 2 + + with pytest.raises(ImportError): + import_module("parent.child.three") + + sys.path.append(str(tmp_path / 
"project3")) + three = import_module("parent.child.three") + assert three.x == 3 + + def test_no_recursion(self, tmp_path): + # See issue #3550 + files = { + "pkg": { + "__init__.py": "from . import pkg", + }, + } + jaraco.path.build(files, prefix=tmp_path) + + mapping = { + "pkg": str(tmp_path / "pkg"), + } + template = _finder_template(str(uuid4()), mapping, {}) + + with contexts.save_paths(), contexts.save_sys_modules(): + sys.modules.pop("pkg", None) + + self.install_finder(template) + with pytest.raises(ImportError, match="pkg"): + import_module("pkg") + + def test_similar_name(self, tmp_path): + files = { + "foo": { + "__init__.py": "", + "bar": { + "__init__.py": "", + } + }, + } + jaraco.path.build(files, prefix=tmp_path) + + mapping = { + "foo": str(tmp_path / "foo"), + } + template = _finder_template(str(uuid4()), mapping, {}) + + with contexts.save_paths(), contexts.save_sys_modules(): + sys.modules.pop("foo", None) + sys.modules.pop("foo.bar", None) + + self.install_finder(template) + with pytest.raises(ImportError, match="foobar"): + import_module("foobar") + + +def test_pkg_roots(tmp_path): + """This test focus in getting a particular implementation detail right. + If at some point in time the implementation is changed for something different, + this test can be modified or even excluded. + """ + files = { + "a": {"b": {"__init__.py": "ab = 1"}, "__init__.py": "a = 1"}, + "d": {"__init__.py": "d = 1", "e": {"__init__.py": "de = 1"}}, + "f": {"g": {"h": {"__init__.py": "fgh = 1"}}}, + "other": {"__init__.py": "abc = 1"}, + "another": {"__init__.py": "abcxyz = 1"}, + "yet_another": {"__init__.py": "mnopq = 1"}, + } + jaraco.path.build(files, prefix=tmp_path) + package_dir = { + "a.b.c": "other", + "a.b.c.x.y.z": "another", + "m.n.o.p.q": "yet_another" + } + packages = [ + "a", + "a.b", + "a.b.c", + "a.b.c.x.y", + "a.b.c.x.y.z", + "d", + "d.e", + "f", + "f.g", + "f.g.h", + "m.n.o.p.q", + ] + roots = _find_package_roots(packages, package_dir, tmp_path) + assert roots == { + "a": str(tmp_path / "a"), + "a.b.c": str(tmp_path / "other"), + "a.b.c.x.y.z": str(tmp_path / "another"), + "d": str(tmp_path / "d"), + "f": str(tmp_path / "f"), + "m.n.o.p.q": str(tmp_path / "yet_another"), + } + + ns = set(dict(_find_namespaces(packages, roots))) + assert ns == {"f", "f.g"} + + ns = set(_find_virtual_namespaces(roots)) + assert ns == {"a.b", "a.b.c.x", "a.b.c.x.y", "m", "m.n", "m.n.o", "m.n.o.p"} + + +class TestOverallBehaviour: + PYPROJECT = """\ + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + + [project] + name = "mypkg" + version = "3.14159" + """ + + FLAT_LAYOUT = { + "pyproject.toml": dedent(PYPROJECT), + "MANIFEST.in": EXAMPLE["MANIFEST.in"], + "otherfile.py": "", + "mypkg": { + "__init__.py": "", + "mod1.py": "var = 42", + "subpackage": { + "__init__.py": "", + "mod2.py": "var = 13", + "resource_file.txt": "resource 39", + }, + }, + } + + EXAMPLES = { + "flat-layout": FLAT_LAYOUT, + "src-layout": { + "pyproject.toml": dedent(PYPROJECT), + "MANIFEST.in": EXAMPLE["MANIFEST.in"], + "otherfile.py": "", + "src": {"mypkg": FLAT_LAYOUT["mypkg"]}, + }, + "custom-layout": { + "pyproject.toml": dedent(PYPROJECT) + dedent("""\ + [tool.setuptools] + packages = ["mypkg", "mypkg.subpackage"] + + [tool.setuptools.package-dir] + "mypkg.subpackage" = "other" + """), + "MANIFEST.in": EXAMPLE["MANIFEST.in"], + "otherfile.py": "", + "mypkg": { + "__init__.py": "", + "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"], # type: ignore + }, + "other": 
FLAT_LAYOUT["mypkg"]["subpackage"], # type: ignore + }, + "namespace": { + "pyproject.toml": dedent(PYPROJECT), + "MANIFEST.in": EXAMPLE["MANIFEST.in"], + "otherfile.py": "", + "src": { + "mypkg": { + "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"], # type: ignore + "subpackage": FLAT_LAYOUT["mypkg"]["subpackage"], # type: ignore + }, + }, + }, + } + + @pytest.mark.parametrize("layout", EXAMPLES.keys()) + def test_editable_install(self, tmp_path, venv, layout, editable_opts): + project, _ = install_project( + "mypkg", venv, tmp_path, self.EXAMPLES[layout], *editable_opts + ) + + # Ensure stray files are not importable + cmd_import_error = """\ + try: + import otherfile + except ImportError as ex: + print(ex) + """ + out = venv.run(["python", "-c", dedent(cmd_import_error)]) + assert b"No module named 'otherfile'" in out + + # Ensure the modules are importable + cmd_get_vars = """\ + import mypkg, mypkg.mod1, mypkg.subpackage.mod2 + print(mypkg.mod1.var, mypkg.subpackage.mod2.var) + """ + out = venv.run(["python", "-c", dedent(cmd_get_vars)]) + assert b"42 13" in out + + # Ensure resources are reachable + cmd_get_resource = """\ + import mypkg.subpackage + from setuptools._importlib import resources as importlib_resources + text = importlib_resources.files(mypkg.subpackage) / "resource_file.txt" + print(text.read_text(encoding="utf-8")) + """ + out = venv.run(["python", "-c", dedent(cmd_get_resource)]) + assert b"resource 39" in out + + # Ensure files are editable + mod1 = next(project.glob("**/mod1.py")) + mod2 = next(project.glob("**/mod2.py")) + resource_file = next(project.glob("**/resource_file.txt")) + + mod1.write_text("var = 17", encoding="utf-8") + mod2.write_text("var = 781", encoding="utf-8") + resource_file.write_text("resource 374", encoding="utf-8") + + out = venv.run(["python", "-c", dedent(cmd_get_vars)]) + assert b"42 13" not in out + assert b"17 781" in out + + out = venv.run(["python", "-c", dedent(cmd_get_resource)]) + assert b"resource 39" not in out + assert b"resource 374" in out + + +class TestLinkTree: + FILES = deepcopy(TestOverallBehaviour.EXAMPLES["src-layout"]) + FILES["pyproject.toml"] += dedent("""\ + [tool.setuptools] + # Temporary workaround: both `include-package-data` and `package-data` configs + # can be removed after #3260 is fixed. 
+ include-package-data = false + package-data = {"*" = ["*.txt"]} + + [tool.setuptools.packages.find] + where = ["src"] + exclude = ["*.subpackage*"] + """) + FILES["src"]["mypkg"]["resource.not_in_manifest"] = "abc" + + def test_generated_tree(self, tmp_path): + jaraco.path.build(self.FILES, prefix=tmp_path) + + with _Path(tmp_path): + name = "mypkg-3.14159" + dist = Distribution({"script_name": "%PEP 517%"}) + dist.parse_config_files() + + wheel = Mock() + aux = tmp_path / ".aux" + build = tmp_path / ".build" + aux.mkdir() + build.mkdir() + + build_py = dist.get_command_obj("build_py") + build_py.editable_mode = True + build_py.build_lib = str(build) + build_py.ensure_finalized() + outputs = build_py.get_outputs() + output_mapping = build_py.get_output_mapping() + + make_tree = _LinkTree(dist, name, aux, build) + make_tree(wheel, outputs, output_mapping) + + mod1 = next(aux.glob("**/mod1.py")) + expected = tmp_path / "src/mypkg/mod1.py" + assert_link_to(mod1, expected) + + assert next(aux.glob("**/subpackage"), None) is None + assert next(aux.glob("**/mod2.py"), None) is None + assert next(aux.glob("**/resource_file.txt"), None) is None + + assert next(aux.glob("**/resource.not_in_manifest"), None) is None + + def test_strict_install(self, tmp_path, venv): + opts = ["--config-settings", "editable-mode=strict"] + install_project("mypkg", venv, tmp_path, self.FILES, *opts) + + out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"]) + assert b"42" in out + + # Ensure packages excluded from distribution are not importable + cmd_import_error = """\ + try: + from mypkg import subpackage + except ImportError as ex: + print(ex) + """ + out = venv.run(["python", "-c", dedent(cmd_import_error)]) + assert b"cannot import name 'subpackage'" in out + + # Ensure resource files excluded from distribution are not reachable + cmd_get_resource = """\ + import mypkg + from setuptools._importlib import resources as importlib_resources + try: + text = importlib_resources.files(mypkg) / "resource.not_in_manifest" + print(text.read_text(encoding="utf-8")) + except FileNotFoundError as ex: + print(ex) + """ + out = venv.run(["python", "-c", dedent(cmd_get_resource)]) + assert b"No such file or directory" in out + assert b"resource.not_in_manifest" in out + + +@pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning") +def test_compat_install(tmp_path, venv): + # TODO: Remove `compat` after Dec/2022. 
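+    # Note: the transitional "compat" mode roughly emulates a classic
+    # ``setup.py develop`` install: the original project directory ends up on
+    # ``sys.path``, so stray modules stay importable and custom ``package_dir``
+    # mappings are not honoured (asserted below).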
+ opts = ["--config-settings", "editable-mode=compat"] + files = TestOverallBehaviour.EXAMPLES["custom-layout"] + install_project("mypkg", venv, tmp_path, files, *opts) + + out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"]) + assert b"42" in out + + expected_path = comparable_path(str(tmp_path)) + + # Compatible behaviour will make spurious modules and excluded + # files importable directly from the original path + for cmd in ( + "import otherfile; print(otherfile)", + "import other; print(other)", + "import mypkg; print(mypkg)", + ): + out = comparable_path(str(venv.run(["python", "-c", cmd]), "utf-8")) + assert expected_path in out + + # Compatible behaviour will not consider custom mappings + cmd = """\ + try: + from mypkg import subpackage; + except ImportError as ex: + print(ex) + """ + out = str(venv.run(["python", "-c", dedent(cmd)]), "utf-8") + assert "cannot import name 'subpackage'" in out + + +def test_pbr_integration(tmp_path, venv, editable_opts): + """Ensure editable installs work with pbr, issue #3500""" + files = { + "pyproject.toml": dedent("""\ + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + """), + "setup.py": dedent("""\ + __import__('setuptools').setup( + pbr=True, + setup_requires=["pbr"], + ) + """), + "setup.cfg": dedent("""\ + [metadata] + name = mypkg + + [files] + packages = + mypkg + """), + "mypkg": { + "__init__.py": "", + "hello.py": "print('Hello world!')", + }, + "other": {"test.txt": "Another file in here."}, + } + venv.run(["python", "-m", "pip", "install", "pbr"]) + + with contexts.environment(PBR_VERSION="0.42"): + install_project("mypkg", venv, tmp_path, files, *editable_opts) + + out = venv.run(["python", "-c", "import mypkg.hello"]) + assert b"Hello world!" in out + + +class TestCustomBuildPy: + """ + Issue #3501 indicates that some plugins/customizations might rely on: + + 1. ``build_py`` not running + 2. ``build_py`` always copying files to ``build_lib`` + + During the transition period setuptools should prevent potential errors from + happening due to those assumptions. + """ + # TODO: Remove tests after _run_build_steps is removed. 
+ + FILES = { + **TestOverallBehaviour.EXAMPLES["flat-layout"], + "setup.py": dedent("""\ + import pathlib + from setuptools import setup + from setuptools.command.build_py import build_py as orig + + class my_build_py(orig): + def run(self): + super().run() + raise ValueError("TEST_RAISE") + + setup(cmdclass={"build_py": my_build_py}) + """), + } + + def test_safeguarded_from_errors(self, tmp_path, venv): + """Ensure that errors in custom build_py are reported as warnings""" + # Warnings should show up + _, out = install_project("mypkg", venv, tmp_path, self.FILES) + assert b"SetuptoolsDeprecationWarning" in out + assert b"ValueError: TEST_RAISE" in out + # but installation should be successful + out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"]) + assert b"42" in out + + +class TestCustomBuildWheel: + def install_custom_build_wheel(self, dist): + bdist_wheel_cls = dist.get_command_class("bdist_wheel") + + class MyBdistWheel(bdist_wheel_cls): + def get_tag(self): + # In issue #3513, we can see that some extensions may try to access + # the `plat_name` property in bdist_wheel + if self.plat_name.startswith("macosx-"): + _ = "macOS platform" + return super().get_tag() + + dist.cmdclass["bdist_wheel"] = MyBdistWheel + + def test_access_plat_name(self, tmpdir_cwd): + # Even when a custom bdist_wheel tries to access plat_name the build should + # be successful + jaraco.path.build({"module.py": "x = 42"}) + dist = Distribution() + dist.script_name = "setup.py" + dist.set_defaults() + self.install_custom_build_wheel(dist) + cmd = editable_wheel(dist) + cmd.ensure_finalized() + cmd.run() + wheel_file = str(next(Path().glob('dist/*.whl'))) + assert "editable" in wheel_file + + +class TestCustomBuildExt: + def install_custom_build_ext_distutils(self, dist): + from distutils.command.build_ext import build_ext as build_ext_cls + + class MyBuildExt(build_ext_cls): + pass + + dist.cmdclass["build_ext"] = MyBuildExt + + @pytest.mark.skipif( + sys.platform != "linux", reason="compilers may fail without correct setup" + ) + def test_distutils_leave_inplace_files(self, tmpdir_cwd): + jaraco.path.build({"module.c": ""}) + attrs = { + "ext_modules": [Extension("module", ["module.c"])], + } + dist = Distribution(attrs) + dist.script_name = "setup.py" + dist.set_defaults() + self.install_custom_build_ext_distutils(dist) + cmd = editable_wheel(dist) + cmd.ensure_finalized() + cmd.run() + wheel_file = str(next(Path().glob('dist/*.whl'))) + assert "editable" in wheel_file + files = [p for p in Path().glob("module.*") if p.suffix != ".c"] + assert len(files) == 1 + name = files[0].name + assert any(name.endswith(ext) for ext in EXTENSION_SUFFIXES) + + +def test_debugging_tips(tmpdir_cwd, monkeypatch): + """Make sure to display useful debugging tips to the user.""" + jaraco.path.build({"module.py": "x = 42"}) + dist = Distribution() + dist.script_name = "setup.py" + dist.set_defaults() + cmd = editable_wheel(dist) + cmd.ensure_finalized() + + SimulatedErr = type("SimulatedErr", (Exception,), {}) + simulated_failure = Mock(side_effect=SimulatedErr()) + monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure) + + expected_msg = "following steps are recommended to help debugging" + with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg): + cmd.run() + + +def install_project(name, venv, tmp_path, files, *opts): + project = tmp_path / name + project.mkdir() + jaraco.path.build(files, prefix=project) + opts = [*opts, "--no-build-isolation"] # force current 
version of setuptools
+    out = venv.run(
+        ["python", "-m", "pip", "-v", "install", "-e", str(project), *opts],
+        stderr=subprocess.STDOUT,
+    )
+    return project, out
+
+
+# ---- Assertion Helpers ----
+
+
+def assert_path(pkg, expected):
+    # __path__ is not guaranteed to exist, so we have to account for that
+    if pkg.__path__:
+        path = next(iter(pkg.__path__), None)
+        if path:
+            assert str(Path(path).resolve()) == expected
+
+
+def assert_link_to(file: Path, other: Path):
+    if file.is_symlink():
+        assert str(file.resolve()) == str(other.resolve())
+    else:
+        file_stat = file.stat()
+        other_stat = other.stat()
+        assert file_stat[stat.ST_INO] == other_stat[stat.ST_INO]
+        assert file_stat[stat.ST_DEV] == other_stat[stat.ST_DEV]
+
+
+def comparable_path(str_with_path: str) -> str:
+    return str_with_path.lower().replace(os.sep, "/").replace("//", "/")
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index ee07b5a1be..6a2a989308 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -6,12 +6,19 @@ import stat
 import time
 from typing import List, Tuple
+from pathlib import Path
+from unittest import mock
 
 import pytest
 from jaraco import path
 
+from setuptools import errors
 from setuptools.command.egg_info import (
-    egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision,
+    EggInfoDeprecationWarning,
+    egg_info,
+    get_pkg_info_revision,
+    manifest_maker,
+    write_entries,
 )
 from setuptools.dist import Distribution
@@ -24,6 +31,28 @@ class Environment(str):
     pass
 
 
+@pytest.fixture
+def env():
+    with contexts.tempdir(prefix='setuptools-test.') as env_dir:
+        env = Environment(env_dir)
+        os.chmod(env_dir, stat.S_IRWXU)
+        subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
+        env.paths = dict(
+            (dirname, os.path.join(env_dir, dirname))
+            for dirname in subs
+        )
+        list(map(os.mkdir, env.paths.values()))
+        path.build({
+            env.paths['home']: {
+                '.pydistutils.cfg': DALS("""
+                [egg_info]
+                egg-base = %(egg-base)s
+                """ % env.paths)
+            }
+        })
+        yield env
+
+
 class TestEggInfo:
 
     setup_script = DALS("""
@@ -51,27 +80,6 @@ def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]:
         version_str = pkg_info_lines[0].split(' ')[1]
         return tuple(map(int, version_str.split('.')[:2]))
 
-    @pytest.fixture
-    def env(self):
-        with contexts.tempdir(prefix='setuptools-test.') as env_dir:
-            env = Environment(env_dir)
-            os.chmod(env_dir, stat.S_IRWXU)
-            subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
-            env.paths = dict(
-                (dirname, os.path.join(env_dir, dirname))
-                for dirname in subs
-            )
-            list(map(os.mkdir, env.paths.values()))
-            path.build({
-                env.paths['home']: {
-                    '.pydistutils.cfg': DALS("""
-                    [egg_info]
-                    egg-base = %(egg-base)s
-                    """ % env.paths)
-                }
-            })
-            yield env
-
     def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env):
         """
         When the egg_info section is empty or not present, running
@@ -151,6 +159,21 @@ def test_expected_files_produced(self, tmpdir_cwd, env):
         ]
         assert sorted(actual) == expected
 
+    def test_handling_utime_error(self, tmpdir_cwd, env):
+        dist = Distribution()
+        ei = egg_info(dist)
+        utime_patch = mock.patch('os.utime', side_effect=OSError("TEST"))
+        mkpath_patch = mock.patch(
+            'setuptools.command.egg_info.egg_info.mkpath', return_value=None
+        )
+
+        with utime_patch, mkpath_patch:
+            import distutils.errors
+
+            msg = r"Cannot update time stamp of directory 'None'"
+            with pytest.raises(distutils.errors.DistutilsFileError, match=msg):
+                ei.run()
+
     def test_license_is_a_string(self, tmpdir_cwd, 
env):
         setup_config = DALS("""
             [metadata]
@@ -1084,3 +1107,27 @@ def test_egg_info_tag_only_once(self, tmpdir_cwd, env):
 
     def test_get_pkg_info_revision_deprecated(self):
         pytest.warns(EggInfoDeprecationWarning, get_pkg_info_revision)
+
+
+class TestWriteEntries:
+
+    def test_invalid_entry_point(self, tmpdir_cwd, env):
+        dist = Distribution({"name": "foo", "version": "0.0.1"})
+        dist.entry_points = {"foo": "foo = invalid-identifier:foo"}
+        cmd = dist.get_command_obj("egg_info")
+        expected_msg = r"Problems to parse .*invalid-identifier.*"
+        with pytest.raises(errors.OptionError, match=expected_msg) as ex:
+            write_entries(cmd, "entry_points", "entry_points.txt")
+        assert "ensure entry-point follows the spec" in ex.value.args[0]
+
+    def test_valid_entry_point(self, tmpdir_cwd, env):
+        dist = Distribution({"name": "foo", "version": "0.0.1"})
+        dist.entry_points = {
+            "abc": "foo = bar:baz",
+            "def": ["faa = bor:boz"],
+        }
+        cmd = dist.get_command_obj("egg_info")
+        write_entries(cmd, "entry_points", "entry_points.txt")
+        content = Path("entry_points.txt").read_text(encoding="utf-8")
+        assert "[abc]\nfoo = bar:baz\n" in content
+        assert "[def]\nfaa = bor:boz\n" in content
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
index 906713f61d..6c605e02b4 100644
--- a/setuptools/tests/test_find_packages.py
+++ b/setuptools/tests/test_find_packages.py
@@ -1,4 +1,4 @@
-"""Tests for setuptools.find_packages()."""
+"""Tests for automatic package discovery"""
 import os
 import sys
 import shutil
@@ -9,6 +9,7 @@
 
 from setuptools import find_packages
 from setuptools import find_namespace_packages
+from setuptools.discovery import FlatLayoutPackageFinder
 
 
 # modeled after CPython's test.support.can_symlink
@@ -178,3 +179,68 @@ def test_pep420_ns_package_no_non_package_dirs(self):
         shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
         packages = find_namespace_packages(self.dist_dir)
         self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
+
+
+class TestFlatLayoutPackageFinder:
+    EXAMPLES = {
+        "hidden-folders": (
+            [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"],
+            ["pkg", "pkg.nested"]
+        ),
+        "private-packages": (
+            ["_pkg/__init__.py", "pkg/_private/__init__.py"],
+            ["pkg", "pkg._private"]
+        ),
+        "invalid-name": (
+            ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"],
+            []
+        ),
+        "docs": (
+            ["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"],
+            ["pkg"]
+        ),
+        "tests": (
+            ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"],
+            ["pkg"]
+        ),
+        "examples": (
+            [
+                "pkg/__init__.py",
+                "examples/__init__.py",
+                "examples/file.py",
+                "example/other_file.py",
+                # Sub-packages should always be fine
+                "pkg/example/__init__.py",
+                "pkg/examples/__init__.py",
+            ],
+            ["pkg", "pkg.examples", "pkg.example"]
+        ),
+        "tool-specific": (
+            [
+                "htmlcov/index.html",
+                "pkg/__init__.py",
+                "tasks/__init__.py",
+                "tasks/subpackage/__init__.py",
+                "fabfile/__init__.py",
+                "fabfile/subpackage/__init__.py",
+                # Sub-packages should always be fine
+                "pkg/tasks/__init__.py",
+                "pkg/fabfile/__init__.py",
+            ],
+            ["pkg", "pkg.tasks", "pkg.fabfile"]
+        )
+    }
+
+    @pytest.mark.parametrize("example", EXAMPLES.keys())
+    def test_unwanted_directories_not_included(self, tmp_path, example):
+        files, expected_packages = self.EXAMPLES[example]
+        ensure_files(tmp_path, files)
+        found_packages = FlatLayoutPackageFinder.find(str(tmp_path))
+        assert set(found_packages) == set(expected_packages)
+
+
+def ensure_files(root_path, files):
+    for file 
in files: + path = root_path / file + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py new file mode 100644 index 0000000000..4ef6880120 --- /dev/null +++ b/setuptools/tests/test_find_py_modules.py @@ -0,0 +1,81 @@ +"""Tests for automatic discovery of modules""" +import os + +import pytest + +from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder + +from .test_find_packages import ensure_files, has_symlink + + +class TestModuleFinder: + def find(self, path, *args, **kwargs): + return set(ModuleFinder.find(str(path), *args, **kwargs)) + + EXAMPLES = { + # circumstance: (files, kwargs, expected_modules) + "simple_folder": ( + ["file.py", "other.py"], + {}, # kwargs + ["file", "other"], + ), + "exclude": ( + ["file.py", "other.py"], + {"exclude": ["f*"]}, + ["other"], + ), + "include": ( + ["file.py", "fole.py", "other.py"], + {"include": ["f*"], "exclude": ["fo*"]}, + ["file"], + ), + "invalid-name": ( + ["my-file.py", "other.file.py"], + {}, + [] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_finder(self, tmp_path, example): + files, kwargs, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path, **kwargs) == set(expected_modules) + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self, tmp_path): + src = "_myfiles/file.py" + ensure_files(tmp_path, [src]) + os.symlink(tmp_path / src, tmp_path / "link.py") + assert self.find(tmp_path) == {"link"} + + +class TestFlatLayoutModuleFinder: + def find(self, path, *args, **kwargs): + return set(FlatLayoutModuleFinder.find(str(path))) + + EXAMPLES = { + # circumstance: (files, expected_modules) + "hidden-files": ( + [".module.py"], + [] + ), + "private-modules": ( + ["_module.py"], + [] + ), + "common-names": ( + ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"], + ["mod"] + ), + "tool-specific": ( + ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"], + ["mod"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_files_not_included(self, tmp_path, example): + files, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path) == set(expected_modules) diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index a5ddd56df0..aa2b502b6c 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -1,4 +1,6 @@ +import inspect import logging +import os import pytest @@ -34,3 +36,18 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): log_level = logger.getEffectiveLevel() log_level_name = logging.getLevelName(log_level) assert log_level_name == expected_level + + +def test_patching_does_not_cause_problems(): + # Ensure `dist.log` is only patched if necessary + + import setuptools.logging + from distutils import dist + + setuptools.logging.configure() + + if os.getenv("SETUPTOOLS_USE_DISTUTILS", "local").lower() == "local": + # Modern logging infra, no problematic patching. 
+ assert isinstance(dist.log, logging.Logger) + else: + assert inspect.ismodule(dist.log) diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index 82bdb9c643..3a973b01c4 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """sdist tests""" import contextlib @@ -8,6 +7,7 @@ import tempfile import itertools import io +import logging from distutils import log from distutils.errors import DistutilsTemplateError @@ -18,6 +18,9 @@ import pytest +IS_PYPY = '__pypy__' in sys.builtin_module_names + + def make_local_path(s): """Converts '/' in a string to os.sep""" return s.replace('/', os.sep) @@ -321,41 +324,29 @@ class TestFileListTest(TempDirTestCase): to ensure setuptools' version of FileList keeps parity with distutils. """ - def setup_method(self, method): - super(TestFileListTest, self).setup_method(method) - self.threshold = log.set_threshold(log.FATAL) - self._old_log = log.Log._log - log.Log._log = self._log - self.logs = [] + @pytest.fixture(autouse=os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib") + def _compat_record_logs(self, monkeypatch, caplog): + """Account for stdlib compatibility""" + def _log(_logger, level, msg, args): + exc = sys.exc_info() + rec = logging.LogRecord("distutils", level, "", 0, msg, args, exc) + caplog.records.append(rec) - def teardown_method(self, method): - log.set_threshold(self.threshold) - log.Log._log = self._old_log - super(TestFileListTest, self).teardown_method(method) - - def _log(self, level, msg, args): - if level not in (log.DEBUG, log.INFO, log.WARN, log.ERROR, log.FATAL): - raise ValueError('%s wrong log level' % str(level)) - self.logs.append((level, msg, args)) - - def get_logs(self, *levels): - def _format(msg, args): - if len(args) == 0: - return msg - return msg % args - return [_format(msg, args) for level, msg, args - in self.logs if level in levels] - - def clear_logs(self): - self.logs = [] - - def assertNoWarnings(self): - assert self.get_logs(log.WARN) == [] - self.clear_logs() - - def assertWarnings(self): - assert len(self.get_logs(log.WARN)) > 0 - self.clear_logs() + monkeypatch.setattr(log.Log, "_log", _log) + + def get_records(self, caplog, *levels): + return [r for r in caplog.records if r.levelno in levels] + + def assertNoWarnings(self, caplog): + assert self.get_records(caplog, log.WARN) == [] + caplog.clear() + + def assertWarnings(self, caplog): + if IS_PYPY and not caplog.records: + pytest.xfail("caplog checks may not work well in PyPy") + else: + assert len(self.get_records(caplog, log.WARN)) > 0 + caplog.clear() def make_files(self, files): for file in files: @@ -472,7 +463,8 @@ def test_process_template_line_invalid(self): else: assert False, "Should have thrown an error" - def test_include(self): + def test_include(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # include file_list = FileList() @@ -481,14 +473,15 @@ def test_include(self): file_list.process_template_line('include *.py') file_list.sort() assert file_list.files == ['a.py'] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('include *.rb') file_list.sort() assert file_list.files == ['a.py'] - self.assertWarnings() + self.assertWarnings(caplog) - def test_exclude(self): + def test_exclude(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # exclude file_list = FileList() @@ -497,14 +490,15 @@ def test_exclude(self): file_list.process_template_line('exclude *.py') 
file_list.sort() assert file_list.files == ['b.txt', ml('d/c.py')] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('exclude *.rb') file_list.sort() assert file_list.files == ['b.txt', ml('d/c.py')] - self.assertWarnings() + self.assertWarnings(caplog) - def test_global_include(self): + def test_global_include(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # global-include file_list = FileList() @@ -513,14 +507,15 @@ def test_global_include(self): file_list.process_template_line('global-include *.py') file_list.sort() assert file_list.files == ['a.py', ml('d/c.py')] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('global-include *.rb') file_list.sort() assert file_list.files == ['a.py', ml('d/c.py')] - self.assertWarnings() + self.assertWarnings(caplog) - def test_global_exclude(self): + def test_global_exclude(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # global-exclude file_list = FileList() @@ -529,14 +524,15 @@ def test_global_exclude(self): file_list.process_template_line('global-exclude *.py') file_list.sort() assert file_list.files == ['b.txt'] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('global-exclude *.rb') file_list.sort() assert file_list.files == ['b.txt'] - self.assertWarnings() + self.assertWarnings(caplog) - def test_recursive_include(self): + def test_recursive_include(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # recursive-include file_list = FileList() @@ -545,14 +541,15 @@ def test_recursive_include(self): file_list.process_template_line('recursive-include d *.py') file_list.sort() assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('recursive-include e *.py') file_list.sort() assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] - self.assertWarnings() + self.assertWarnings(caplog) - def test_recursive_exclude(self): + def test_recursive_exclude(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # recursive-exclude file_list = FileList() @@ -561,14 +558,15 @@ def test_recursive_exclude(self): file_list.process_template_line('recursive-exclude d *.py') file_list.sort() assert file_list.files == ['a.py', ml('d/c.txt')] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('recursive-exclude e *.py') file_list.sort() assert file_list.files == ['a.py', ml('d/c.txt')] - self.assertWarnings() + self.assertWarnings(caplog) - def test_graft(self): + def test_graft(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # graft file_list = FileList() @@ -577,14 +575,15 @@ def test_graft(self): file_list.process_template_line('graft d') file_list.sort() assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] - self.assertNoWarnings() + self.assertNoWarnings(caplog) file_list.process_template_line('graft e') file_list.sort() assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] - self.assertWarnings() + self.assertWarnings(caplog) - def test_prune(self): + def test_prune(self, caplog): + caplog.set_level(logging.DEBUG) ml = make_local_path # prune file_list = FileList() @@ -593,9 +592,9 @@ def test_prune(self): file_list.process_template_line('prune d') file_list.sort() assert file_list.files == ['a.py', ml('f/f.py')] - self.assertNoWarnings() + self.assertNoWarnings(caplog) 
file_list.process_template_line('prune e') file_list.sort() assert file_list.files == ['a.py', ml('f/f.py')] - self.assertWarnings() + self.assertWarnings(caplog) diff --git a/setuptools/tests/test_msvc.py b/setuptools/tests/test_msvc.py deleted file mode 100644 index d1527bfa46..0000000000 --- a/setuptools/tests/test_msvc.py +++ /dev/null @@ -1,179 +0,0 @@ -""" -Tests for msvc support module. -""" - -import os -import contextlib -import distutils.errors -import mock - -import pytest - -from . import contexts - -# importing only setuptools should apply the patch -__import__('setuptools') - -pytest.importorskip("distutils.msvc9compiler") - - -def mock_reg(hkcu=None, hklm=None): - """ - Return a mock for distutils.msvc9compiler.Reg, patched - to mock out the functions that access the registry. - """ - - _winreg = getattr(distutils.msvc9compiler, '_winreg', None) - winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg) - - hives = { - winreg.HKEY_CURRENT_USER: hkcu or {}, - winreg.HKEY_LOCAL_MACHINE: hklm or {}, - } - - @classmethod - def read_keys(cls, base, key): - """Return list of registry keys.""" - hive = hives.get(base, {}) - return [ - k.rpartition('\\')[2] - for k in hive if k.startswith(key.lower()) - ] - - @classmethod - def read_values(cls, base, key): - """Return dict of registry keys and values.""" - hive = hives.get(base, {}) - return dict( - (k.rpartition('\\')[2], hive[k]) - for k in hive if k.startswith(key.lower()) - ) - - return mock.patch.multiple( - distutils.msvc9compiler.Reg, - read_keys=read_keys, read_values=read_values) - - -class TestModulePatch: - """ - Ensure that importing setuptools is sufficient to replace - the standard find_vcvarsall function with a version that - recognizes the "Visual C++ for Python" package. - """ - - key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir' - key_64 = key_32.replace(r'\microsoft', r'\wow6432node\microsoft') - - def test_patched(self): - "Test the module is actually patched" - mod_name = distutils.msvc9compiler.find_vcvarsall.__module__ - assert mod_name == "setuptools.msvc", "find_vcvarsall unpatched" - - def test_no_registry_entries_means_nothing_found(self): - """ - No registry entries or environment variable should lead to an error - directing the user to download vcpython27. - """ - find_vcvarsall = distutils.msvc9compiler.find_vcvarsall - query_vcvarsall = distutils.msvc9compiler.query_vcvarsall - - with contexts.environment(VS90COMNTOOLS=None): - with mock_reg(): - assert find_vcvarsall(9.0) is None - - try: - query_vcvarsall(9.0) - except Exception as exc: - expected = distutils.errors.DistutilsPlatformError - assert isinstance(exc, expected) - assert 'aka.ms/vcpython27' in str(exc) - - @pytest.fixture - def user_preferred_setting(self): - """ - Set up environment with different install dirs for user vs. system - and yield the user_install_dir for the expected result. - """ - with self.mock_install_dir() as user_install_dir: - with self.mock_install_dir() as system_install_dir: - reg = mock_reg( - hkcu={ - self.key_32: user_install_dir, - }, - hklm={ - self.key_32: system_install_dir, - self.key_64: system_install_dir, - }, - ) - with reg: - yield user_install_dir - - def test_prefer_current_user(self, user_preferred_setting): - """ - Ensure user's settings are preferred. 
- """ - result = distutils.msvc9compiler.find_vcvarsall(9.0) - expected = os.path.join(user_preferred_setting, 'vcvarsall.bat') - assert expected == result - - @pytest.fixture - def local_machine_setting(self): - """ - Set up environment with only the system environment configured. - """ - with self.mock_install_dir() as system_install_dir: - reg = mock_reg( - hklm={ - self.key_32: system_install_dir, - }, - ) - with reg: - yield system_install_dir - - def test_local_machine_recognized(self, local_machine_setting): - """ - Ensure machine setting is honored if user settings are not present. - """ - result = distutils.msvc9compiler.find_vcvarsall(9.0) - expected = os.path.join(local_machine_setting, 'vcvarsall.bat') - assert expected == result - - @pytest.fixture - def x64_preferred_setting(self): - """ - Set up environment with 64-bit and 32-bit system settings configured - and yield the canonical location. - """ - with self.mock_install_dir() as x32_dir: - with self.mock_install_dir() as x64_dir: - reg = mock_reg( - hklm={ - # This *should* only exist on 32-bit machines - self.key_32: x32_dir, - # This *should* only exist on 64-bit machines - self.key_64: x64_dir, - }, - ) - with reg: - yield x32_dir - - def test_ensure_64_bit_preferred(self, x64_preferred_setting): - """ - Ensure 64-bit system key is preferred. - """ - result = distutils.msvc9compiler.find_vcvarsall(9.0) - expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat') - assert expected == result - - @staticmethod - @contextlib.contextmanager - def mock_install_dir(): - """ - Make a mock install dir in a unique location so that tests can - distinguish which dir was detected in a given scenario. - """ - with contexts.tempdir() as result: - vcvarsall = os.path.join(result, 'vcvarsall.bat') - with open(vcvarsall, 'w'): - pass - yield result diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py index 1aca12dd37..271d6be562 100644 --- a/setuptools/tests/test_msvc14.py +++ b/setuptools/tests/test_msvc14.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Tests for msvc support module (msvc14 unit tests). """ diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 8e9435efef..8b5356dc8e 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -5,8 +5,8 @@ import urllib.request import urllib.error import http.client +from unittest import mock -import mock import pytest import setuptools.package_index @@ -21,7 +21,9 @@ def test_regex(self): Name (md5) - """.lstrip().format(**locals()) + """.lstrip().format( + **locals() + ) assert setuptools.package_index.PYPI_MD5.match(doc) def test_bad_url_bad_port(self): @@ -38,9 +40,7 @@ def test_bad_url_typo(self): # issue 16 # easy_install inquant.contentmirror.plone breaks because of a typo # in its home URL - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) + index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) url = ( 'url:%20https://svn.plone.org/svn' @@ -54,9 +54,7 @@ def test_bad_url_typo(self): assert isinstance(v, urllib.error.HTTPError) def test_bad_url_bad_status_line(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) + index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) def _urlopen(*args): raise http.client.BadStatusLine('line') @@ -74,9 +72,7 @@ def test_bad_url_double_scheme(self): """ A bad URL with a double scheme should raise a DistutilsError. 
""" - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) + index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) # issue 20 url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' @@ -93,22 +89,17 @@ def test_bad_url_double_scheme(self): raise RuntimeError("Did not raise") def test_bad_url_screwy_href(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) + index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) # issue #160 if sys.version_info[0] == 2 and sys.version_info[1] == 7: # this should not fail url = 'http://example.com' - page = ('') + page = '' index.process_index(url, page) def test_url_ok(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) + index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) url = 'file:///tmp/test_package_index' assert index.url_ok(url, True) @@ -169,9 +160,7 @@ def test_egg_fragment(self): 'b0', 'rc0', ] - post = [ - '.post0' - ] + post = ['.post0'] dev = [ '.dev0', ] @@ -186,10 +175,14 @@ def test_egg_fragment(self): for e in epoch for r in releases for p in sum([pre, post, dev], ['']) - for locs in local] + for locs in local + ] for v, vc in versions: - dists = list(setuptools.package_index.distros_for_url( - 'http://example.com/example.zip#egg=example-' + v)) + dists = list( + setuptools.package_index.distros_for_url( + 'http://example.com/example-foo.zip#egg=example-foo-' + v + ) + ) assert dists[0].version == '' assert dists[1].version == vc @@ -204,8 +197,7 @@ def test_download_git_with_rev(self, tmpdir): expected_dir = str(tmpdir / 'project@master') expected = ( - 'git clone --quiet ' - 'https://github.example/group/project {expected_dir}' + 'git clone --quiet ' 'https://github.example/group/project {expected_dir}' ).format(**locals()) first_call_args = os_system_mock.call_args_list[0][0] assert first_call_args == (expected,) @@ -226,8 +218,7 @@ def test_download_git_no_rev(self, tmpdir): expected_dir = str(tmpdir / 'project') expected = ( - 'git clone --quiet ' - 'https://github.example/group/project {expected_dir}' + 'git clone --quiet ' 'https://github.example/group/project {expected_dir}' ).format(**locals()) os_system_mock.assert_called_once_with(expected) @@ -243,8 +234,7 @@ def test_download_svn(self, tmpdir): expected_dir = str(tmpdir / 'project') expected = ( - 'svn checkout -q ' - 'svn+https://svn.example/project {expected_dir}' + 'svn checkout -q ' 'svn+https://svn.example/project {expected_dir}' ).format(**locals()) os_system_mock.assert_called_once_with(expected) @@ -252,7 +242,8 @@ def test_download_svn(self, tmpdir): class TestContentCheckers: def test_md5(self): checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478' + ) checker.feed('You should probably not be using MD5'.encode('ascii')) assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478' assert checker.is_valid() @@ -260,25 +251,27 @@ def test_md5(self): def test_other_fragment(self): "Content checks should succeed silently if no hash is present" checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#something%20completely%20different') + 'http://foo/bar#something%20completely%20different' + ) checker.feed('anything'.encode('ascii')) assert checker.is_valid() def test_blank_md5(self): "Content checks should succeed if a hash is empty" - checker = 
setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=') + checker = setuptools.package_index.HashChecker.from_url('http://foo/bar#md5=') checker.feed('anything'.encode('ascii')) assert checker.is_valid() def test_get_hash_name_md5(self): checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478' + ) assert checker.hash_name == 'md5' def test_report(self): checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478' + ) rep = checker.report(lambda x: x, 'My message about %s') assert rep == 'My message about md5' @@ -287,8 +280,8 @@ def test_report(self): def temp_home(tmpdir, monkeypatch): key = ( 'USERPROFILE' - if platform.system() == 'Windows' and sys.version_info > (3, 8) else - 'HOME' + if platform.system() == 'Windows' and sys.version_info > (3, 8) + else 'HOME' ) monkeypatch.setitem(os.environ, key, str(tmpdir)) @@ -298,13 +291,25 @@ def temp_home(tmpdir, monkeypatch): class TestPyPIConfig: def test_percent_in_password(self, temp_home): pypirc = temp_home / '.pypirc' - pypirc.write(DALS(""" + pypirc.write( + DALS( + """ [pypi] repository=https://pypi.org username=jaraco password=pity% - """)) + """ + ) + ) cfg = setuptools.package_index.PyPIConfig() cred = cfg.creds_by_repository['https://pypi.org'] assert cred.username == 'jaraco' assert cred.password == 'pity%' + + +@pytest.mark.timeout(1) +def test_REL_DoS(): + """ + REL should not hang on a contrived attack string. + """ + setuptools.package_index.REL.search('< rel=' + ' ' * 2**12) diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py index 986058067b..ed85e9bbd3 100644 --- a/setuptools/tests/test_register.py +++ b/setuptools/tests/test_register.py @@ -2,10 +2,7 @@ from setuptools.dist import Distribution from setuptools.errors import RemovedCommandError -try: - from unittest import mock -except ImportError: - import mock +from unittest import mock import pytest diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 302cff7309..117c077269 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -10,6 +10,7 @@ import pytest +from setuptools import Command from setuptools._importlib import metadata from setuptools import SetuptoolsDeprecationWarning from setuptools.command.sdist import sdist @@ -27,11 +28,14 @@ 'data_files': [("data", [os.path.join("d", "e.dat")])], } -SETUP_PY = """\ +SETUP_PY = ( + """\ from setuptools import setup setup(**%r) -""" % SETUP_ATTRS +""" + % SETUP_ATTRS +) @contextlib.contextmanager @@ -85,6 +89,12 @@ def latin1_fail(): ) +skip_under_xdist = pytest.mark.skipif( + "os.environ.get('PYTEST_XDIST_WORKER')", + reason="pytest-dev/pytest-xdist#843", +) + + def touch(path): path.write_text('', encoding='utf-8') @@ -318,6 +328,7 @@ def test_write_manifest_allows_utf8_filenames(self): # The filelist should have been updated as well assert u_filename in mm.filelist.files + @skip_under_xdist def test_write_manifest_skips_non_utf8_filenames(self): """ Files that cannot be encoded to UTF-8 (specifically, those that @@ -450,13 +461,13 @@ def test_sdist_with_utf8_encoded_filename(self): @classmethod def make_strings(cls, item): if isinstance(item, dict): - return { - key: cls.make_strings(value) for key, value in item.items()} + return {key: cls.make_strings(value) for key, value in item.items()} if 
isinstance(item, list): return list(map(cls.make_strings, item)) return str(item) @fail_on_latin1_encoded_filenames + @skip_under_xdist def test_sdist_with_latin1_encoded_filename(self): # Test for #303. dist = Distribution(self.make_strings(SETUP_ATTRS)) @@ -487,6 +498,63 @@ def test_sdist_with_latin1_encoded_filename(self): filename = filename.decode('latin-1') filename not in cmd.filelist.files + _EXAMPLE_DIRECTIVES = { + "setup.cfg - long_description and version": """ + [metadata] + name = testing + version = file: src/VERSION.txt + license_files = DOWHATYOUWANT + long_description = file: README.rst, USAGE.rst + """, + "pyproject.toml - static readme/license files and dynamic version": """ + [project] + name = "testing" + readme = "USAGE.rst" + license = {file = "DOWHATYOUWANT"} + dynamic = ["version"] + [tool.setuptools.dynamic] + version = {file = ["src/VERSION.txt"]} + """, + "pyproject.toml - directive with str instead of list": """ + [project] + name = "testing" + readme = "USAGE.rst" + license = {file = "DOWHATYOUWANT"} + dynamic = ["version"] + [tool.setuptools.dynamic] + version = {file = "src/VERSION.txt"} + """ + } + + @pytest.mark.parametrize("config", _EXAMPLE_DIRECTIVES.keys()) + def test_add_files_referenced_by_config_directives(self, tmp_path, config): + config_file, _, _ = config.partition(" - ") + config_text = self._EXAMPLE_DIRECTIVES[config] + (tmp_path / 'src').mkdir() + (tmp_path / 'src/VERSION.txt').write_text("0.42", encoding="utf-8") + (tmp_path / 'README.rst').write_text("hello world!", encoding="utf-8") + (tmp_path / 'USAGE.rst').write_text("hello world!", encoding="utf-8") + (tmp_path / 'DOWHATYOUWANT').write_text("hello world!", encoding="utf-8") + (tmp_path / config_file).write_text(config_text, encoding="utf-8") + + dist = Distribution({"packages": []}) + dist.script_name = 'setup.py' + dist.parse_config_files() + + cmd = sdist(dist) + cmd.ensure_finalized() + with quiet(): + cmd.run() + + assert ( + 'src/VERSION.txt' in cmd.filelist.files + or 'src\\VERSION.txt' in cmd.filelist.files + ) + assert 'USAGE.rst' in cmd.filelist.files + assert 'DOWHATYOUWANT' in cmd.filelist.files + assert '/' not in cmd.filelist.files + assert '\\' not in cmd.filelist.files + def test_pyproject_toml_in_sdist(self, tmpdir): """ Check if pyproject.toml is included in source distribution if present @@ -517,6 +585,46 @@ def test_pyproject_toml_excluded(self, tmpdir): manifest = cmd.filelist.files assert 'pyproject.toml' not in manifest + def test_build_subcommand_source_files(self, tmpdir): + touch(tmpdir / '.myfile~') + + # Sanity check: without custom commands file list should not be affected + dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"}) + cmd = sdist(dist) + cmd.ensure_finalized() + with quiet(): + cmd.run() + manifest = cmd.filelist.files + assert '.myfile~' not in manifest + + # Test: custom command should be able to augment file list + dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"}) + build = dist.get_command_obj("build") + build.sub_commands = [*build.sub_commands, ("build_custom", None)] + + class build_custom(Command): + def initialize_options(self): + ... + + def finalize_options(self): + ... + + def run(self): + ... 
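+
+            # ``sdist`` collects extra sources from each ``build`` sub-command's
+            # ``get_source_files()``, so the hook below should land '.myfile~'
+            # in the manifest (asserted at the end of the test).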
+ + def get_source_files(self): + return ['.myfile~'] + + dist.cmdclass.update(build_custom=build_custom) + + cmd = sdist(dist) + cmd.use_defaults = True + cmd.ensure_finalized() + with quiet(): + cmd.run() + manifest = cmd.filelist.files + assert '.myfile~' in manifest + def test_default_revctrl(): """ @@ -529,9 +637,11 @@ def test_default_revctrl(): This interface must be maintained until Ubuntu 12.04 is no longer supported (by Setuptools). """ - ep, = metadata.EntryPoints._from_text(""" + (ep,) = metadata.EntryPoints._from_text( + """ [setuptools.file_finders] svn_cvs = setuptools.command.sdist:_default_revctrl - """) + """ + ) res = ep.load() assert hasattr(res, '__iter__') diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py index b97faf17bc..0640f49da1 100644 --- a/setuptools/tests/test_setuptools.py +++ b/setuptools/tests/test_setuptools.py @@ -303,3 +303,8 @@ def test_its_own_wheel_does_not_contain_tests(setuptools_wheel): for member in contents: assert '/tests/' not in member + + +def test_convert_path_deprecated(): + with pytest.warns(setuptools.SetuptoolsDeprecationWarning): + setuptools.convert_path('setuptools/tests') diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py index 7586cb262d..4ed59bc24d 100644 --- a/setuptools/tests/test_upload.py +++ b/setuptools/tests/test_upload.py @@ -2,10 +2,7 @@ from setuptools.dist import Distribution from setuptools.errors import RemovedCommandError -try: - from unittest import mock -except ImportError: - import mock +from unittest import mock import pytest diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py index 0ba89643ec..b17be9ef2b 100644 --- a/setuptools/tests/test_virtualenv.py +++ b/setuptools/tests/test_virtualenv.py @@ -1,7 +1,8 @@ import os import sys -import itertools import subprocess +from urllib.request import urlopen +from urllib.error import URLError import pathlib @@ -18,7 +19,7 @@ def pytest_virtualenv_works(venv): pytest_virtualenv may not work. if it doesn't, skip these tests. See #1284. 
""" - venv_prefix = venv.run(["python" , "-c", "import sys; print(sys.prefix)"]).strip() + venv_prefix = venv.run(["python", "-c", "import sys; print(sys.prefix)"]).strip() if venv_prefix == sys.prefix: pytest.skip("virtualenv is broken (see pypa/setuptools#1284)") @@ -31,58 +32,60 @@ def test_clean_env_install(venv_without_setuptools, setuptools_wheel): venv_without_setuptools.run(cmd) -def _get_pip_versions(): - # This fixture will attempt to detect if tests are being run without - # network connectivity and if so skip some tests - - network = True +def access_pypi(): + # Detect if tests are being run without connectivity if not os.environ.get('NETWORK_REQUIRED', False): # pragma: nocover - try: - from urllib.request import urlopen - from urllib.error import URLError - except ImportError: - from urllib2 import urlopen, URLError # Python 2.7 compat - try: urlopen('https://pypi.org', timeout=1) except URLError: # No network, disable most of these tests - network = False - - def mark(param, *marks): - if not isinstance(param, type(pytest.param(''))): - param = pytest.param(param) - return param._replace(marks=param.marks + marks) - - def skip_network(param): - return param if network else mark(param, pytest.mark.skip(reason="no network")) - - network_versions = [ - mark('pip<20', pytest.mark.xfail(reason='pypa/pip#6599')), - 'pip<20.1', - 'pip<21', - 'pip<22', - mark( - 'https://github.com/pypa/pip/archive/main.zip', - pytest.mark.xfail(reason='#2975'), - ), - ] + return False - versions = itertools.chain( - [None], - map(skip_network, network_versions) - ) - - return list(versions) + return True @pytest.mark.skipif( 'platform.python_implementation() == "PyPy"', reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995", ) -@pytest.mark.parametrize('pip_version', _get_pip_versions()) -def test_pip_upgrade_from_source(pip_version, venv_without_setuptools, - setuptools_wheel, setuptools_sdist): +@pytest.mark.skipif(not access_pypi(), reason="no network") +# ^-- Even when it is not necessary to install a different version of `pip` +# the build process will still try to download `wheel`, see #3147 and #2986. +@pytest.mark.parametrize( + 'pip_version', + [ + None, + pytest.param( + 'pip<20.1', + marks=pytest.mark.xfail( + 'sys.version_info > (3, 12)', + reason="pip 22 requried for Python 3.12 and later", + ), + ), + pytest.param( + 'pip<21', + marks=pytest.mark.xfail( + 'sys.version_info > (3, 12)', + reason="pip 22 requried for Python 3.12 and later", + ), + ), + pytest.param( + 'pip<22', + marks=pytest.mark.xfail( + 'sys.version_info > (3, 12)', + reason="pip 22 requried for Python 3.12 and later", + ), + ), + 'pip<23', + pytest.param( + 'https://github.com/pypa/pip/archive/main.zip', + marks=pytest.mark.xfail(reason='#2975'), + ), + ], +) +def test_pip_upgrade_from_source( + pip_version, venv_without_setuptools, setuptools_wheel, setuptools_sdist +): """ Check pip can upgrade setuptools from source. """ @@ -106,10 +109,12 @@ def _check_test_command_install_requirements(venv, tmpdir): """ Check the test command will install all required dependencies. 
""" + def sdist(distname, version): dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version)) make_nspkg_sdist(str(dist_path), distname, version) return dist_path + dependency_links = [ pathlib.Path(str(dist_path)).as_uri() for dist_path in ( @@ -120,8 +125,9 @@ def sdist(distname, version): ) ] with tmpdir.join('setup.py').open('w') as fp: - fp.write(DALS( - ''' + fp.write( + DALS( + ''' from setuptools import setup setup( @@ -143,17 +149,24 @@ def sdist(distname, version): """, }} ) - '''.format(dependency_links=dependency_links))) + '''.format( + dependency_links=dependency_links + ) + ) + ) with tmpdir.join('test.py').open('w') as fp: - fp.write(DALS( - ''' + fp.write( + DALS( + ''' import foobar import bits import bobs import pieces open('success', 'w').close() - ''')) + ''' + ) + ) cmd = ["python", 'setup.py', 'test', '-s', 'test'] venv.run(cmd, cwd=str(tmpdir)) @@ -161,8 +174,8 @@ def sdist(distname, version): def test_test_command_install_requirements(venv, tmpdir, tmpdir_cwd): - # Ensure pip/wheel packages are installed. - venv.run(["python", "-c", "__import__('pkg_resources').require(['pip', 'wheel'])"]) + # Ensure pip is installed. + venv.run(["python", "-c", "import pip"]) # disable index URL so bits and bobs aren't requested from PyPI with contexts.environment(PYTHONPATH=None, PIP_NO_INDEX="1"): _check_test_command_install_requirements(venv, tmpdir) diff --git a/setuptools/tests/test_warnings.py b/setuptools/tests/test_warnings.py new file mode 100644 index 0000000000..013e2526dc --- /dev/null +++ b/setuptools/tests/test_warnings.py @@ -0,0 +1,107 @@ +from inspect import cleandoc + +import pytest + +from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning + + +_EXAMPLES = { + "default": dict( + args=("Hello {x}", "\n\t{target} {v:.1f}"), + kwargs={"x": 5, "v": 3, "target": "World"}, + expected = """ + Hello 5 + !! + + ******************************************************************************** + World 3.0 + ******************************************************************************** + + !! + """ # noqa, + ), + "futue_due_date": dict( + args=("Summary", "Lorem ipsum"), + kwargs={"due_date": (9999, 11, 22)}, + expected = """ + Summary + !! + + ******************************************************************************** + Lorem ipsum + + By 9999-Nov-22, you need to update your project and remove deprecated calls + or your builds will no longer be supported. + ******************************************************************************** + + !! + """ # noqa + ), + "past_due_date_with_docs": dict( + args=("Summary", "Lorem ipsum"), + kwargs={"due_date": (2000, 11, 22), "see_docs": "some_page.html"}, + expected=""" + Summary + !! + + ******************************************************************************** + Lorem ipsum + + This deprecation is overdue, please update your project and remove deprecated + calls to avoid build errors in the future. + + See https://setuptools.pypa.io/en/latest/some_page.html for details. + ******************************************************************************** + + !! + """ # noqa + ), +} + + +@pytest.mark.parametrize("example_name", _EXAMPLES.keys()) +def test_formatting(monkeypatch, example_name): + """ + It should automatically handle indentation, interpolation and things like due date. 
+    """
+    args = _EXAMPLES[example_name]["args"]
+    kwargs = _EXAMPLES[example_name]["kwargs"]
+    expected = _EXAMPLES[example_name]["expected"]
+
+    monkeypatch.setenv("SETUPTOOLS_ENFORCE_DEPRECATION", "false")
+    with pytest.warns(SetuptoolsWarning) as warn_info:
+        SetuptoolsWarning.emit(*args, **kwargs)
+    assert _get_message(warn_info) == cleandoc(expected)
+
+
+def test_due_date_enforcement(monkeypatch):
+    class _MyDeprecation(SetuptoolsDeprecationWarning):
+        _SUMMARY = "Summary"
+        _DETAILS = "Lorem ipsum"
+        _DUE_DATE = (2000, 11, 22)
+        _SEE_DOCS = "some_page.html"
+
+    monkeypatch.setenv("SETUPTOOLS_ENFORCE_DEPRECATION", "true")
+    with pytest.raises(SetuptoolsDeprecationWarning) as exc_info:
+        _MyDeprecation.emit()
+
+    expected = """
+    Summary
+    !!
+
+            ********************************************************************************
+            Lorem ipsum
+
+            This deprecation is overdue, please update your project and remove deprecated
+            calls to avoid build errors in the future.
+
+            See https://setuptools.pypa.io/en/latest/some_page.html for details.
+            ********************************************************************************
+
+    !!
+    """  # noqa
+    assert str(exc_info.value) == cleandoc(expected)
+
+
+def _get_message(warn_info):
+    return next(warn.message.args[0] for warn in warn_info)
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index a15c3a460d..559e16d758 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -1,11 +1,11 @@
-# -*- coding: utf-8 -*-
-
 """wheel tests
 """
 
 from distutils.sysconfig import get_config_var
 from distutils.util import get_platform
 import contextlib
+import pathlib
+import stat
 import glob
 import inspect
 import os
@@ -609,8 +609,95 @@ def test_wheel_no_dist_dir():
 
 def test_wheel_is_compatible(monkeypatch):
     def sys_tags():
-        for t in parse_tag('cp36-cp36m-manylinux1_x86_64'):
-            yield t
-    monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags)
+        return {
+            (t.interpreter, t.abi, t.platform)
+            for t in parse_tag('cp36-cp36m-manylinux1_x86_64')
+        }
+    monkeypatch.setattr('setuptools.wheel._get_supported_tags', sys_tags)
     assert Wheel(
         'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible()
+
+
+def test_wheel_mode():
+    @contextlib.contextmanager
+    def build_wheel(extra_file_defs=None, **kwargs):
+        file_defs = {
+            'setup.py': (DALS(
+                '''
+                # -*- coding: utf-8 -*-
+                from setuptools import setup
+                import setuptools
+                setup(**%r)
+                '''
+            ) % kwargs).encode('utf-8'),
+        }
+        if extra_file_defs:
+            file_defs.update(extra_file_defs)
+        with tempdir() as source_dir:
+            path.build(file_defs, source_dir)
+            runsh = pathlib.Path(source_dir) / "script.sh"
+            os.chmod(runsh, 0o777)
+            subprocess.check_call((sys.executable, 'setup.py',
+                                   '-q', 'bdist_wheel'), cwd=source_dir)
+            yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]
+
+    params = dict(
+        id='script',
+        file_defs={
+            'script.py': DALS(
+                '''
+                #/usr/bin/python
+                print('hello world!')
+                '''
+            ),
+            'script.sh': DALS(
+                '''
+                #/bin/sh
+                echo 'hello world!'
+                '''
+            ),
+        },
+        setup_kwargs=dict(
+            scripts=['script.py', 'script.sh'],
+        ),
+        install_tree=flatten_tree({
+            'foo-1.0-py{py_version}.egg': {
+                'EGG-INFO': [
+                    'PKG-INFO',
+                    'RECORD',
+                    'WHEEL',
+                    'top_level.txt',
+                    {'scripts': [
+                        'script.py',
+                        'script.sh'
+                    ]}
+
+                ]
+            }
+        })
+    )
+
+    project_name = params.get('name', 'foo')
+    version = params.get('version', '1.0')
+    install_tree = params.get('install_tree')
+    file_defs = params.get('file_defs', {})
+    setup_kwargs = params.get('setup_kwargs', {})
+
+    with build_wheel(
+        name=project_name,
+        version=version,
+        install_requires=[],
+        extras_require={},
+        extra_file_defs=file_defs,
+        **setup_kwargs
+    ) as filename, tempdir() as install_dir:
+        _check_wheel_install(filename, install_dir,
+                             install_tree, project_name,
+                             version, None)
+        w = Wheel(filename)
+        base = pathlib.Path(install_dir) / w.egg_name()
+        script_sh = base / "EGG-INFO" / "scripts" / "script.sh"
+        assert script_sh.exists()
+        if sys.platform != 'win32':
+            # Editable file mode has no effect on Windows
+            assert oct(stat.S_IMODE(script_sh.stat().st_mode)) == "0o777"
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index 8ac9bd072c..f8b82fcc37 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -107,9 +107,9 @@ def test_basic(self, tmpdir):
             'arg5 a\\\\b',
         ]
         proc = subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
-        stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
-        actual = stdout.decode('ascii').replace('\r\n', '\n')
+            cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, text=True)
+        stdout, stderr = proc.communicate('hello\nworld\n')
+        actual = stdout.replace('\r\n', '\n')
         expected = textwrap.dedent(r"""
            \foo-script.py
            ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
@@ -148,9 +148,11 @@ def test_with_options(self, tmpdir):
             cmd,
             stdout=subprocess.PIPE,
             stdin=subprocess.PIPE,
-            stderr=subprocess.STDOUT)
+            stderr=subprocess.STDOUT,
+            text=True,
+        )
         stdout, stderr = proc.communicate()
-        actual = stdout.decode('ascii').replace('\r\n', '\n')
+        actual = stdout.replace('\r\n', '\n')
         expected = textwrap.dedent(r"""
            \foo-script.py
            []
@@ -188,7 +190,7 @@ def test_basic(self, tmpdir):
         ]
         proc = subprocess.Popen(
             cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
-            stderr=subprocess.STDOUT)
+            stderr=subprocess.STDOUT, text=True)
         stdout, stderr = proc.communicate()
         assert not stdout
         assert not stderr
diff --git a/setuptools/version.py b/setuptools/version.py
index 95e1869658..ec253c4144 100644
--- a/setuptools/version.py
+++ b/setuptools/version.py
@@ -1,6 +1,6 @@
-import pkg_resources
+from ._importlib import metadata
 
 try:
-    __version__ = pkg_resources.get_distribution('setuptools').version
+    __version__ = metadata.version('setuptools') or '0.dev0+unknown'
 except Exception:
-    __version__ = 'unknown'
+    __version__ = '0.dev0+unknown'
diff --git a/setuptools/warnings.py b/setuptools/warnings.py
new file mode 100644
index 0000000000..4ea782e509
--- /dev/null
+++ b/setuptools/warnings.py
@@ -0,0 +1,104 @@
+"""Provide basic warnings used by setuptools modules.
+
+Using custom classes (other than ``UserWarning``) allows users to set
+``PYTHONWARNINGS`` filters to run tests and prepare for upcoming changes in
+setuptools.
+""" + +import os +import warnings +from datetime import date +from inspect import cleandoc +from textwrap import indent +from typing import Optional, Tuple + +_DueDate = Tuple[int, int, int] # time tuple +_INDENT = 8 * " " +_TEMPLATE = f"""{80 * '*'}\n{{details}}\n{80 * '*'}""" + + +class SetuptoolsWarning(UserWarning): + """Base class in ``setuptools`` warning hierarchy.""" + + @classmethod + def emit( + cls, + summary: Optional[str] = None, + details: Optional[str] = None, + due_date: Optional[_DueDate] = None, + see_docs: Optional[str] = None, + see_url: Optional[str] = None, + stacklevel: int = 2, + **kwargs + ): + """Private: reserved for ``setuptools`` internal use only""" + # Default values: + summary_ = summary or getattr(cls, "_SUMMARY", None) or "" + details_ = details or getattr(cls, "_DETAILS", None) or "" + due_date = due_date or getattr(cls, "_DUE_DATE", None) + docs_ref = see_docs or getattr(cls, "_SEE_DOCS", None) + docs_url = docs_ref and f"https://setuptools.pypa.io/en/latest/{docs_ref}" + see_url = see_url or getattr(cls, "_SEE_URL", None) + due = date(*due_date) if due_date else None + + text = cls._format(summary_, details_, due, see_url or docs_url, kwargs) + if due and due < date.today() and _should_enforce(): + raise cls(text) + warnings.warn(text, cls, stacklevel=stacklevel + 1) + + @classmethod + def _format( + cls, + summary: str, + details: str, + due_date: Optional[date] = None, + see_url: Optional[str] = None, + format_args: Optional[dict] = None, + ): + """Private: reserved for ``setuptools`` internal use only""" + today = date.today() + summary = cleandoc(summary).format_map(format_args or {}) + possible_parts = [ + cleandoc(details).format_map(format_args or {}), + ( + f"\nBy {due_date:%Y-%b-%d}, you need to update your project and remove " + "deprecated calls\nor your builds will no longer be supported." + if due_date and due_date > today else None + ), + ( + "\nThis deprecation is overdue, please update your project and remove " + "deprecated\ncalls to avoid build errors in the future." + if due_date and due_date < today else None + ), + (f"\nSee {see_url} for details." if see_url else None) + + ] + parts = [x for x in possible_parts if x] + if parts: + body = indent(_TEMPLATE.format(details="\n".join(parts)), _INDENT) + return "\n".join([summary, "!!\n", body, "\n!!"]) + return summary + + +class InformationOnly(SetuptoolsWarning): + """Currently there is no clear way of displaying messages to the users + that use the setuptools backend directly via ``pip``. + The only thing that might work is a warning, although it is not the + most appropriate tool for the job... + + See pypa/packaging-problems#558. + """ + + +class SetuptoolsDeprecationWarning(SetuptoolsWarning): + """ + Base class for warning deprecations in ``setuptools`` + + This class is not derived from ``DeprecationWarning``, and as such is + visible by default. 
+ """ + + +def _should_enforce(): + enforce = os.getenv("SETUPTOOLS_ENFORCE_DEPRECATION", "false").lower() + return enforce in ("true", "on", "ok", "1") diff --git a/setuptools/wheel.py b/setuptools/wheel.py index 9819e8b9fd..850e43cd01 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -1,20 +1,22 @@ """Wheels support.""" -from distutils.util import get_platform -from distutils import log import email import itertools +import functools import os import posixpath import re import zipfile +import contextlib + +from distutils.util import get_platform -import pkg_resources import setuptools -from pkg_resources import parse_version +from setuptools.extern.packaging.version import Version as parse_version from setuptools.extern.packaging.tags import sys_tags from setuptools.extern.packaging.utils import canonicalize_name -from setuptools.command.egg_info import write_requirements +from setuptools.command.egg_info import write_requirements, _egg_basename +from setuptools.archive_util import _unpack_zipfile_obj WHEEL_NAME = re.compile( @@ -27,6 +29,14 @@ "__import__('pkg_resources').declare_namespace(__name__)\n" +@functools.lru_cache(maxsize=None) +def _get_supported_tags(): + # We calculate the supported tags only once, otherwise calling + # this method on thousands of wheels takes seconds instead of + # milliseconds. + return {(t.interpreter, t.abi, t.platform) for t in sys_tags()} + + def unpack(src_dir, dst_dir): '''Move everything under `src_dir` to `dst_dir`, and delete the former.''' for dirpath, dirnames, filenames in os.walk(src_dir): @@ -49,6 +59,19 @@ def unpack(src_dir, dst_dir): os.rmdir(dirpath) +@contextlib.contextmanager +def disable_info_traces(): + """ + Temporarily disable info traces. + """ + from distutils import log + saved = log.set_threshold(log.WARN) + try: + yield + finally: + log.set_threshold(saved) + + class Wheel: def __init__(self, filename): @@ -68,16 +91,15 @@ def tags(self): ) def is_compatible(self): - '''Is the wheel is compatible with the current platform?''' - supported_tags = set( - (t.interpreter, t.abi, t.platform) for t in sys_tags()) - return next((True for t in self.tags() if t in supported_tags), False) + '''Is the wheel compatible with the current platform?''' + return next((True for t in self.tags() if t in _get_supported_tags()), False) def egg_name(self): - return pkg_resources.Distribution( - project_name=self.project_name, version=self.version, + return _egg_basename( + self.project_name, + self.version, platform=(None if self.platform == 'any' else get_platform()), - ).egg_name() + '.egg' + ) + ".egg" def get_dist_info(self, zf): # find the correct name of the .dist-info dir in the wheel file @@ -106,6 +128,8 @@ def _install_as_egg(self, destination_eggdir, zf): @staticmethod def _convert_metadata(zf, destination_eggdir, dist_info, egg_info): + import pkg_resources + def get_metadata(name): with zf.open(posixpath.join(dist_info, name)) as fp: value = fp.read().decode('utf-8') @@ -121,8 +145,7 @@ def get_metadata(name): raise ValueError( 'unsupported wheel format version: %s' % wheel_version) # Extract to target directory. - os.mkdir(destination_eggdir) - zf.extractall(destination_eggdir) + _unpack_zipfile_obj(zf, destination_eggdir) # Convert metadata. dist_info = os.path.join(destination_eggdir, dist_info) dist = pkg_resources.Distribution.from_location( @@ -156,17 +179,12 @@ def raw_req(req): extras_require=extras_require, ), ) - # Temporarily disable info traces. 
- log_threshold = log._global_log.threshold - log.set_threshold(log.WARN) - try: + with disable_info_traces(): write_requirements( setup_dist.get_command_obj('egg_info'), None, os.path.join(egg_info, 'requires.txt'), ) - finally: - log.set_threshold(log_threshold) @staticmethod def _move_data_entries(destination_eggdir, dist_data): diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py index cb977cff95..1ca64fbb54 100644 --- a/setuptools/windows_support.py +++ b/setuptools/windows_support.py @@ -1,5 +1,4 @@ import platform -import ctypes def windows_only(func): @@ -17,6 +16,7 @@ def hide_file(path): `path` must be text. """ + import ctypes __import__('ctypes.wintypes') SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD diff --git a/tools/finalize.py b/tools/finalize.py index e4f655431d..5a4df5dfab 100644 --- a/tools/finalize.py +++ b/tools/finalize.py @@ -42,6 +42,7 @@ def update_changelog(): cmd = [ sys.executable, '-m', 'towncrier', + 'build', '--version', get_version(), '--yes', ] diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py new file mode 100644 index 0000000000..201d1b70e1 --- /dev/null +++ b/tools/generate_validation_code.py @@ -0,0 +1,30 @@ +import subprocess +import sys + +from pathlib import Path + + +def generate_pyproject_validation(dest: Path): + """ + Generates validation code for ``pyproject.toml`` based on JSON schemas and the + ``validate-pyproject`` library. + """ + cmd = [ + sys.executable, + "-m", + "validate_pyproject.vendoring", + f"--output-dir={dest}", + "--enable-plugins", + "setuptools", + "distutils", + "--very-verbose" + ] + subprocess.check_call(cmd) + print(f"Validation code generated at: {dest}") + + +def main(): + generate_pyproject_validation(Path("setuptools/config/_validate_pyproject")) + + +__name__ == '__main__' and main() diff --git a/tools/msvc-build-launcher.cmd b/tools/msvc-build-launcher.cmd index 92da290ed1..15b489019c 100644 --- a/tools/msvc-build-launcher.cmd +++ b/tools/msvc-build-launcher.cmd @@ -1,39 +1,39 @@ -@echo off - -REM Use old Windows SDK 6.1 so created .exe will be compatible with -REM old Windows versions. -REM Windows SDK 6.1 may be downloaded at: -REM http://www.microsoft.com/en-us/download/details.aspx?id=11310 -set PATH_OLD=%PATH% - -REM The SDK creates a false install of Visual Studio at one of these locations -set PATH=C:\Program Files\Microsoft Visual Studio 9.0\VC\bin;%PATH% -set PATH=C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin;%PATH% - -REM set up the environment to compile to x86 -call VCVARS32 -if "%ERRORLEVEL%"=="0" ( - cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x86 /SUBSYSTEM:CONSOLE /out:setuptools/cli-32.exe - cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x86 /SUBSYSTEM:WINDOWS /out:setuptools/gui-32.exe -) else ( - echo Windows SDK 6.1 not found to build Windows 32-bit version -) - -REM buildout (and possibly other implementations) currently depend on -REM the 32-bit launcher scripts without the -32 in the filename, so copy them -REM there for now. 
-copy setuptools/cli-32.exe setuptools/cli.exe -copy setuptools/gui-32.exe setuptools/gui.exe - -REM now for 64-bit -REM Use the x86_amd64 profile, which is the 32-bit cross compiler for amd64 -call VCVARSx86_amd64 -if "%ERRORLEVEL%"=="0" ( - cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x64 /SUBSYSTEM:CONSOLE /out:setuptools/cli-64.exe - cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x64 /SUBSYSTEM:WINDOWS /out:setuptools/gui-64.exe -) else ( - echo Windows SDK 6.1 not found to build Windows 64-bit version -) - -set PATH=%PATH_OLD% - +@echo off + +REM Use old Windows SDK 6.1 so created .exe will be compatible with +REM old Windows versions. +REM Windows SDK 6.1 may be downloaded at: +REM http://www.microsoft.com/en-us/download/details.aspx?id=11310 +set PATH_OLD=%PATH% + +REM The SDK creates a false install of Visual Studio at one of these locations +set PATH=C:\Program Files\Microsoft Visual Studio 9.0\VC\bin;%PATH% +set PATH=C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin;%PATH% + +REM set up the environment to compile to x86 +call VCVARS32 +if "%ERRORLEVEL%"=="0" ( + cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x86 /SUBSYSTEM:CONSOLE /out:setuptools/cli-32.exe + cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x86 /SUBSYSTEM:WINDOWS /out:setuptools/gui-32.exe +) else ( + echo Windows SDK 6.1 not found to build Windows 32-bit version +) + +REM buildout (and possibly other implementations) currently depend on +REM the 32-bit launcher scripts without the -32 in the filename, so copy them +REM there for now. +copy setuptools/cli-32.exe setuptools/cli.exe +copy setuptools/gui-32.exe setuptools/gui.exe + +REM now for 64-bit +REM Use the x86_amd64 profile, which is the 32-bit cross compiler for amd64 +call VCVARSx86_amd64 +if "%ERRORLEVEL%"=="0" ( + cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x64 /SUBSYSTEM:CONSOLE /out:setuptools/cli-64.exe + cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x64 /SUBSYSTEM:WINDOWS /out:setuptools/gui-64.exe +) else ( + echo Windows SDK 6.1 not found to build Windows 64-bit version +) + +set PATH=%PATH_OLD% + diff --git a/tools/vendored.py b/tools/vendored.py index 8a122ad778..9b7cf729ba 100644 --- a/tools/vendored.py +++ b/tools/vendored.py @@ -89,26 +89,36 @@ def rewrite_more_itertools(pkg_files: Path): more_file.write_text(text) +def rewrite_platformdirs(pkg_files: Path): + """ + Replace some absolute imports with relative ones. + """ + init = pkg_files.joinpath('__init__.py') + text = init.read_text() + text = text.replace('from platformdirs.', 'from .') + text = text.replace('from typing_extensions', 'from ..typing_extensions') + init.write_text(text) + + def clean(vendor): """ Remove all files out of the vendor directory except the meta data (as pip uninstall doesn't support -t). 
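A minimal sketch (not part of the patch) of how the new `setuptools.warnings` machinery introduced above is meant to be consumed. `_ExampleDeprecation` and its attribute values are hypothetical; `emit()`, the `_SUMMARY`/`_DETAILS`/`_DUE_DATE`/`_SEE_DOCS` hooks, and the `SETUPTOOLS_ENFORCE_DEPRECATION` switch all come from `setuptools/warnings.py` in this patch:

import os

from setuptools.warnings import SetuptoolsDeprecationWarning


class _ExampleDeprecation(SetuptoolsDeprecationWarning):
    # Hypothetical subclass: every value below is only an illustration.
    _SUMMARY = "Feature XYZ is deprecated"       # first line of the message
    _DETAILS = "Please migrate to feature ABC."  # wrapped in the '*' banner
    _DUE_DATE = (9999, 11, 22)                   # (year, month, day)
    _SEE_DOCS = "userguide/index.html"           # rendered as a pypa.io link


os.environ["SETUPTOOLS_ENFORCE_DEPRECATION"] = "true"
_ExampleDeprecation.emit()
# Emits a formatted UserWarning subclass. Once the due date is in the past,
# emit() raises the exception instead of warning, because _should_enforce()
# treats "true", "on", "ok" and "1" as truthy.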
""" - remove_all( - path - for path in vendor.glob('*') - if path.basename() != 'vendored.txt' - ) + remove_all(path for path in vendor.glob('*') if path.basename() != 'vendored.txt') def install(vendor): clean(vendor) install_args = [ sys.executable, - '-m', 'pip', + '-m', + 'pip', 'install', - '-r', str(vendor / 'vendored.txt'), - '-t', str(vendor), + '-r', + str(vendor / 'vendored.txt'), + '-t', + str(vendor), ] subprocess.check_call(install_args) (vendor / '__init__.py').write_text('') @@ -122,6 +132,7 @@ def update_pkg_resources(): rewrite_jaraco(vendor / 'jaraco', 'pkg_resources.extern') rewrite_importlib_resources(vendor / 'importlib_resources', 'pkg_resources.extern') rewrite_more_itertools(vendor / "more_itertools") + rewrite_platformdirs(vendor / "platformdirs") def update_setuptools(): diff --git a/tox.ini b/tox.ini index 6b587e2895..2a5217339d 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] envlist = python -minversion = 3.2 +minversion = 3.25 # https://github.com/jaraco/skeleton/issues/6 tox_pip_extensions_ext_venv_update = true toxworkdir={env:TOX_WORK_DIR:.tox} @@ -8,12 +8,18 @@ toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = # Ideally all the dependencies should be set as "extras" +setenv = + PYTHONWARNDEFAULTENCODING = 1 + SETUPTOOLS_ENFORCE_DEPRECATION = 1 commands = pytest {posargs} usedevelop = True -extras = testing +extras = + testing passenv = SETUPTOOLS_USE_DISTUTILS + PRE_BUILT_SETUPTOOLS_WHEEL + PRE_BUILT_SETUPTOOLS_SDIST TIMEOUT_BACKEND_TEST # timeout (in seconds) for test_build_meta windir # required for test_pkg_resources # honor git config in pytest-perf @@ -28,7 +34,7 @@ passenv = setenv = PROJECT_ROOT = {toxinidir} commands = - pytest --integration {posargs:-vv --durations=10 setuptools/tests/integration} + pytest --integration {posargs:-vv --durations=10} setuptools/tests/integration # use verbose mode by default to facilitate debugging from CI logs [testenv:docs] @@ -38,6 +44,7 @@ extras = changedir = docs commands = python -m sphinx -W --keep-going . {toxinidir}/build/html + python -m sphinxlint [testenv:finalize] skip_install = True @@ -55,6 +62,13 @@ deps = commands = python -m tools.vendored +[testenv:generate-validation-code] +skip_install = True +deps = + validate-pyproject[all]==0.12.2 +commands = + python -m tools.generate_validation_code + [testenv:release] skip_install = True deps =