diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD index a6a44d8fcc..88e4495a20 100644 --- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD +++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD @@ -2,8 +2,7 @@ backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJ backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023 backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876 backports.tarfile-1.0.0.dist-info/RECORD,, -backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 -backports/__pycache__/tarfile.cpython-312.pyc,, +backports/__pycache__/tarfile.cpython-38.pyc,, backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920 diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD index ba764991ee..a9cabb9a01 100644 --- a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD +++ b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD @@ -2,19 +2,18 @@ importlib_resources-5.10.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VC importlib_resources-5.10.2.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 importlib_resources-5.10.2.dist-info/METADATA,sha256=Xo5ntATvDYUxdmW8tr8kxtfdiOC9889mOk-LE1LtZfI,4111 importlib_resources-5.10.2.dist-info/RECORD,, -importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 -importlib_resources/__pycache__/__init__.cpython-312.pyc,, -importlib_resources/__pycache__/_adapters.cpython-312.pyc,, -importlib_resources/__pycache__/_common.cpython-312.pyc,, -importlib_resources/__pycache__/_compat.cpython-312.pyc,, -importlib_resources/__pycache__/_itertools.cpython-312.pyc,, -importlib_resources/__pycache__/_legacy.cpython-312.pyc,, -importlib_resources/__pycache__/abc.cpython-312.pyc,, -importlib_resources/__pycache__/readers.cpython-312.pyc,, -importlib_resources/__pycache__/simple.cpython-312.pyc,, +importlib_resources/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/__pycache__/_adapters.cpython-38.pyc,, +importlib_resources/__pycache__/_common.cpython-38.pyc,, +importlib_resources/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/__pycache__/_itertools.cpython-38.pyc,, +importlib_resources/__pycache__/_legacy.cpython-38.pyc,, +importlib_resources/__pycache__/abc.cpython-38.pyc,, +importlib_resources/__pycache__/readers.cpython-38.pyc,, +importlib_resources/__pycache__/simple.cpython-38.pyc,, importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457 importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923 @@ -25,36 +24,36 @@ 
importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU, importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581 importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576 importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,, -importlib_resources/tests/__pycache__/_compat.cpython-312.pyc,, -importlib_resources/tests/__pycache__/_path.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,, -importlib_resources/tests/__pycache__/update-zips.cpython-312.pyc,, -importlib_resources/tests/__pycache__/util.cpython-312.pyc,, +importlib_resources/tests/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/tests/__pycache__/_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_open.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_read.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc,, +importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc,, +importlib_resources/tests/__pycache__/util.cpython-38.pyc,, importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708 importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039 importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc,, 
importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 @@ -70,8 +69,8 @@ importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4 importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417 importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873 importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pkg_resources/_vendor/importlib_resources/_compat.py b/pkg_resources/_vendor/importlib_resources/_compat.py index 8b5b1d280f..6217958d88 100644 --- a/pkg_resources/_vendor/importlib_resources/_compat.py +++ b/pkg_resources/_vendor/importlib_resources/_compat.py @@ -11,7 +11,7 @@ if sys.version_info >= (3, 10): from zipfile import Path as ZipPath # type: ignore else: - from ..zipp import Path as ZipPath # type: ignore + from pkg_resources.extern.zipp import Path as ZipPath # type: ignore try: diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py index d92c7c56c9..3bc9a83aef 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py @@ -1,9 +1,9 @@ import io import unittest -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources -from importlib_resources._adapters import ( +from pkg_resources.extern.importlib_resources._adapters import ( CompatibilityFiles, wrap_spec, ) diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py index 525568e8c9..2ace90c8a5 100644 --- 
a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py @@ -1,5 +1,5 @@ import unittest -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources from . import data01 from . import util diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_files.py index d258fb5f0f..b3a77fe9de 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_files.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_files.py @@ -5,7 +5,7 @@ import importlib import contextlib -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources from ..abc import Traversable from . import data01 from . import util @@ -97,7 +97,7 @@ def test_implicit_files(self): 'somepkg': { '__init__.py': textwrap.dedent( """ - import importlib_resources as res + from pkg_resources.extern import importlib_resources as res val = res.files().joinpath('res.txt').read_text() """ ), diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_open.py b/pkg_resources/_vendor/importlib_resources/tests/test_open.py index 87b42c3d39..b03cbf515c 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_open.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_open.py @@ -1,6 +1,6 @@ import unittest -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources from . import data01 from . import util diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_path.py b/pkg_resources/_vendor/importlib_resources/tests/test_path.py index 4f4d3943bb..956d5dd0f7 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_path.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_path.py @@ -1,7 +1,7 @@ import io import unittest -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources from . import data01 from . import util diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_read.py b/pkg_resources/_vendor/importlib_resources/tests/test_read.py index 41dd6db5f3..c8f00cf606 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_read.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_read.py @@ -1,5 +1,5 @@ import unittest -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources from . import data01 from . 
import util diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py index 1c8ebeeb13..9382deb196 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py @@ -4,7 +4,7 @@ import unittest from importlib import import_module -from importlib_resources.readers import MultiplexedPath, NamespaceReader +from pkg_resources.extern.importlib_resources.readers import MultiplexedPath, NamespaceReader class MultiplexedPathTest(unittest.TestCase): diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py index 8239027167..43db4d724c 100644 --- a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py +++ b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py @@ -1,6 +1,6 @@ import sys import unittest -import importlib_resources as resources +from pkg_resources.extern import importlib_resources as resources import uuid import pathlib diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD index 09d191f214..f3ff478f21 100644 --- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD +++ b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD @@ -4,5 +4,5 @@ jaraco.context-5.3.0.dist-info/METADATA,sha256=xDtguJej0tN9iEXCUvxEJh2a7xceIRVBE jaraco.context-5.3.0.dist-info/RECORD,, jaraco.context-5.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 jaraco.context-5.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 -jaraco/__pycache__/context.cpython-312.pyc,, +jaraco/__pycache__/context.cpython-38.pyc,, jaraco/context.py,sha256=REoLIxDkO5MfEYowt_WoupNCRoxBS5v7YX2PbW8lIcs,9552 diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD index 783aa7d2b9..af1363da1d 100644 --- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD +++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD @@ -6,5 +6,5 @@ jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPE jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642 jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982 -jaraco/functools/__pycache__/__init__.cpython-312.pyc,, +jaraco/functools/__pycache__/__init__.cpython-38.pyc,, jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD index c698101cb4..a7551d1070 100644 --- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD @@ -7,4 +7,4 @@ jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FG jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335 jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538 -jaraco/text/__pycache__/__init__.cpython-312.pyc,, +jaraco/text/__pycache__/__init__.cpython-38.pyc,, diff --git 
a/pkg_resources/_vendor/jaraco/functools/__init__.py b/pkg_resources/_vendor/jaraco/functools/__init__.py index f523099c72..cd2c93231d 100644 --- a/pkg_resources/_vendor/jaraco/functools/__init__.py +++ b/pkg_resources/_vendor/jaraco/functools/__init__.py @@ -7,7 +7,7 @@ import types import warnings -import pkg_resources.extern.more_itertools +from pkg_resources.extern import more_itertools def compose(*funcs): @@ -603,10 +603,10 @@ def splat(func): simple ``map``. >>> pairs = [(-1, 1), (0, 2)] - >>> pkg_resources.extern.more_itertools.consume(itertools.starmap(print, pairs)) + >>> more_itertools.consume(itertools.starmap(print, pairs)) -1 1 0 2 - >>> pkg_resources.extern.more_itertools.consume(map(splat(print), pairs)) + >>> more_itertools.consume(map(splat(print), pairs)) -1 1 0 2 diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD index 2ce6e4a6f5..04e5089be5 100644 --- a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD +++ b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD @@ -5,9 +5,9 @@ more_itertools-10.2.0.dist-info/RECORD,, more_itertools-10.2.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81 more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149 more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 -more_itertools/__pycache__/__init__.cpython-312.pyc,, -more_itertools/__pycache__/more.cpython-312.pyc,, -more_itertools/__pycache__/recipes.cpython-312.pyc,, +more_itertools/__pycache__/__init__.cpython-38.pyc,, +more_itertools/__pycache__/more.cpython-38.pyc,, +more_itertools/__pycache__/recipes.cpython-38.pyc,, more_itertools/more.py,sha256=jYdpbgXHf8yZDByPrhluxpe0D_IXRk2tfQnyfOFMi74,143045 more_itertools/more.pyi,sha256=KTHYeqr0rFbn1GWRnv0jY64JRNnKKT0kA3kmsah8DYQ,21044 more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD b/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD index bcf796c2f4..9abc2bcbe6 100644 --- a/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD +++ b/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD @@ -7,20 +7,20 @@ packaging-24.0.dist-info/RECORD,, packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496 -packaging/__pycache__/__init__.cpython-312.pyc,, -packaging/__pycache__/_elffile.cpython-312.pyc,, -packaging/__pycache__/_manylinux.cpython-312.pyc,, -packaging/__pycache__/_musllinux.cpython-312.pyc,, -packaging/__pycache__/_parser.cpython-312.pyc,, -packaging/__pycache__/_structures.cpython-312.pyc,, -packaging/__pycache__/_tokenizer.cpython-312.pyc,, -packaging/__pycache__/markers.cpython-312.pyc,, -packaging/__pycache__/metadata.cpython-312.pyc,, -packaging/__pycache__/requirements.cpython-312.pyc,, -packaging/__pycache__/specifiers.cpython-312.pyc,, -packaging/__pycache__/tags.cpython-312.pyc,, -packaging/__pycache__/utils.cpython-312.pyc,, -packaging/__pycache__/version.cpython-312.pyc,, +packaging/__pycache__/__init__.cpython-38.pyc,, +packaging/__pycache__/_elffile.cpython-38.pyc,, +packaging/__pycache__/_manylinux.cpython-38.pyc,, +packaging/__pycache__/_musllinux.cpython-38.pyc,, +packaging/__pycache__/_parser.cpython-38.pyc,, 
+packaging/__pycache__/_structures.cpython-38.pyc,, +packaging/__pycache__/_tokenizer.cpython-38.pyc,, +packaging/__pycache__/markers.cpython-38.pyc,, +packaging/__pycache__/metadata.cpython-38.pyc,, +packaging/__pycache__/requirements.cpython-38.pyc,, +packaging/__pycache__/specifiers.cpython-38.pyc,, +packaging/__pycache__/tags.cpython-38.pyc,, +packaging/__pycache__/utils.cpython-38.pyc,, +packaging/__pycache__/version.cpython-38.pyc,, packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590 packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676 diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py index 2d015bab59..d12279510c 100644 --- a/pkg_resources/_vendor/packaging/specifiers.py +++ b/pkg_resources/_vendor/packaging/specifiers.py @@ -4,8 +4,8 @@ """ .. testsetup:: - from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier - from packaging.version import Version + from pkg_resources.extern.packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from pkg_resources.extern.packaging.version import Version """ import abc diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py index 5faab9bd0d..d7c7e5f84b 100644 --- a/pkg_resources/_vendor/packaging/version.py +++ b/pkg_resources/_vendor/packaging/version.py @@ -4,7 +4,7 @@ """ .. testsetup:: - from packaging.version import parse, Version + from pkg_resources.extern.packaging.version import parse, Version """ import itertools diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD index a721322694..daa815e06f 100644 --- a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD +++ b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD @@ -6,14 +6,14 @@ platformdirs-2.6.2.dist-info/WHEEL,sha256=NaLmgHHW_f9jTvv_wRh9vcK7c7EK9o5fwsIXMO platformdirs-2.6.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089 platformdirs/__init__.py,sha256=td0a-fHENmnG8ess2WRoysKv9ud5j6TQ-p_iUM_uE18,12864 platformdirs/__main__.py,sha256=VsC0t5m-6f0YVr96PVks93G3EDF8MSNY4KpUMvPahDA,1164 -platformdirs/__pycache__/__init__.cpython-312.pyc,, -platformdirs/__pycache__/__main__.cpython-312.pyc,, -platformdirs/__pycache__/android.cpython-312.pyc,, -platformdirs/__pycache__/api.cpython-312.pyc,, -platformdirs/__pycache__/macos.cpython-312.pyc,, -platformdirs/__pycache__/unix.cpython-312.pyc,, -platformdirs/__pycache__/version.cpython-312.pyc,, -platformdirs/__pycache__/windows.cpython-312.pyc,, +platformdirs/__pycache__/__init__.cpython-38.pyc,, +platformdirs/__pycache__/__main__.cpython-38.pyc,, +platformdirs/__pycache__/android.cpython-38.pyc,, +platformdirs/__pycache__/api.cpython-38.pyc,, +platformdirs/__pycache__/macos.cpython-38.pyc,, +platformdirs/__pycache__/unix.cpython-38.pyc,, +platformdirs/__pycache__/version.cpython-38.pyc,, +platformdirs/__pycache__/windows.cpython-38.pyc,, platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068 platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910 platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655 diff --git a/pkg_resources/_vendor/platformdirs/__init__.py b/pkg_resources/_vendor/platformdirs/__init__.py index aef2821b83..0a80b8f7d2 100644 --- 
a/pkg_resources/_vendor/platformdirs/__init__.py +++ b/pkg_resources/_vendor/platformdirs/__init__.py @@ -11,7 +11,7 @@ if sys.version_info >= (3, 8): # pragma: no cover (py38+) from typing import Literal else: # pragma: no cover (py38+) - from ..typing_extensions import Literal + from typing_extensions import Literal from .api import PlatformDirsABC from .version import __version__ @@ -20,21 +20,21 @@ def _set_platform_dir_class() -> type[PlatformDirsABC]: if sys.platform == "win32": - from .windows import Windows as Result + from pkg_resources.extern.platformdirs.windows import Windows as Result elif sys.platform == "darwin": - from .macos import MacOS as Result + from pkg_resources.extern.platformdirs.macos import MacOS as Result else: - from .unix import Unix as Result + from pkg_resources.extern.platformdirs.unix import Unix as Result if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": if os.getenv("SHELL") or os.getenv("PREFIX"): return Result - from .android import _android_folder + from pkg_resources.extern.platformdirs.android import _android_folder if _android_folder() is not None: - from .android import Android + from pkg_resources.extern.platformdirs.android import Android return Android # return to avoid redefinition of result diff --git a/pkg_resources/_vendor/platformdirs/__main__.py b/pkg_resources/_vendor/platformdirs/__main__.py index 0fc1edd59c..325cf6d560 100644 --- a/pkg_resources/_vendor/platformdirs/__main__.py +++ b/pkg_resources/_vendor/platformdirs/__main__.py @@ -1,6 +1,6 @@ from __future__ import annotations -from platformdirs import PlatformDirs, __version__ +from pkg_resources.extern.platformdirs import PlatformDirs, __version__ PROPS = ( "user_data_dir", diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD index adc797bc2e..29b415e827 100644 --- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD @@ -1,9 +1,8 @@ -__pycache__/zipp.cpython-312.pyc,, +__pycache__/zipp.cpython-38.pyc,, zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261 zipp-3.7.0.dist-info/RECORD,, -zipp-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5 zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425 diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED b/pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index b6294dbfdb..bfb9eb8bdf 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -77,13 +77,13 @@ def install(self): # cog.outl(f"names = (\n{names}\n)") # ]]] names = ( + 'backports', + 'importlib_resources', + 'jaraco', + 'more_itertools', 'packaging', 'platformdirs', - 'jaraco', - 'importlib_resources', 'zipp', - 'more_itertools', - 'backports', ) # [[[end]]] VendorImporter(__name__, names).install() diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD index 
a6a44d8fcc..88e4495a20 100644 --- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD +++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD @@ -2,8 +2,7 @@ backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJ backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023 backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876 backports.tarfile-1.0.0.dist-info/RECORD,, -backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 -backports/__pycache__/tarfile.cpython-312.pyc,, +backports/__pycache__/tarfile.cpython-38.pyc,, backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920 diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD index c5ed31bf55..780fc6e61f 100644 --- a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD +++ b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD @@ -6,15 +6,15 @@ importlib_metadata-6.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk importlib_metadata-6.0.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 importlib_metadata-6.0.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 importlib_metadata/__init__.py,sha256=wiMJxNXXhPtRRHSX2N9gGLnTh0YszmE1rn3uKYRrNcs,26490 -importlib_metadata/__pycache__/__init__.cpython-312.pyc,, -importlib_metadata/__pycache__/_adapters.cpython-312.pyc,, -importlib_metadata/__pycache__/_collections.cpython-312.pyc,, -importlib_metadata/__pycache__/_compat.cpython-312.pyc,, -importlib_metadata/__pycache__/_functools.cpython-312.pyc,, -importlib_metadata/__pycache__/_itertools.cpython-312.pyc,, -importlib_metadata/__pycache__/_meta.cpython-312.pyc,, -importlib_metadata/__pycache__/_py39compat.cpython-312.pyc,, -importlib_metadata/__pycache__/_text.cpython-312.pyc,, +importlib_metadata/__pycache__/__init__.cpython-38.pyc,, +importlib_metadata/__pycache__/_adapters.cpython-38.pyc,, +importlib_metadata/__pycache__/_collections.cpython-38.pyc,, +importlib_metadata/__pycache__/_compat.cpython-38.pyc,, +importlib_metadata/__pycache__/_functools.cpython-38.pyc,, +importlib_metadata/__pycache__/_itertools.cpython-38.pyc,, +importlib_metadata/__pycache__/_meta.cpython-38.pyc,, +importlib_metadata/__pycache__/_py39compat.cpython-38.pyc,, +importlib_metadata/__pycache__/_text.cpython-38.pyc,, importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454 importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 importlib_metadata/_compat.py,sha256=9zOKf0eDgkCMnnaEhU5kQVxHd1P8BIYV7Stso7av5h8,1857 diff --git a/setuptools/_vendor/importlib_metadata/__init__.py b/setuptools/_vendor/importlib_metadata/__init__.py index 8864214375..bf77430201 100644 --- a/setuptools/_vendor/importlib_metadata/__init__.py +++ b/setuptools/_vendor/importlib_metadata/__init__.py @@ -3,7 +3,7 @@ import abc import csv import sys -from .. 
import zipp +from setuptools.extern import zipp import email import pathlib import operator diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py index 84f9eea4f3..3d78566ea3 100644 --- a/setuptools/_vendor/importlib_metadata/_compat.py +++ b/setuptools/_vendor/importlib_metadata/_compat.py @@ -9,7 +9,7 @@ from typing import Protocol except ImportError: # pragma: no cover # Python 3.7 compatibility - from ..typing_extensions import Protocol # type: ignore + from typing_extensions import Protocol # type: ignore def install(cls): diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD index ba764991ee..bde2e486a0 100644 --- a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD +++ b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD @@ -6,15 +6,15 @@ importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQe importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 -importlib_resources/__pycache__/__init__.cpython-312.pyc,, -importlib_resources/__pycache__/_adapters.cpython-312.pyc,, -importlib_resources/__pycache__/_common.cpython-312.pyc,, -importlib_resources/__pycache__/_compat.cpython-312.pyc,, -importlib_resources/__pycache__/_itertools.cpython-312.pyc,, -importlib_resources/__pycache__/_legacy.cpython-312.pyc,, -importlib_resources/__pycache__/abc.cpython-312.pyc,, -importlib_resources/__pycache__/readers.cpython-312.pyc,, -importlib_resources/__pycache__/simple.cpython-312.pyc,, +importlib_resources/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/__pycache__/_adapters.cpython-38.pyc,, +importlib_resources/__pycache__/_common.cpython-38.pyc,, +importlib_resources/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/__pycache__/_itertools.cpython-38.pyc,, +importlib_resources/__pycache__/_legacy.cpython-38.pyc,, +importlib_resources/__pycache__/abc.cpython-38.pyc,, +importlib_resources/__pycache__/readers.cpython-38.pyc,, +importlib_resources/__pycache__/simple.cpython-38.pyc,, importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457 importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923 @@ -25,36 +25,36 @@ importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU, importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581 importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576 importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,, -importlib_resources/tests/__pycache__/_compat.cpython-312.pyc,, -importlib_resources/tests/__pycache__/_path.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,, 
-importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,, -importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,, -importlib_resources/tests/__pycache__/update-zips.cpython-312.pyc,, -importlib_resources/tests/__pycache__/util.cpython-312.pyc,, +importlib_resources/tests/__pycache__/__init__.cpython-38.pyc,, +importlib_resources/tests/__pycache__/_compat.cpython-38.pyc,, +importlib_resources/tests/__pycache__/_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_files.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_open.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_path.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_read.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc,, +importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc,, +importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc,, +importlib_resources/tests/__pycache__/util.cpython-38.pyc,, importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708 importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039 importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 @@ -70,8 +70,8 @@ 
importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4 importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417 importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873 importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-312.pyc,, +importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc,, importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/setuptools/_vendor/importlib_resources/_compat.py b/setuptools/_vendor/importlib_resources/_compat.py index 8b5b1d280f..590e4f654e 100644 --- a/setuptools/_vendor/importlib_resources/_compat.py +++ b/setuptools/_vendor/importlib_resources/_compat.py @@ -11,7 +11,7 @@ if sys.version_info >= (3, 10): from zipfile import Path as ZipPath # type: ignore else: - from ..zipp import Path as ZipPath # type: ignore + from setuptools.extern.zipp import Path as ZipPath # type: ignore try: diff --git a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py index d92c7c56c9..2eba3a741d 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py +++ b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py @@ -1,9 +1,9 @@ import io import unittest -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources -from importlib_resources._adapters import ( +from setuptools.extern.importlib_resources._adapters import ( CompatibilityFiles, wrap_spec, ) diff --git a/setuptools/_vendor/importlib_resources/tests/test_contents.py b/setuptools/_vendor/importlib_resources/tests/test_contents.py index 525568e8c9..233f360919 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_contents.py +++ b/setuptools/_vendor/importlib_resources/tests/test_contents.py @@ -1,5 +1,5 @@ import unittest -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources from . import data01 from . import util diff --git a/setuptools/_vendor/importlib_resources/tests/test_files.py b/setuptools/_vendor/importlib_resources/tests/test_files.py index d258fb5f0f..29dd7435d2 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_files.py +++ b/setuptools/_vendor/importlib_resources/tests/test_files.py @@ -5,7 +5,7 @@ import importlib import contextlib -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources from ..abc import Traversable from . import data01 from . 
import util @@ -97,7 +97,7 @@ def test_implicit_files(self): 'somepkg': { '__init__.py': textwrap.dedent( """ - import importlib_resources as res + from setuptools.extern import importlib_resources as res val = res.files().joinpath('res.txt').read_text() """ ), diff --git a/setuptools/_vendor/importlib_resources/tests/test_open.py b/setuptools/_vendor/importlib_resources/tests/test_open.py index 87b42c3d39..0fe04e205f 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_open.py +++ b/setuptools/_vendor/importlib_resources/tests/test_open.py @@ -1,6 +1,6 @@ import unittest -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources from . import data01 from . import util diff --git a/setuptools/_vendor/importlib_resources/tests/test_path.py b/setuptools/_vendor/importlib_resources/tests/test_path.py index 4f4d3943bb..7f563c1d14 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_path.py +++ b/setuptools/_vendor/importlib_resources/tests/test_path.py @@ -1,7 +1,7 @@ import io import unittest -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources from . import data01 from . import util diff --git a/setuptools/_vendor/importlib_resources/tests/test_read.py b/setuptools/_vendor/importlib_resources/tests/test_read.py index 41dd6db5f3..bbba3303ab 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_read.py +++ b/setuptools/_vendor/importlib_resources/tests/test_read.py @@ -1,5 +1,5 @@ import unittest -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources from . import data01 from . import util diff --git a/setuptools/_vendor/importlib_resources/tests/test_reader.py b/setuptools/_vendor/importlib_resources/tests/test_reader.py index 1c8ebeeb13..63fe0ed8ac 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_reader.py +++ b/setuptools/_vendor/importlib_resources/tests/test_reader.py @@ -4,7 +4,7 @@ import unittest from importlib import import_module -from importlib_resources.readers import MultiplexedPath, NamespaceReader +from setuptools.extern.importlib_resources.readers import MultiplexedPath, NamespaceReader class MultiplexedPathTest(unittest.TestCase): diff --git a/setuptools/_vendor/importlib_resources/tests/test_resource.py b/setuptools/_vendor/importlib_resources/tests/test_resource.py index 8239027167..e873c72664 100644 --- a/setuptools/_vendor/importlib_resources/tests/test_resource.py +++ b/setuptools/_vendor/importlib_resources/tests/test_resource.py @@ -1,6 +1,6 @@ import sys import unittest -import importlib_resources as resources +from setuptools.extern import importlib_resources as resources import uuid import pathlib diff --git a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD index 09d191f214..f3ff478f21 100644 --- a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD +++ b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD @@ -4,5 +4,5 @@ jaraco.context-5.3.0.dist-info/METADATA,sha256=xDtguJej0tN9iEXCUvxEJh2a7xceIRVBE jaraco.context-5.3.0.dist-info/RECORD,, jaraco.context-5.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 jaraco.context-5.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 -jaraco/__pycache__/context.cpython-312.pyc,, +jaraco/__pycache__/context.cpython-38.pyc,, jaraco/context.py,sha256=REoLIxDkO5MfEYowt_WoupNCRoxBS5v7YX2PbW8lIcs,9552 diff --git 
a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD index 783aa7d2b9..af1363da1d 100644 --- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD +++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD @@ -6,5 +6,5 @@ jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPE jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642 jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982 -jaraco/functools/__pycache__/__init__.cpython-312.pyc,, +jaraco/functools/__pycache__/__init__.cpython-38.pyc,, jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD index c698101cb4..a7551d1070 100644 --- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD @@ -7,4 +7,4 @@ jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FG jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7 jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335 jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538 -jaraco/text/__pycache__/__init__.cpython-312.pyc,, +jaraco/text/__pycache__/__init__.cpython-38.pyc,, diff --git a/setuptools/_vendor/jaraco/functools/__init__.py b/setuptools/_vendor/jaraco/functools/__init__.py index 130b87a485..03e0116013 100644 --- a/setuptools/_vendor/jaraco/functools/__init__.py +++ b/setuptools/_vendor/jaraco/functools/__init__.py @@ -7,7 +7,7 @@ import types import warnings -import setuptools.extern.more_itertools +from setuptools.extern import more_itertools def compose(*funcs): @@ -603,10 +603,10 @@ def splat(func): simple ``map``. 
>>> pairs = [(-1, 1), (0, 2)] - >>> setuptools.extern.more_itertools.consume(itertools.starmap(print, pairs)) + >>> more_itertools.consume(itertools.starmap(print, pairs)) -1 1 0 2 - >>> setuptools.extern.more_itertools.consume(map(splat(print), pairs)) + >>> more_itertools.consume(map(splat(print), pairs)) -1 1 0 2 diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER b/setuptools/_vendor/more_itertools-10.2.0.dist-info/INSTALLER similarity index 100% rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER rename to setuptools/_vendor/more_itertools-10.2.0.dist-info/INSTALLER diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE b/setuptools/_vendor/more_itertools-10.2.0.dist-info/LICENSE similarity index 100% rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE rename to setuptools/_vendor/more_itertools-10.2.0.dist-info/LICENSE diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA b/setuptools/_vendor/more_itertools-10.2.0.dist-info/METADATA similarity index 60% rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA rename to setuptools/_vendor/more_itertools-10.2.0.dist-info/METADATA index bdaee6553f..f54f1ff279 100644 --- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA +++ b/setuptools/_vendor/more_itertools-10.2.0.dist-info/METADATA @@ -1,28 +1,26 @@ Metadata-Version: 2.1 Name: more-itertools -Version: 8.8.0 +Version: 10.2.0 Summary: More routines for operating on iterables, beyond itertools -Home-page: https://github.com/more-itertools/more-itertools -Author: Erik Rose -Author-email: erikrose@grinchcentral.com -License: MIT -Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked -Platform: UNKNOWN +Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked +Author-email: Erik Rose +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Natural Language :: English Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Software Development :: Libraries -Requires-Python: >=3.5 -Description-Content-Type: text/x-rst +Project-URL: Homepage, https://github.com/more-itertools/more-itertools ============== More Itertools @@ -36,124 +34,162 @@ for a variety of problems with the functions it provides. In ``more-itertools`` we collect additional building blocks, recipes, and routines for working with Python iterables. 
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Grouping | `chunked `_, | -| | `ichunked `_, | -| | `sliced `_, | -| | `distribute `_, | -| | `divide `_, | -| | `split_at `_, | -| | `split_before `_, | -| | `split_after `_, | -| | `split_into `_, | -| | `split_when `_, | -| | `bucket `_, | -| | `unzip `_, | -| | `grouper `_, | -| | `partition `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Lookahead and lookback | `spy `_, | -| | `peekable `_, | -| | `seekable `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Windowing | `windowed `_, | -| | `substrings `_, | -| | `substrings_indexes `_, | -| | `stagger `_, | -| | `windowed_complete `_, | -| | `pairwise `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Augmenting | `count_cycle `_, | -| | `intersperse `_, | -| | `padded `_, | -| | `mark_ends `_, | -| | `repeat_last `_, | -| | `adjacent `_, | -| | `groupby_transform `_, | -| | `padnone `_, | -| | `ncycles `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Combining | `collapse `_, | -| | `sort_together `_, | -| | `interleave `_, | -| | `interleave_longest `_, | -| | `zip_offset `_, | -| | `zip_equal `_, | -| | `dotproduct `_, | -| | `convolve `_, | -| | `flatten `_, | -| | `roundrobin `_, | -| | `prepend `_, | -| | `value_chain `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Summarizing | `ilen `_, | -| | `unique_to_each `_, | -| | `sample `_, | -| | `consecutive_groups `_, | -| | `run_length `_, | -| | `map_reduce `_, | -| | `exactly_n `_, | -| | `is_sorted `_, | -| | `all_equal `_, | -| | `all_unique `_, | -| | `first_true `_, | -| | `quantify `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Selecting | `islice_extended `_, | -| | `first `_, | -| | `last `_, | -| | `one `_, | -| | `only `_, | -| | `strip `_, | -| | `lstrip `_, | -| | `rstrip `_, | -| | `filter_except `_ | -| | `map_except `_ | -| | `nth_or_last `_, | -| | `nth `_, | -| | `take `_, | -| | `tail `_, | -| | `unique_everseen `_, | -| | `unique_justseen `_ | 
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Combinatorics | `distinct_permutations `_, | -| | `distinct_combinations `_, | -| | `circular_shifts `_, | -| | `partitions `_, | -| | `set_partitions `_, | -| | `product_index `_, | -| | `combination_index `_, | -| | `permutation_index `_, | -| | `powerset `_, | -| | `random_product `_, | -| | `random_permutation `_, | -| | `random_combination `_, | -| | `random_combination_with_replacement `_, | -| | `nth_product `_ | -| | `nth_permutation `_ | -| | `nth_combination `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Wrapping | `always_iterable `_, | -| | `always_reversible `_, | -| | `countable `_, | -| | `consumer `_, | -| | `with_iter `_, | -| | `iter_except `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Others | `locate `_, | -| | `rlocate `_, | -| | `replace `_, | -| | `numeric_range `_, | -| | `side_effect `_, | -| | `iterate `_, | -| | `difference `_, | -| | `make_decorator `_, | -| | `SequenceView `_, | -| | `time_limited `_, | -| | `consume `_, | -| | `tabulate `_, | -| | `repeatfunc `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Grouping | `chunked `_, | +| | `ichunked `_, | +| | `chunked_even `_, | +| | `sliced `_, | +| | `constrained_batches `_, | +| | `distribute `_, | +| | `divide `_, | +| | `split_at `_, | +| | `split_before `_, | +| | `split_after `_, | +| | `split_into `_, | +| | `split_when `_, | +| | `bucket `_, | +| | `unzip `_, | +| | `batched `_, | +| | `grouper `_, | +| | `partition `_, | +| | `transpose `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Lookahead and lookback | `spy `_, | +| | `peekable `_, | +| | `seekable `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Windowing | `windowed `_, | +| | `substrings `_, | +| | `substrings_indexes `_, | +| | `stagger `_, | +| | `windowed_complete `_, | +| | `pairwise `_, | +| | `triplewise `_, | +| | `sliding_window `_, | +| | `subslices `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Augmenting | `count_cycle `_, | +| | `intersperse `_, | +| | `padded `_, | +| | `repeat_each `_, | +| | `mark_ends `_, | +| | 
`repeat_last `_, | +| | `adjacent `_, | +| | `groupby_transform `_, | +| | `pad_none `_, | +| | `ncycles `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Combining | `collapse `_, | +| | `sort_together `_, | +| | `interleave `_, | +| | `interleave_longest `_, | +| | `interleave_evenly `_, | +| | `zip_offset `_, | +| | `zip_equal `_, | +| | `zip_broadcast `_, | +| | `dotproduct `_, | +| | `convolve `_, | +| | `flatten `_, | +| | `roundrobin `_, | +| | `prepend `_, | +| | `value_chain `_, | +| | `partial_product `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Summarizing | `ilen `_, | +| | `unique_to_each `_, | +| | `sample `_, | +| | `consecutive_groups `_, | +| | `run_length `_, | +| | `map_reduce `_, | +| | `exactly_n `_, | +| | `is_sorted `_, | +| | `all_equal `_, | +| | `all_unique `_, | +| | `minmax `_, | +| | `first_true `_, | +| | `quantify `_, | +| | `iequals `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Selecting | `islice_extended `_, | +| | `first `_, | +| | `last `_, | +| | `one `_, | +| | `only `_, | +| | `strictly_n `_, | +| | `strip `_, | +| | `lstrip `_, | +| | `rstrip `_, | +| | `filter_except `_, | +| | `map_except `_, | +| | `filter_map `_, | +| | `iter_suppress `_, | +| | `nth_or_last `_, | +| | `unique_in_window `_, | +| | `before_and_after `_, | +| | `nth `_, | +| | `take `_, | +| | `tail `_, | +| | `unique_everseen `_, | +| | `unique_justseen `_, | +| | `duplicates_everseen `_, | +| | `duplicates_justseen `_, | +| | `classify_unique `_, | +| | `longest_common_prefix `_, | +| | `takewhile_inclusive `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Combinatorics | `distinct_permutations `_, | +| | `distinct_combinations `_, | +| | `circular_shifts `_, | +| | `partitions `_, | +| | `set_partitions `_, | +| | `product_index `_, | +| | `combination_index `_, | +| | `permutation_index `_, | +| | `combination_with_replacement_index `_, | +| | `gray_product `_, | +| | `outer_product `_, | +| | `powerset `_, | +| | `random_product `_, | +| | `random_permutation `_, | +| | `random_combination `_, | +| | `random_combination_with_replacement `_, | +| | `nth_product `_, | +| | `nth_permutation `_, | +| | `nth_combination `_, | +| | `nth_combination_with_replacement `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Wrapping | `always_iterable `_, | +| | `always_reversible `_, | +| | `countable `_, | +| | `consumer `_, | +| | `with_iter `_, | +| | `iter_except `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Others | `locate `_, | +| | `rlocate `_, | +| | `replace `_, | +| | `numeric_range `_, | +| | `side_effect `_, | +| | `iterate 
`_, | +| | `difference `_, | +| | `make_decorator `_, | +| | `SequenceView `_, | +| | `time_limited `_, | +| | `map_if `_, | +| | `iter_index `_, | +| | `consume `_, | +| | `tabulate `_, | +| | `repeatfunc `_, | +| | `polynomial_from_roots `_, | +| | `polynomial_eval `_, | +| | `polynomial_derivative `_, | +| | `sieve `_, | +| | `factor `_, | +| | `matmul `_, | +| | `sum_of_squares `_, | +| | `totient `_, | +| | `reshape `_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ Getting started @@ -204,6 +240,7 @@ Blog posts about ``more-itertools``: * `Yo, I heard you like decorators `__ * `Tour of Python Itertools `__ (`Alternate `__) +* `Real-World Python More Itertools `_ Development @@ -218,245 +255,5 @@ repository. Thanks for contributing! Version History =============== - - :noindex: - -8.8.0 ------ - -* New functions - * countable (thanks to krzysieq) - -* Changes to existing functions - * split_before was updated to handle empy collections (thanks to TiunovNN) - * unique_everseen got a performance boost (thanks to Numerlor) - * The type hint for value_chain was corrected (thanks to vr2262) - -8.7.0 ------ - -* New functions - * convolve (from the Python itertools docs) - * product_index, combination_index, and permutation_index (thanks to N8Brooks) - * value_chain (thanks to jenstroeger) - -* Changes to existing functions - * distinct_combinations now uses a non-recursive algorithm (thanks to knutdrand) - * pad_none is now the preferred name for padnone, though the latter remains available. - * pairwise will now use the Python standard library implementation on Python 3.10+ - * sort_together now accepts a ``key`` argument (thanks to brianmaissy) - * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis) - * time_limited can now indicate whether its iterator has expired (thanks to roysmith) - * The implementation of unique_everseen was improved (thanks to plammens) - -* Other changes: - * Various documentation updates (thanks to cthoyt, Evantm, and cyphase) - -8.6.0 ------ - -* New itertools - * all_unique (thanks to brianmaissy) - * nth_product and nth_permutation (thanks to N8Brooks) - -* Changes to existing itertools - * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool) - -* Other changes - * Python 3.5 has reached its end of life and is no longer supported. - * Python 3.9 is officially supported. - * Various documentation fixes (thanks to timgates42) - -8.5.0 ------ - -* New itertools - * windowed_complete (thanks to MarcinKonowalczyk) - -* Changes to existing itertools: - * The is_sorted implementation was improved (thanks to cool-RR) - * The groupby_transform now accepts a ``reducefunc`` parameter. - * The last implementation was improved (thanks to brianmaissy) - -* Other changes - * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0) - * The tests for distinct_combinations were improved (thanks to Minabsapi) - * Automated tests now run on GitHub Actions. 
All commits now check: - * That unit tests pass - * That the examples in docstrings work - * That test coverage remains high (using `coverage`) - * For linting errors (using `flake8`) - * For consistent style (using `black`) - * That the type stubs work (using `mypy`) - * That the docs build correctly (using `sphinx`) - * That packages build correctly (using `twine`) - -8.4.0 ------ - -* New itertools - * mark_ends (thanks to kalekundert) - * is_sorted - -* Changes to existing itertools: - * islice_extended can now be used with real slices (thanks to cool-RR) - * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky) - -* Other changes - * Automated tests now enforce code style (using `black `__) - * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf) - * The test configuration for mypy was updated (thanks to blueyed) - - -8.3.0 ------ - -* New itertools - * zip_equal (thanks to frankier and alexmojaki) - -* Changes to existing itertools: - * split_at, split_before, split_after, and split_when all got a ``maxsplit`` paramter (thanks to jferard and ilai-deutel) - * split_at now accepts a ``keep_separator`` parameter (thanks to jferard) - * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel) - * The windowed implementation was improved (thanks to SergBobrovsky) - * The spy implementation was improved (thanks to has2k1) - -* Other changes - * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel) - * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne) - -8.2.0 ------ - -* Bug fixes - * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel) - -* Changes to existing itertools: - * numeric_range now behaves more like the built-in range. (thanks to jferard) - * bucket now allows for enumerating keys. (thanks to alexchandel) - * sliced now should now work for numpy arrays. (thanks to sswingle) - * seekable now has a ``maxlen`` parameter. - -8.1.0 ------ - -* Bug fixes - * partition works with ``pred=None`` again. (thanks to MSeifert04) - -* New itertools - * sample (thanks to tommyod) - * nth_or_last (thanks to d-ryzhikov) - -* Changes to existing itertools: - * The implementation for divide was improved. (thanks to jferard) - -8.0.2 ------ - -* Bug fixes - * The type stub files are now part of the wheel distribution (thanks to keisheiled) - -8.0.1 ------ - -* Bug fixes - * The type stub files now work for functions imported from the - root package (thanks to keisheiled) - -8.0.0 ------ - -* New itertools and other additions - * This library now ships type hints for use with mypy. - (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance) - * split_when (thanks to jferard) - * repeat_last (thanks to d-ryzhikov) - -* Changes to existing itertools: - * The implementation for set_partitions was improved. (thanks to jferard) - * partition was optimized for expensive predicates. (thanks to stevecj) - * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky) - * The implementation for difference was improved. (thanks to Jabbey92) - -* Other changes - * Python 3.4 has reached its end of life and is no longer supported. - * Python 3.8 is officially supported. (thanks to jdufresne) - * The ``collate`` function has been deprecated. - It raises a ``DeprecationWarning`` if used, and will be removed in a future release. 
- * one and only now provide more informative error messages. (thanks to gabbard) - * Unit tests were moved outside of the main package (thanks to jdufresne) - * Various documentation fixes (thanks to kriomant, gabbard, jdufresne) - - -7.2.0 ------ - -* New itertools - * distinct_combinations - * set_partitions (thanks to kbarrett) - * filter_except - * map_except - -7.1.0 ------ - -* New itertools - * ichunked (thanks davebelais and youtux) - * only (thanks jaraco) - -* Changes to existing itertools: - * numeric_range now supports ranges specified by - ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests). - * difference now supports an *initial* keyword argument. - - -* Other changes - * Various documentation fixes (thanks raimon49, pylang) - -7.0.0 ------ - -* New itertools: - * time_limited - * partitions (thanks to rominf and Saluev) - * substrings_indexes (thanks to rominf) - -* Changes to existing itertools: - * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet) - -The major version update is due to the change in the default behavior of -collapse. It now treats ``bytes`` objects the same as ``str`` objects. -This aligns its behavior with always_iterable. - -.. code-block:: python - - >>> from more_itertools import collapse - >>> iterable = [[1, 2], b'345', [6]] - >>> print(list(collapse(iterable))) - [1, 2, b'345', 6] - -6.0.0 ------ - -* Major changes: - * Python 2.7 is no longer supported. The 5.0.0 release will be the last - version targeting Python 2.7. - * All future releases will target the active versions of Python 3. - As of 2019, those are Python 3.4 and above. - * The ``six`` library is no longer a dependency. - * The accumulate function is no longer part of this library. You - may import a better version from the standard ``itertools`` module. - -* Changes to existing itertools: - * The order of the parameters in grouper have changed to match - the latest recipe in the itertools documentation. Use of the old order - will be supported in this release, but emit a ``DeprecationWarning``. - The legacy behavior will be dropped in a future release. (thanks to jaraco) - * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values `_ at StackOverflow.) - * An unused parameter was removed from substrings. (thanks to pylang) - -* Other changes: - * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04) - * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk) - +The version history can be found in `documentation `_. 
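The table above is the public catalogue of the newly vendored ``more_itertools`` 10.2.0. For orientation only (this snippet is not part of the patch; it is a minimal sketch assuming a vendored or installed ``more_itertools`` 10.x is importable), a few of the vendored routines in use:

.. code-block:: python

    # Illustrative only; not part of the vendored diff.
    from more_itertools import chunked, minmax, unique_everseen, takewhile_inclusive

    # Break an iterable into fixed-size lists (the last one may be shorter).
    print(list(chunked(range(7), 3)))        # [[0, 1, 2], [3, 4, 5], [6]]

    # Smallest and largest item in a single pass ("Summarizing").
    print(minmax([3, 1, 5]))                 # (1, 5)

    # Drop items already seen earlier in the stream ("Selecting").
    print(list(unique_everseen('AAAABBBCCDAABBB')))  # ['A', 'B', 'C', 'D']

    # Added since the previously vendored 8.8.0: keep taking items while the
    # predicate holds, plus the first item that fails it.
    print(list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1])))  # [1, 4, 6]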
diff --git a/setuptools/_vendor/more_itertools-10.2.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-10.2.0.dist-info/RECORD new file mode 100644 index 0000000000..a471517a57 --- /dev/null +++ b/setuptools/_vendor/more_itertools-10.2.0.dist-info/RECORD @@ -0,0 +1,16 @@ +more_itertools-10.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +more_itertools-10.2.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053 +more_itertools-10.2.0.dist-info/METADATA,sha256=lTIPxfD4IiP6aHzPjP4dXmzRRUmiXicAB6qnY82T-Gs,34886 +more_itertools-10.2.0.dist-info/RECORD,, +more_itertools-10.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +more_itertools-10.2.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81 +more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149 +more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 +more_itertools/__pycache__/__init__.cpython-38.pyc,, +more_itertools/__pycache__/more.cpython-38.pyc,, +more_itertools/__pycache__/recipes.cpython-38.pyc,, +more_itertools/more.py,sha256=jYdpbgXHf8yZDByPrhluxpe0D_IXRk2tfQnyfOFMi74,143045 +more_itertools/more.pyi,sha256=KTHYeqr0rFbn1GWRnv0jY64JRNnKKT0kA3kmsah8DYQ,21044 +more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +more_itertools/recipes.py,sha256=Rb3OhzJTCn2biutDEUSImbuY-8NDS1lkHt0My-uCOf4,27548 +more_itertools/recipes.pyi,sha256=T1IuEVXCqw2NeJJNW036MtWi8BVfR8Ilpf7cBmvhBaQ,4436 diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/setuptools/_vendor/more_itertools-10.2.0.dist-info/REQUESTED similarity index 100% rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED rename to setuptools/_vendor/more_itertools-10.2.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL b/setuptools/_vendor/more_itertools-10.2.0.dist-info/WHEEL similarity index 64% rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL rename to setuptools/_vendor/more_itertools-10.2.0.dist-info/WHEEL index 385faab052..db4a255f3a 100644 --- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL +++ b/setuptools/_vendor/more_itertools-10.2.0.dist-info/WHEEL @@ -1,5 +1,4 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) +Generator: flit 3.8.0 Root-Is-Purelib: true Tag: py3-none-any - diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD deleted file mode 100644 index d1a6ea0d22..0000000000 --- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD +++ /dev/null @@ -1,17 +0,0 @@ -more_itertools-8.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -more_itertools-8.8.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053 -more_itertools-8.8.0.dist-info/METADATA,sha256=Gke9w7RnfiAvveik_iBBrzd0RjrDhsQ8uRYNBJdo4qQ,40482 -more_itertools-8.8.0.dist-info/RECORD,, -more_itertools-8.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -more_itertools-8.8.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 -more_itertools-8.8.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15 -more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82 -more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 -more_itertools/__pycache__/__init__.cpython-312.pyc,, 
-more_itertools/__pycache__/more.cpython-312.pyc,, -more_itertools/__pycache__/recipes.cpython-312.pyc,, -more_itertools/more.py,sha256=DlZa8v6JihVwfQ5zHidOA-xDE0orcQIUyxVnCaUoDKE,117968 -more_itertools/more.pyi,sha256=r32pH2raBC1zih3evK4fyvAXvrUamJqc6dgV7QCRL_M,14977 -more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256 -more_itertools/recipes.pyi,sha256=9BpeKd5_qalYVSnuHfqPSCfoGgqnQY2Xu9pNwrDlHU8,3551 diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED b/setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt b/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt deleted file mode 100644 index a5035befb3..0000000000 --- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -more_itertools diff --git a/setuptools/_vendor/more_itertools/__init__.py b/setuptools/_vendor/more_itertools/__init__.py index 19a169fc30..aff94a9abd 100644 --- a/setuptools/_vendor/more_itertools/__init__.py +++ b/setuptools/_vendor/more_itertools/__init__.py @@ -1,4 +1,6 @@ +"""More routines for operating on iterables, beyond itertools""" + from .more import * # noqa from .recipes import * # noqa -__version__ = '8.8.0' +__version__ = '10.2.0' diff --git a/setuptools/_vendor/more_itertools/more.py b/setuptools/_vendor/more_itertools/more.py old mode 100644 new mode 100755 index e6fca4d47f..d0957681f5 --- a/setuptools/_vendor/more_itertools/more.py +++ b/setuptools/_vendor/more_itertools/more.py @@ -2,8 +2,8 @@ from collections import Counter, defaultdict, deque, abc from collections.abc import Sequence -from functools import partial, reduce, wraps -from heapq import merge, heapify, heapreplace, heappop +from functools import cached_property, partial, reduce, wraps +from heapq import heapify, heapreplace, heappop from itertools import ( chain, compress, @@ -17,72 +17,101 @@ takewhile, tee, zip_longest, + product, ) -from math import exp, factorial, floor, log +from math import exp, factorial, floor, log, perm, comb from queue import Empty, Queue from random import random, randrange, uniform -from operator import itemgetter, mul, sub, gt, lt +from operator import itemgetter, mul, sub, gt, lt, ge, le from sys import hexversion, maxsize from time import monotonic from .recipes import ( + _marker, + _zip_equal, + UnequalIterablesError, consume, flatten, pairwise, powerset, take, unique_everseen, + all_equal, + batched, ) __all__ = [ 'AbortThread', + 'SequenceView', + 'UnequalIterablesError', 'adjacent', + 'all_unique', 'always_iterable', 'always_reversible', 'bucket', 'callback_iter', 'chunked', + 'chunked_even', 'circular_shifts', 'collapse', - 'collate', + 'combination_index', + 'combination_with_replacement_index', 'consecutive_groups', + 'constrained_batches', 'consumer', - 'countable', 'count_cycle', - 'mark_ends', + 'countable', 'difference', 'distinct_combinations', 'distinct_permutations', 'distribute', 'divide', + 'duplicates_everseen', + 'duplicates_justseen', + 'classify_unique', 'exactly_n', 'filter_except', + 'filter_map', 'first', + 'gray_product', 'groupby_transform', + 'ichunked', + 'iequals', 'ilen', - 'interleave_longest', 'interleave', + 'interleave_evenly', + 'interleave_longest', 'intersperse', + 'is_sorted', 'islice_extended', 'iterate', - 'ichunked', - 'is_sorted', + 'iter_suppress', 
'last', 'locate', + 'longest_common_prefix', 'lstrip', 'make_decorator', 'map_except', + 'map_if', 'map_reduce', + 'mark_ends', + 'minmax', 'nth_or_last', 'nth_permutation', 'nth_product', + 'nth_combination_with_replacement', 'numeric_range', 'one', 'only', + 'outer_product', 'padded', + 'partial_product', 'partitions', - 'set_partitions', 'peekable', + 'permutation_index', + 'product_index', + 'raise_', + 'repeat_each', 'repeat_last', 'replace', 'rlocate', @@ -90,38 +119,35 @@ 'run_length', 'sample', 'seekable', - 'SequenceView', + 'set_partitions', 'side_effect', 'sliced', 'sort_together', - 'split_at', 'split_after', + 'split_at', 'split_before', - 'split_when', 'split_into', + 'split_when', 'spy', 'stagger', 'strip', + 'strictly_n', 'substrings', 'substrings_indexes', + 'takewhile_inclusive', 'time_limited', + 'unique_in_window', 'unique_to_each', 'unzip', + 'value_chain', 'windowed', + 'windowed_complete', 'with_iter', - 'UnequalIterablesError', + 'zip_broadcast', 'zip_equal', 'zip_offset', - 'windowed_complete', - 'all_unique', - 'value_chain', - 'product_index', - 'combination_index', - 'permutation_index', ] -_marker = object() - def chunked(iterable, n, strict=False): """Break *iterable* into lists of length *n*: @@ -144,6 +170,8 @@ def chunked(iterable, n, strict=False): """ iterator = iter(partial(take, n, iter(iterable)), []) if strict: + if n is None: + raise ValueError('n must not be None when using strict mode.') def ret(): for chunk in iterator: @@ -173,15 +201,14 @@ def first(iterable, default=_marker): ``next(iter(iterable), default)``. """ - try: - return next(iter(iterable)) - except StopIteration as e: - if default is _marker: - raise ValueError( - 'first() was called on an empty iterable, and no ' - 'default value was provided.' - ) from e - return default + for item in iterable: + return item + if default is _marker: + raise ValueError( + 'first() was called on an empty iterable, and no ' + 'default value was provided.' + ) + return default def last(iterable, default=_marker): @@ -395,44 +422,6 @@ def __getitem__(self, index): return self._cache[index] -def collate(*iterables, **kwargs): - """Return a sorted merge of the items from each of several already-sorted - *iterables*. - - >>> list(collate('ACDZ', 'AZ', 'JKL')) - ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z'] - - Works lazily, keeping only the next value from each iterable in memory. Use - :func:`collate` to, for example, perform a n-way mergesort of items that - don't fit in memory. - - If a *key* function is specified, the iterables will be sorted according - to its result: - - >>> key = lambda s: int(s) # Sort by numeric value, not by string - >>> list(collate(['1', '10'], ['2', '11'], key=key)) - ['1', '2', '10', '11'] - - - If the *iterables* are sorted in descending order, set *reverse* to - ``True``: - - >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True)) - [5, 4, 3, 2, 1, 0] - - If the elements of the passed-in iterables are out of order, you might get - unexpected results. - - On Python 3.5+, this function is an alias for :func:`heapq.merge`. 
- - """ - warnings.warn( - "collate is no longer part of more_itertools, use heapq.merge", - DeprecationWarning, - ) - return merge(*iterables, **kwargs) - - def consumer(func): """Decorator that automatically advances a PEP-342-style "reverse iterator" to its first yield point so you don't have to call ``next()`` on it @@ -492,7 +481,10 @@ def iterate(func, start): """ while True: yield start - start = func(start) + try: + start = func(start) + except StopIteration: + break def with_iter(context_manager): @@ -577,6 +569,87 @@ def one(iterable, too_short=None, too_long=None): return first_value +def raise_(exception, *args): + raise exception(*args) + + +def strictly_n(iterable, n, too_short=None, too_long=None): + """Validate that *iterable* has exactly *n* items and return them if + it does. If it has fewer than *n* items, call function *too_short* + with those items. If it has more than *n* items, call function + *too_long* with the first ``n + 1`` items. + + >>> iterable = ['a', 'b', 'c', 'd'] + >>> n = 4 + >>> list(strictly_n(iterable, n)) + ['a', 'b', 'c', 'd'] + + Note that the returned iterable must be consumed in order for the check to + be made. + + By default, *too_short* and *too_long* are functions that raise + ``ValueError``. + + >>> list(strictly_n('ab', 3)) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: too few items in iterable (got 2) + + >>> list(strictly_n('abc', 2)) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: too many items in iterable (got at least 3) + + You can instead supply functions that do something else. + *too_short* will be called with the number of items in *iterable*. + *too_long* will be called with `n + 1`. + + >>> def too_short(item_count): + ... raise RuntimeError + >>> it = strictly_n('abcd', 6, too_short=too_short) + >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + RuntimeError + + >>> def too_long(item_count): + ... print('The boss is going to hear about this') + >>> it = strictly_n('abcdef', 4, too_long=too_long) + >>> list(it) + The boss is going to hear about this + ['a', 'b', 'c', 'd'] + + """ + if too_short is None: + too_short = lambda item_count: raise_( + ValueError, + 'Too few items in iterable (got {})'.format(item_count), + ) + + if too_long is None: + too_long = lambda item_count: raise_( + ValueError, + 'Too many items in iterable (got at least {})'.format(item_count), + ) + + it = iter(iterable) + for i in range(n): + try: + item = next(it) + except StopIteration: + too_short(i) + return + else: + yield item + + try: + next(it) + except StopIteration: + pass + else: + too_long(n + 1) + + def distinct_permutations(iterable, r=None): """Yield successive distinct permutations of the elements in *iterable*. @@ -601,6 +674,7 @@ def distinct_permutations(iterable, r=None): [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] """ + # Algorithm: https://w.wiki/Qai def _full(A): while True: @@ -691,8 +765,8 @@ def intersperse(e, iterable, n=1): if n == 0: raise ValueError('n must be > 0') elif n == 1: - # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2... - # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2... + # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2... + # islice(..., 1, None) -> x_0, e, x_1, e, x_2... return islice(interleave(repeat(e), iterable), 1, None) else: # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]... 
@@ -780,7 +854,9 @@ def windowed(seq, n, fillvalue=None, step=1): yield tuple(window) size = len(window) - if size < n: + if size == 0: + return + elif size < n: yield tuple(chain(window, repeat(fillvalue, n - size))) elif 0 < i < min(step, n): window += (fillvalue,) * i @@ -848,7 +924,7 @@ def substrings_indexes(seq, reverse=False): class bucket: - """Wrap *iterable* and return an object that buckets it iterable into + """Wrap *iterable* and return an object that buckets the iterable into child iterables based on a *key* function. >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] @@ -1016,6 +1092,72 @@ def interleave_longest(*iterables): return (x for x in i if x is not _marker) +def interleave_evenly(iterables, lengths=None): + """ + Interleave multiple iterables so that their elements are evenly distributed + throughout the output sequence. + + >>> iterables = [1, 2, 3, 4, 5], ['a', 'b'] + >>> list(interleave_evenly(iterables)) + [1, 2, 'a', 3, 4, 'b', 5] + + >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]] + >>> list(interleave_evenly(iterables)) + [1, 6, 4, 2, 7, 3, 8, 5] + + This function requires iterables of known length. Iterables without + ``__len__()`` can be used by manually specifying lengths with *lengths*: + + >>> from itertools import combinations, repeat + >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']] + >>> lengths = [4 * (4 - 1) // 2, 3] + >>> list(interleave_evenly(iterables, lengths=lengths)) + [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c'] + + Based on Bresenham's algorithm. + """ + if lengths is None: + try: + lengths = [len(it) for it in iterables] + except TypeError: + raise ValueError( + 'Iterable lengths could not be determined automatically. ' + 'Specify them with the lengths keyword.' + ) + elif len(iterables) != len(lengths): + raise ValueError('Mismatching number of iterables and lengths.') + + dims = len(lengths) + + # sort iterables by length, descending + lengths_permute = sorted( + range(dims), key=lambda i: lengths[i], reverse=True + ) + lengths_desc = [lengths[i] for i in lengths_permute] + iters_desc = [iter(iterables[i]) for i in lengths_permute] + + # the longest iterable is the primary one (Bresenham: the longest + # distance along an axis) + delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:] + iter_primary, iters_secondary = iters_desc[0], iters_desc[1:] + errors = [delta_primary // dims] * len(deltas_secondary) + + to_yield = sum(lengths) + while to_yield: + yield next(iter_primary) + to_yield -= 1 + # update errors for each secondary iterable + errors = [e - delta for e, delta in zip(errors, deltas_secondary)] + + # those iterables for which the error is negative are yielded + # ("diagonal step" in Bresenham) + for i, e in enumerate(errors): + if e < 0: + yield next(iters_secondary[i]) + to_yield -= 1 + errors[i] += delta_primary + + def collapse(iterable, base_type=None, levels=None): """Flatten an iterable with multiple levels of nesting (e.g., a list of lists of tuples) into non-iterable types. @@ -1176,7 +1318,7 @@ def split_at(iterable, pred, maxsplit=-1, keep_separator=False): [[0], [2], [4, 5, 6, 7, 8, 9]] By default, the delimiting items are not included in the output. - The include them, set *keep_separator* to ``True``. + To include them, set *keep_separator* to ``True``. 
>>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True)) [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']] @@ -1266,7 +1408,9 @@ def split_after(iterable, pred, maxsplit=-1): if pred(item) and buf: yield buf if maxsplit == 1: - yield list(it) + buf = list(it) + if buf: + yield buf return buf = [] maxsplit -= 1 @@ -1396,6 +1540,15 @@ def padded(iterable, fillvalue=None, n=None, next_multiple=False): yield fillvalue +def repeat_each(iterable, n=2): + """Repeat each element in *iterable* *n* times. + + >>> list(repeat_each('ABC', 3)) + ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C'] + """ + return chain.from_iterable(map(repeat, iterable, repeat(n))) + + def repeat_last(iterable, default=None): """After the *iterable* is exhausted, keep yielding its last element. @@ -1478,25 +1631,6 @@ def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): ) -class UnequalIterablesError(ValueError): - def __init__(self, details=None): - msg = 'Iterables have different lengths' - if details is not None: - msg += (': index 0 has length {}; index {} has length {}').format( - *details - ) - - super().__init__(msg) - - -def _zip_equal_generator(iterables): - for combo in zip_longest(*iterables, fillvalue=_marker): - for val in combo: - if val is _marker: - raise UnequalIterablesError() - yield combo - - def zip_equal(*iterables): """``zip`` the input *iterables* together, but raise ``UnequalIterablesError`` if they aren't all the same length. @@ -1524,23 +1658,8 @@ def zip_equal(*iterables): ), DeprecationWarning, ) - # Check whether the iterables are all the same size. - try: - first_size = len(iterables[0]) - for i, it in enumerate(iterables[1:], 1): - size = len(it) - if size != first_size: - break - else: - # If we didn't break out, we can use the built-in zip. - return zip(*iterables) - # If we did break out, there was a mismatch. - raise UnequalIterablesError(details=(first_size, i, size)) - # If any one of the iterables didn't have a length, start reading - # them until one runs out. - except TypeError: - return _zip_equal_generator(iterables) + return _zip_equal(*iterables) def zip_offset(*iterables, offsets, longest=False, fillvalue=None): @@ -1653,7 +1772,7 @@ def unzip(iterable): of the zipped *iterable*. The ``i``-th iterable contains the ``i``-th element from each element - of the zipped iterable. The first element is used to to determine the + of the zipped iterable. The first element is used to determine the length of the remaining elements. 
>>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] @@ -1965,7 +2084,6 @@ def __init__(self, *args): if self._step == self._zero: raise ValueError('numeric_range() arg 3 must not be zero') self._growing = self._step > self._zero - self._init_len() def __bool__(self): if self._growing: @@ -2041,7 +2159,8 @@ def __iter__(self): def __len__(self): return self._len - def _init_len(self): + @cached_property + def _len(self): if self._growing: start = self._start stop = self._stop @@ -2052,10 +2171,10 @@ def _init_len(self): step = -self._step distance = stop - start if distance <= self._zero: - self._len = 0 + return 0 else: # distance > 0 and step > 0: regular euclidean division q, r = divmod(distance, step) - self._len = int(q) + int(r != self._zero) + return int(q) + int(r != self._zero) def __reduce__(self): return numeric_range, (self._start, self._stop, self._step) @@ -2203,6 +2322,16 @@ def locate(iterable, pred=bool, window_size=None): return compress(count(), starmap(pred, it)) +def longest_common_prefix(iterables): + """Yield elements of the longest common prefix amongst given *iterables*. + + >>> ''.join(longest_common_prefix(['abcd', 'abc', 'abf'])) + 'ab' + + """ + return (c[0] for c in takewhile(all_equal, zip(*iterables))) + + def lstrip(iterable, pred): """Yield the items from *iterable*, but strip any from the beginning for which *pred* returns ``True``. @@ -2511,7 +2640,7 @@ def difference(iterable, func=sub, *, initial=None): if initial is not None: first = [] - return chain(first, starmap(func, zip(b, a))) + return chain(first, map(func, b, a)) class SequenceView(Sequence): @@ -2585,6 +2714,9 @@ class seekable: >>> it.seek(10) >>> next(it) '10' + >>> it.relative_seek(-2) # Seeking relative to the current position + >>> next(it) + '9' >>> it.seek(20) # Seeking past the end of the source isn't a problem >>> list(it) [] @@ -2698,6 +2830,10 @@ def seek(self, index): if remainder > 0: consume(self, remainder) + def relative_seek(self, count): + index = len(self._cache) + self.seek(max(index + count, 0)) + class run_length: """ @@ -2804,6 +2940,7 @@ def make_decorator(wrapping_func, result_index=0): '7' """ + # See https://sites.google.com/site/bbayles/index/decorator_factory for # notes on how this works. def decorator(*wrapping_args, **wrapping_kwargs): @@ -3090,6 +3227,8 @@ class time_limited: stops if the time elapsed is greater than *limit_seconds*. If your time limit is 1 second, but it takes 2 seconds to generate the first item from the iterable, the function will run for 2 seconds and not yield anything. + As a special case, when *limit_seconds* is zero, the iterator never + returns anything. """ @@ -3105,6 +3244,9 @@ def __iter__(self): return self def __next__(self): + if self.limit_seconds == 0: + self.timed_out = True + raise StopIteration item = next(self._iterable) if monotonic() - self._start_time > self.limit_seconds: self.timed_out = True @@ -3154,6 +3296,27 @@ def only(iterable, default=None, too_long=None): return first_value +class _IChunk: + def __init__(self, iterable, n): + self._it = islice(iterable, n) + self._cache = deque() + + def fill_cache(self): + self._cache.extend(self._it) + + def __iter__(self): + return self + + def __next__(self): + try: + return next(self._it) + except StopIteration: + if self._cache: + return self._cache.popleft() + else: + raise + + def ichunked(iterable, n): """Break *iterable* into sub-iterables with *n* elements each. 
:func:`ichunked` is like :func:`chunked`, but it yields iterables @@ -3175,20 +3338,39 @@ def ichunked(iterable, n): [8, 9, 10, 11] """ - source = iter(iterable) - + source = peekable(iter(iterable)) + ichunk_marker = object() while True: # Check to see whether we're at the end of the source iterable - item = next(source, _marker) - if item is _marker: + item = source.peek(ichunk_marker) + if item is ichunk_marker: return - # Clone the source and yield an n-length slice - source, it = tee(chain([item], source)) - yield islice(it, n) + chunk = _IChunk(source, n) + yield chunk + + # Advance the source iterable and fill previous chunk's cache + chunk.fill_cache() + - # Advance the source iterable - consume(source, n) +def iequals(*iterables): + """Return ``True`` if all given *iterables* are equal to each other, + which means that they contain the same elements in the same order. + + The function is useful for comparing iterables of different data types + or iterables that do not support equality checks. + + >>> iequals("abc", ['a', 'b', 'c'], ('a', 'b', 'c'), iter("abc")) + True + + >>> iequals("abc", "acb") + False + + Not to be confused with :func:`all_equal`, which checks whether all + elements of iterable are equal to each other. + + """ + return all(map(all_equal, zip_longest(*iterables, fillvalue=object()))) def distinct_combinations(iterable, r): @@ -3260,7 +3442,7 @@ def map_except(function, iterable, *exceptions): result, unless *function* raises one of the specified *exceptions*. *function* is called to transform each item in *iterable*. - It should be a accept one argument. + It should accept one argument. >>> iterable = ['1', '2', 'three', '4', None] >>> list(map_except(int, iterable, ValueError, TypeError)) @@ -3276,6 +3458,28 @@ def map_except(function, iterable, *exceptions): pass +def map_if(iterable, pred, func, func_else=lambda x: x): + """Evaluate each item from *iterable* using *pred*. If the result is + equivalent to ``True``, transform the item with *func* and yield it. + Otherwise, transform the item with *func_else* and yield it. + + *pred*, *func*, and *func_else* should each be functions that accept + one argument. By default, *func_else* is the identity function. + + >>> from math import sqrt + >>> iterable = list(range(-5, 5)) + >>> iterable + [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4] + >>> list(map_if(iterable, lambda x: x > 3, lambda x: 'toobig')) + [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig'] + >>> list(map_if(iterable, lambda x: x >= 0, + ... lambda x: f'{sqrt(x):.2f}', lambda x: None)) + [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00'] + """ + for item in iterable: + yield func(item) if pred(item) else func_else(item) + + def _sample_unweighted(iterable, k): # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li: # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))". @@ -3292,7 +3496,6 @@ def _sample_unweighted(iterable, k): next_index = k + floor(log(random()) / log(1 - W)) for index, element in enumerate(iterable, k): - if index == next_index: reservoir[randrange(k)] = element # The new W is the largest in a sample of k U(0, `old_W`) numbers @@ -3373,7 +3576,7 @@ def sample(iterable, k, weights=None): return _sample_weighted(iterable, k, weights) -def is_sorted(iterable, key=None, reverse=False): +def is_sorted(iterable, key=None, reverse=False, strict=False): """Returns ``True`` if the items of iterable are in sorted order, and ``False`` otherwise. 
*key* and *reverse* have the same meaning that they do in the built-in :func:`sorted` function. @@ -3383,12 +3586,20 @@ def is_sorted(iterable, key=None, reverse=False): >>> is_sorted([5, 4, 3, 1, 2], reverse=True) False + If *strict*, tests for strict sorting, that is, returns ``False`` if equal + elements are found: + + >>> is_sorted([1, 2, 2]) + True + >>> is_sorted([1, 2, 2], strict=True) + False + The function returns ``False`` after encountering the first out-of-order item. If there are no out-of-order items, the iterable is exhausted. """ - compare = lt if reverse else gt - it = iterable if (key is None) else map(key, iterable) + compare = (le if reverse else ge) if strict else (lt if reverse else gt) + it = iterable if key is None else map(key, iterable) return not any(starmap(compare, pairwise(it))) @@ -3453,7 +3664,9 @@ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1): self._aborted = False self._future = None self._wait_seconds = wait_seconds - self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) + # Lazily import concurrent.future + self._executor = __import__( + ).futures.__import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) self._iterator = self._reader() def __enter__(self): @@ -3649,7 +3862,7 @@ def nth_permutation(iterable, r, index): elif not 0 <= r < n: raise ValueError else: - c = factorial(n) // factorial(n - r) + c = perm(n, r) if index < 0: index += c @@ -3672,6 +3885,52 @@ def nth_permutation(iterable, r, index): return tuple(map(pool.pop, result)) +def nth_combination_with_replacement(iterable, r, index): + """Equivalent to + ``list(combinations_with_replacement(iterable, r))[index]``. + + + The subsequences with repetition of *iterable* that are of length *r* can + be ordered lexicographically. :func:`nth_combination_with_replacement` + computes the subsequence at sort position *index* directly, without + computing the previous subsequences with replacement. + + >>> nth_combination_with_replacement(range(5), 3, 5) + (0, 1, 1) + + ``ValueError`` will be raised If *r* is negative or greater than the length + of *iterable*. + ``IndexError`` will be raised if the given *index* is invalid. + """ + pool = tuple(iterable) + n = len(pool) + if (r < 0) or (r > n): + raise ValueError + + c = comb(n + r - 1, r) + + if index < 0: + index += c + + if (index < 0) or (index >= c): + raise IndexError + + result = [] + i = 0 + while r: + r -= 1 + while n >= 0: + num_combs = comb(n + r - 1, r) + if index < num_combs: + break + n -= 1 + i += 1 + index -= num_combs + result.append(pool[i]) + + return tuple(result) + + def value_chain(*args): """Yield all arguments passed to the function in the same order in which they were passed. If an argument itself is iterable then iterate over its @@ -3758,14 +4017,71 @@ def combination_index(element, iterable): n, _ = last(pool, default=(n, None)) - # Python versiosn below 3.8 don't have math.comb + # Python versions below 3.8 don't have math.comb index = 1 for i, j in enumerate(reversed(indexes), start=1): j = n - j if i <= j: - index += factorial(j) // (factorial(i) * factorial(j - i)) + index += comb(j, i) + + return comb(n + 1, k + 1) - index + + +def combination_with_replacement_index(element, iterable): + """Equivalent to + ``list(combinations_with_replacement(iterable, r)).index(element)`` + + The subsequences with repetition of *iterable* that are of length *r* can + be ordered lexicographically. 
:func:`combination_with_replacement_index` + computes the index of the first *element*, without computing the previous + combinations with replacement. + + >>> combination_with_replacement_index('adf', 'abcdefg') + 20 + + ``ValueError`` will be raised if the given *element* isn't one of the + combinations with replacement of *iterable*. + """ + element = tuple(element) + l = len(element) + element = enumerate(element) + + k, y = next(element, (None, None)) + if k is None: + return 0 + + indexes = [] + pool = tuple(iterable) + for n, x in enumerate(pool): + while x == y: + indexes.append(n) + tmp, y = next(element, (None, None)) + if tmp is None: + break + else: + k = tmp + if y is None: + break + else: + raise ValueError( + 'element is not a combination with replacement of iterable' + ) + + n = len(pool) + occupations = [0] * n + for p in indexes: + occupations[p] += 1 + + index = 0 + cumulative_sum = 0 + for k in range(1, n): + cumulative_sum += occupations[k - 1] + j = l + n - 1 - k - cumulative_sum + i = n - k + if i <= j: + index += comb(j, i) - return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index + return index def permutation_index(element, iterable): @@ -3822,3 +4138,518 @@ def __next__(self): self.items_seen += 1 return item + + +def chunked_even(iterable, n): + """Break *iterable* into lists of approximately length *n*. + Items are distributed such the lengths of the lists differ by at most + 1 item. + + >>> iterable = [1, 2, 3, 4, 5, 6, 7] + >>> n = 3 + >>> list(chunked_even(iterable, n)) # List lengths: 3, 2, 2 + [[1, 2, 3], [4, 5], [6, 7]] + >>> list(chunked(iterable, n)) # List lengths: 3, 3, 1 + [[1, 2, 3], [4, 5, 6], [7]] + + """ + + len_method = getattr(iterable, '__len__', None) + + if len_method is None: + return _chunked_even_online(iterable, n) + else: + return _chunked_even_finite(iterable, len_method(), n) + + +def _chunked_even_online(iterable, n): + buffer = [] + maxbuf = n + (n - 2) * (n - 1) + for x in iterable: + buffer.append(x) + if len(buffer) == maxbuf: + yield buffer[:n] + buffer = buffer[n:] + yield from _chunked_even_finite(buffer, len(buffer), n) + + +def _chunked_even_finite(iterable, N, n): + if N < 1: + return + + # Lists are either size `full_size <= n` or `partial_size = full_size - 1` + q, r = divmod(N, n) + num_lists = q + (1 if r > 0 else 0) + q, r = divmod(N, num_lists) + full_size = q + (1 if r > 0 else 0) + partial_size = full_size - 1 + num_full = N - partial_size * num_lists + num_partial = num_lists - num_full + + # Yield num_full lists of full_size + partial_start_idx = num_full * full_size + if full_size > 0: + for i in range(0, partial_start_idx, full_size): + yield list(islice(iterable, i, i + full_size)) + + # Yield num_partial lists of partial_size + if partial_size > 0: + for i in range( + partial_start_idx, + partial_start_idx + (num_partial * partial_size), + partial_size, + ): + yield list(islice(iterable, i, i + partial_size)) + + +def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False): + """A version of :func:`zip` that "broadcasts" any scalar + (i.e., non-iterable) items into output tuples. + + >>> iterable_1 = [1, 2, 3] + >>> iterable_2 = ['a', 'b', 'c'] + >>> scalar = '_' + >>> list(zip_broadcast(iterable_1, iterable_2, scalar)) + [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')] + + The *scalar_types* keyword argument determines what types are considered + scalar. It is set to ``(str, bytes)`` by default. 
Set it to ``None`` to + treat strings and byte strings as iterable: + + >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None)) + [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')] + + If the *strict* keyword argument is ``True``, then + ``UnequalIterablesError`` will be raised if any of the iterables have + different lengths. + """ + + def is_scalar(obj): + if scalar_types and isinstance(obj, scalar_types): + return True + try: + iter(obj) + except TypeError: + return True + else: + return False + + size = len(objects) + if not size: + return + + new_item = [None] * size + iterables, iterable_positions = [], [] + for i, obj in enumerate(objects): + if is_scalar(obj): + new_item[i] = obj + else: + iterables.append(iter(obj)) + iterable_positions.append(i) + + if not iterables: + yield tuple(objects) + return + + zipper = _zip_equal if strict else zip + for item in zipper(*iterables): + for i, new_item[i] in zip(iterable_positions, item): + pass + yield tuple(new_item) + + +def unique_in_window(iterable, n, key=None): + """Yield the items from *iterable* that haven't been seen recently. + *n* is the size of the lookback window. + + >>> iterable = [0, 1, 0, 2, 3, 0] + >>> n = 3 + >>> list(unique_in_window(iterable, n)) + [0, 1, 2, 3, 0] + + The *key* function, if provided, will be used to determine uniqueness: + + >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower())) + ['a', 'b', 'c', 'd', 'a'] + + The items in *iterable* must be hashable. + + """ + if n <= 0: + raise ValueError('n must be greater than 0') + + window = deque(maxlen=n) + counts = defaultdict(int) + use_key = key is not None + + for item in iterable: + if len(window) == n: + to_discard = window[0] + if counts[to_discard] == 1: + del counts[to_discard] + else: + counts[to_discard] -= 1 + + k = key(item) if use_key else item + if k not in counts: + yield item + counts[k] += 1 + window.append(k) + + +def duplicates_everseen(iterable, key=None): + """Yield duplicate elements after their first appearance. + + >>> list(duplicates_everseen('mississippi')) + ['s', 'i', 's', 's', 'i', 'p', 'i'] + >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower)) + ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a'] + + This function is analogous to :func:`unique_everseen` and is subject to + the same performance considerations. + + """ + seen_set = set() + seen_list = [] + use_key = key is not None + + for element in iterable: + k = key(element) if use_key else element + try: + if k not in seen_set: + seen_set.add(k) + else: + yield element + except TypeError: + if k not in seen_list: + seen_list.append(k) + else: + yield element + + +def duplicates_justseen(iterable, key=None): + """Yields serially-duplicate elements after their first appearance. + + >>> list(duplicates_justseen('mississippi')) + ['s', 's', 'p'] + >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower)) + ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a'] + + This function is analogous to :func:`unique_justseen`. + + """ + return flatten(g for _, g in groupby(iterable, key) for _ in g) + + +def classify_unique(iterable, key=None): + """Classify each element in terms of its uniqueness. + + For each element in the input iterable, return a 3-tuple consisting of: + + 1. The element itself + 2. ``False`` if the element is equal to the one preceding it in the input, + ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`) + 3. ``False`` if this element has been seen anywhere in the input before, + ``True`` otherwise (i.e. 
the equivalent of :func:`unique_everseen`) + + >>> list(classify_unique('otto')) # doctest: +NORMALIZE_WHITESPACE + [('o', True, True), + ('t', True, True), + ('t', False, False), + ('o', True, False)] + + This function is analogous to :func:`unique_everseen` and is subject to + the same performance considerations. + + """ + seen_set = set() + seen_list = [] + use_key = key is not None + previous = None + + for i, element in enumerate(iterable): + k = key(element) if use_key else element + is_unique_justseen = not i or previous != k + previous = k + is_unique_everseen = False + try: + if k not in seen_set: + seen_set.add(k) + is_unique_everseen = True + except TypeError: + if k not in seen_list: + seen_list.append(k) + is_unique_everseen = True + yield element, is_unique_justseen, is_unique_everseen + + +def minmax(iterable_or_value, *others, key=None, default=_marker): + """Returns both the smallest and largest items in an iterable + or the largest of two or more arguments. + + >>> minmax([3, 1, 5]) + (1, 5) + + >>> minmax(4, 2, 6) + (2, 6) + + If a *key* function is provided, it will be used to transform the input + items for comparison. + + >>> minmax([5, 30], key=str) # '30' sorts before '5' + (30, 5) + + If a *default* value is provided, it will be returned if there are no + input items. + + >>> minmax([], default=(0, 0)) + (0, 0) + + Otherwise ``ValueError`` is raised. + + This function is based on the + `recipe `__ by + Raymond Hettinger and takes care to minimize the number of comparisons + performed. + """ + iterable = (iterable_or_value, *others) if others else iterable_or_value + + it = iter(iterable) + + try: + lo = hi = next(it) + except StopIteration as e: + if default is _marker: + raise ValueError( + '`minmax()` argument is an empty iterable. ' + 'Provide a `default` value to suppress this error.' + ) from e + return default + + # Different branches depending on the presence of key. This saves a lot + # of unimportant copies which would slow the "key=None" branch + # significantly down. + if key is None: + for x, y in zip_longest(it, it, fillvalue=lo): + if y < x: + x, y = y, x + if x < lo: + lo = x + if hi < y: + hi = y + + else: + lo_key = hi_key = key(lo) + + for x, y in zip_longest(it, it, fillvalue=lo): + x_key, y_key = key(x), key(y) + + if y_key < x_key: + x, y, x_key, y_key = y, x, y_key, x_key + if x_key < lo_key: + lo, lo_key = x, x_key + if hi_key < y_key: + hi, hi_key = y, y_key + + return lo, hi + + +def constrained_batches( + iterable, max_size, max_count=None, get_len=len, strict=True +): + """Yield batches of items from *iterable* with a combined size limited by + *max_size*. + + >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1'] + >>> list(constrained_batches(iterable, 10)) + [(b'12345', b'123'), (b'12345678', b'1', b'1'), (b'12', b'1')] + + If a *max_count* is supplied, the number of items per batch is also + limited: + + >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1'] + >>> list(constrained_batches(iterable, 10, max_count = 2)) + [(b'12345', b'123'), (b'12345678', b'1'), (b'1', b'12'), (b'1',)] + + If a *get_len* function is supplied, use that instead of :func:`len` to + determine item size. + + If *strict* is ``True``, raise ``ValueError`` if any single item is bigger + than *max_size*. Otherwise, allow single items to exceed *max_size*. 
+ """ + if max_size <= 0: + raise ValueError('maximum size must be greater than zero') + + batch = [] + batch_size = 0 + batch_count = 0 + for item in iterable: + item_len = get_len(item) + if strict and item_len > max_size: + raise ValueError('item size exceeds maximum size') + + reached_count = batch_count == max_count + reached_size = item_len + batch_size > max_size + if batch_count and (reached_size or reached_count): + yield tuple(batch) + batch.clear() + batch_size = 0 + batch_count = 0 + + batch.append(item) + batch_size += item_len + batch_count += 1 + + if batch: + yield tuple(batch) + + +def gray_product(*iterables): + """Like :func:`itertools.product`, but return tuples in an order such + that only one element in the generated tuple changes from one iteration + to the next. + + >>> list(gray_product('AB','CD')) + [('A', 'C'), ('B', 'C'), ('B', 'D'), ('A', 'D')] + + This function consumes all of the input iterables before producing output. + If any of the input iterables have fewer than two items, ``ValueError`` + is raised. + + For information on the algorithm, see + `this section `__ + of Donald Knuth's *The Art of Computer Programming*. + """ + all_iterables = tuple(tuple(x) for x in iterables) + iterable_count = len(all_iterables) + for iterable in all_iterables: + if len(iterable) < 2: + raise ValueError("each iterable must have two or more items") + + # This is based on "Algorithm H" from section 7.2.1.1, page 20. + # a holds the indexes of the source iterables for the n-tuple to be yielded + # f is the array of "focus pointers" + # o is the array of "directions" + a = [0] * iterable_count + f = list(range(iterable_count + 1)) + o = [1] * iterable_count + while True: + yield tuple(all_iterables[i][a[i]] for i in range(iterable_count)) + j = f[0] + f[0] = 0 + if j == iterable_count: + break + a[j] = a[j] + o[j] + if a[j] == 0 or a[j] == len(all_iterables[j]) - 1: + o[j] = -o[j] + f[j] = f[j + 1] + f[j + 1] = j + 1 + + +def partial_product(*iterables): + """Yields tuples containing one item from each iterator, with subsequent + tuples changing a single item at a time by advancing each iterator until it + is exhausted. This sequence guarantees every value in each iterable is + output at least once without generating all possible combinations. + + This may be useful, for example, when testing an expensive function. + + >>> list(partial_product('AB', 'C', 'DEF')) + [('A', 'C', 'D'), ('B', 'C', 'D'), ('B', 'C', 'E'), ('B', 'C', 'F')] + """ + + iterators = list(map(iter, iterables)) + + try: + prod = [next(it) for it in iterators] + except StopIteration: + return + yield tuple(prod) + + for i, it in enumerate(iterators): + for prod[i] in it: + yield tuple(prod) + + +def takewhile_inclusive(predicate, iterable): + """A variant of :func:`takewhile` that yields one additional element. + + >>> list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1])) + [1, 4, 6] + + :func:`takewhile` would return ``[1, 4]``. + """ + for x in iterable: + yield x + if not predicate(x): + break + + +def outer_product(func, xs, ys, *args, **kwargs): + """A generalized outer product that applies a binary function to all + pairs of items. Returns a 2D matrix with ``len(xs)`` rows and ``len(ys)`` + columns. + Also accepts ``*args`` and ``**kwargs`` that are passed to ``func``. 
+ + Multiplication table: + + >>> list(outer_product(mul, range(1, 4), range(1, 6))) + [(1, 2, 3, 4, 5), (2, 4, 6, 8, 10), (3, 6, 9, 12, 15)] + + Cross tabulation: + + >>> xs = ['A', 'B', 'A', 'A', 'B', 'B', 'A', 'A', 'B', 'B'] + >>> ys = ['X', 'X', 'X', 'Y', 'Z', 'Z', 'Y', 'Y', 'Z', 'Z'] + >>> rows = list(zip(xs, ys)) + >>> count_rows = lambda x, y: rows.count((x, y)) + >>> list(outer_product(count_rows, sorted(set(xs)), sorted(set(ys)))) + [(2, 3, 0), (1, 0, 4)] + + Usage with ``*args`` and ``**kwargs``: + + >>> animals = ['cat', 'wolf', 'mouse'] + >>> list(outer_product(min, animals, animals, key=len)) + [('cat', 'cat', 'cat'), ('cat', 'wolf', 'wolf'), ('cat', 'wolf', 'mouse')] + """ + ys = tuple(ys) + return batched( + starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)), + n=len(ys), + ) + + +def iter_suppress(iterable, *exceptions): + """Yield each of the items from *iterable*. If the iteration raises one of + the specified *exceptions*, that exception will be suppressed and iteration + will stop. + + >>> from itertools import chain + >>> def breaks_at_five(x): + ... while True: + ... if x >= 5: + ... raise RuntimeError + ... yield x + ... x += 1 + >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError) + >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError) + >>> list(chain(it_1, it_2)) + [1, 2, 3, 4, 2, 3, 4] + """ + try: + yield from iterable + except exceptions: + return + + +def filter_map(func, iterable): + """Apply *func* to every element of *iterable*, yielding only those which + are not ``None``. + + >>> elems = ['1', 'a', '2', 'b', '3'] + >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems)) + [1, 2, 3] + """ + for x in iterable: + y = func(x) + if y is not None: + yield y diff --git a/setuptools/_vendor/more_itertools/more.pyi b/setuptools/_vendor/more_itertools/more.pyi index 2fba9cb300..9a5fc911a3 100644 --- a/setuptools/_vendor/more_itertools/more.pyi +++ b/setuptools/_vendor/more_itertools/more.pyi @@ -1,35 +1,36 @@ """Stubs for more_itertools.more""" +from __future__ import annotations +from types import TracebackType from typing import ( Any, Callable, Container, - Dict, + ContextManager, Generic, Hashable, Iterable, Iterator, - List, - Optional, + overload, Reversible, Sequence, Sized, - Tuple, - Union, + Type, TypeVar, type_check_only, ) -from types import TracebackType -from typing_extensions import ContextManager, Protocol, Type, overload +from typing_extensions import Protocol # Type and type variable definitions _T = TypeVar('_T') +_T1 = TypeVar('_T1') +_T2 = TypeVar('_T2') _U = TypeVar('_U') _V = TypeVar('_V') _W = TypeVar('_W') _T_co = TypeVar('_T_co', covariant=True) -_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]]) -_Raisable = Union[BaseException, 'Type[BaseException]'] +_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]]) +_Raisable = BaseException | Type[BaseException] @type_check_only class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ... @@ -37,23 +38,25 @@ class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ... @type_check_only class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ... +@type_check_only +class _SupportsSlicing(Protocol[_T_co]): + def __getitem__(self, __k: slice) -> _T_co: ... + def chunked( - iterable: Iterable[_T], n: int, strict: bool = ... -) -> Iterator[List[_T]]: ... + iterable: Iterable[_T], n: int | None, strict: bool = ... +) -> Iterator[list[_T]]: ... @overload def first(iterable: Iterable[_T]) -> _T: ... 
@overload -def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ... +def first(iterable: Iterable[_T], default: _U) -> _T | _U: ... @overload def last(iterable: Iterable[_T]) -> _T: ... @overload -def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ... +def last(iterable: Iterable[_T], default: _U) -> _T | _U: ... @overload def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ... @overload -def nth_or_last( - iterable: Iterable[_T], n: int, default: _U -) -> Union[_T, _U]: ... +def nth_or_last(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... class peekable(Generic[_T], Iterator[_T]): def __init__(self, iterable: Iterable[_T]) -> None: ... @@ -62,52 +65,58 @@ class peekable(Generic[_T], Iterator[_T]): @overload def peek(self) -> _T: ... @overload - def peek(self, default: _U) -> Union[_T, _U]: ... + def peek(self, default: _U) -> _T | _U: ... def prepend(self, *items: _T) -> None: ... def __next__(self) -> _T: ... @overload def __getitem__(self, index: int) -> _T: ... @overload - def __getitem__(self, index: slice) -> List[_T]: ... + def __getitem__(self, index: slice) -> list[_T]: ... -def collate(*iterables: Iterable[_T], **kwargs: Any) -> Iterable[_T]: ... def consumer(func: _GenFn) -> _GenFn: ... -def ilen(iterable: Iterable[object]) -> int: ... +def ilen(iterable: Iterable[_T]) -> int: ... def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ... def with_iter( context_manager: ContextManager[Iterable[_T]], ) -> Iterator[_T]: ... def one( iterable: Iterable[_T], - too_short: Optional[_Raisable] = ..., - too_long: Optional[_Raisable] = ..., + too_short: _Raisable | None = ..., + too_long: _Raisable | None = ..., ) -> _T: ... +def raise_(exception: _Raisable, *args: Any) -> None: ... +def strictly_n( + iterable: Iterable[_T], + n: int, + too_short: _GenFn | None = ..., + too_long: _GenFn | None = ..., +) -> list[_T]: ... def distinct_permutations( - iterable: Iterable[_T], r: Optional[int] = ... -) -> Iterator[Tuple[_T, ...]]: ... + iterable: Iterable[_T], r: int | None = ... +) -> Iterator[tuple[_T, ...]]: ... def intersperse( e: _U, iterable: Iterable[_T], n: int = ... -) -> Iterator[Union[_T, _U]]: ... -def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ... +) -> Iterator[_T | _U]: ... +def unique_to_each(*iterables: Iterable[_T]) -> list[list[_T]]: ... @overload def windowed( seq: Iterable[_T], n: int, *, step: int = ... -) -> Iterator[Tuple[Optional[_T], ...]]: ... +) -> Iterator[tuple[_T | None, ...]]: ... @overload def windowed( seq: Iterable[_T], n: int, fillvalue: _U, step: int = ... -) -> Iterator[Tuple[Union[_T, _U], ...]]: ... -def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ... +) -> Iterator[tuple[_T | _U, ...]]: ... +def substrings(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... def substrings_indexes( seq: Sequence[_T], reverse: bool = ... -) -> Iterator[Tuple[Sequence[_T], int, int]]: ... +) -> Iterator[tuple[Sequence[_T], int, int]]: ... class bucket(Generic[_T, _U], Container[_U]): def __init__( self, iterable: Iterable[_T], key: Callable[[_T], _U], - validator: Optional[Callable[[object], object]] = ..., + validator: Callable[[_U], object] | None = ..., ) -> None: ... def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_U]: ... @@ -115,130 +124,232 @@ class bucket(Generic[_T, _U], Container[_U]): def spy( iterable: Iterable[_T], n: int = ... -) -> Tuple[List[_T], Iterator[_T]]: ... +) -> tuple[list[_T], Iterator[_T]]: ... 
def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ... def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ... +def interleave_evenly( + iterables: list[Iterable[_T]], lengths: list[int] | None = ... +) -> Iterator[_T]: ... def collapse( iterable: Iterable[Any], - base_type: Optional[type] = ..., - levels: Optional[int] = ..., + base_type: type | None = ..., + levels: int | None = ..., ) -> Iterator[Any]: ... @overload def side_effect( func: Callable[[_T], object], iterable: Iterable[_T], chunk_size: None = ..., - before: Optional[Callable[[], object]] = ..., - after: Optional[Callable[[], object]] = ..., + before: Callable[[], object] | None = ..., + after: Callable[[], object] | None = ..., ) -> Iterator[_T]: ... @overload def side_effect( - func: Callable[[List[_T]], object], + func: Callable[[list[_T]], object], iterable: Iterable[_T], chunk_size: int, - before: Optional[Callable[[], object]] = ..., - after: Optional[Callable[[], object]] = ..., + before: Callable[[], object] | None = ..., + after: Callable[[], object] | None = ..., ) -> Iterator[_T]: ... def sliced( - seq: Sequence[_T], n: int, strict: bool = ... -) -> Iterator[Sequence[_T]]: ... + seq: _SupportsSlicing[_T], n: int, strict: bool = ... +) -> Iterator[_T]: ... def split_at( iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ..., keep_separator: bool = ..., -) -> Iterator[List[_T]]: ... +) -> Iterator[list[_T]]: ... def split_before( iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ... -) -> Iterator[List[_T]]: ... +) -> Iterator[list[_T]]: ... def split_after( iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ... -) -> Iterator[List[_T]]: ... +) -> Iterator[list[_T]]: ... def split_when( iterable: Iterable[_T], pred: Callable[[_T, _T], object], maxsplit: int = ..., -) -> Iterator[List[_T]]: ... +) -> Iterator[list[_T]]: ... def split_into( - iterable: Iterable[_T], sizes: Iterable[Optional[int]] -) -> Iterator[List[_T]]: ... + iterable: Iterable[_T], sizes: Iterable[int | None] +) -> Iterator[list[_T]]: ... @overload def padded( iterable: Iterable[_T], *, - n: Optional[int] = ..., - next_multiple: bool = ... -) -> Iterator[Optional[_T]]: ... + n: int | None = ..., + next_multiple: bool = ..., +) -> Iterator[_T | None]: ... @overload def padded( iterable: Iterable[_T], fillvalue: _U, - n: Optional[int] = ..., + n: int | None = ..., next_multiple: bool = ..., -) -> Iterator[Union[_T, _U]]: ... +) -> Iterator[_T | _U]: ... @overload def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ... @overload -def repeat_last( - iterable: Iterable[_T], default: _U -) -> Iterator[Union[_T, _U]]: ... -def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ... +def repeat_last(iterable: Iterable[_T], default: _U) -> Iterator[_T | _U]: ... +def distribute(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ... @overload def stagger( iterable: Iterable[_T], offsets: _SizedIterable[int] = ..., longest: bool = ..., -) -> Iterator[Tuple[Optional[_T], ...]]: ... +) -> Iterator[tuple[_T | None, ...]]: ... @overload def stagger( iterable: Iterable[_T], offsets: _SizedIterable[int] = ..., longest: bool = ..., fillvalue: _U = ..., -) -> Iterator[Tuple[Union[_T, _U], ...]]: ... +) -> Iterator[tuple[_T | _U, ...]]: ... class UnequalIterablesError(ValueError): - def __init__( - self, details: Optional[Tuple[int, int, int]] = ... - ) -> None: ... + def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ... 
-def zip_equal(*iterables: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ... +@overload +def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ... +@overload +def zip_equal( + __iter1: Iterable[_T1], __iter2: Iterable[_T2] +) -> Iterator[tuple[_T1, _T2]]: ... +@overload +def zip_equal( + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + *iterables: Iterable[_T], +) -> Iterator[tuple[_T, ...]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T1], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: None = None, +) -> Iterator[tuple[_T1 | None]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: None = None, +) -> Iterator[tuple[_T1 | None, _T2 | None]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + *iterables: Iterable[_T], + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: None = None, +) -> Iterator[tuple[_T | None, ...]]: ... @overload def zip_offset( - *iterables: Iterable[_T], offsets: _SizedIterable[int], longest: bool = ... -) -> Iterator[Tuple[Optional[_T], ...]]: ... + __iter1: Iterable[_T1], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: _U, +) -> Iterator[tuple[_T1 | _U]]: ... @overload def zip_offset( + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: _U, +) -> Iterator[tuple[_T1 | _U, _T2 | _U]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], *iterables: Iterable[_T], offsets: _SizedIterable[int], longest: bool = ..., - fillvalue: _U -) -> Iterator[Tuple[Union[_T, _U], ...]]: ... + fillvalue: _U, +) -> Iterator[tuple[_T | _U, ...]]: ... def sort_together( iterables: Iterable[Iterable[_T]], key_list: Iterable[int] = ..., - key: Optional[Callable[..., Any]] = ..., + key: Callable[..., Any] | None = ..., reverse: bool = ..., -) -> List[Tuple[_T, ...]]: ... -def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ... -def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ... +) -> list[tuple[_T, ...]]: ... +def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ... +def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ... def always_iterable( obj: object, - base_type: Union[ - type, Tuple[Union[type, Tuple[Any, ...]], ...], None - ] = ..., + base_type: type | tuple[type | tuple[Any, ...], ...] | None = ..., ) -> Iterator[Any]: ... def adjacent( predicate: Callable[[_T], bool], iterable: Iterable[_T], distance: int = ..., -) -> Iterator[Tuple[bool, _T]]: ... +) -> Iterator[tuple[bool, _T]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None = None, + valuefunc: None = None, + reducefunc: None = None, +) -> Iterator[tuple[_T, Iterator[_T]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: None, + reducefunc: None, +) -> Iterator[tuple[_U, Iterator[_T]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None, + valuefunc: Callable[[_T], _V], + reducefunc: None, +) -> Iterable[tuple[_T, Iterable[_V]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: Callable[[_T], _V], + reducefunc: None, +) -> Iterable[tuple[_U, Iterator[_V]]]: ... 
+@overload def groupby_transform( iterable: Iterable[_T], - keyfunc: Optional[Callable[[_T], _U]] = ..., - valuefunc: Optional[Callable[[_T], _V]] = ..., - reducefunc: Optional[Callable[..., _W]] = ..., -) -> Iterator[Tuple[_T, _W]]: ... + keyfunc: None, + valuefunc: None, + reducefunc: Callable[[Iterator[_T]], _W], +) -> Iterable[tuple[_T, _W]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: None, + reducefunc: Callable[[Iterator[_T]], _W], +) -> Iterable[tuple[_U, _W]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None, + valuefunc: Callable[[_T], _V], + reducefunc: Callable[[Iterable[_V]], _W], +) -> Iterable[tuple[_T, _W]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: Callable[[_T], _V], + reducefunc: Callable[[Iterable[_V]], _W], +) -> Iterable[tuple[_U, _W]]: ... class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]): @overload @@ -259,22 +370,22 @@ class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]): def __len__(self) -> int: ... def __reduce__( self, - ) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ... + ) -> tuple[Type[numeric_range[_T, _U]], tuple[_T, _T, _U]]: ... def __repr__(self) -> str: ... def __reversed__(self) -> Iterator[_T]: ... def count(self, value: _T) -> int: ... def index(self, value: _T) -> int: ... # type: ignore def count_cycle( - iterable: Iterable[_T], n: Optional[int] = ... -) -> Iterable[Tuple[int, _T]]: ... + iterable: Iterable[_T], n: int | None = ... +) -> Iterable[tuple[int, _T]]: ... def mark_ends( iterable: Iterable[_T], -) -> Iterable[Tuple[bool, bool, _T]]: ... +) -> Iterable[tuple[bool, bool, _T]]: ... def locate( - iterable: Iterable[object], + iterable: Iterable[_T], pred: Callable[..., Any] = ..., - window_size: Optional[int] = ..., + window_size: int | None = ..., ) -> Iterator[int]: ... def lstrip( iterable: Iterable[_T], pred: Callable[[_T], object] @@ -287,9 +398,7 @@ def strip( ) -> Iterator[_T]: ... class islice_extended(Generic[_T], Iterator[_T]): - def __init__( - self, iterable: Iterable[_T], *args: Optional[int] - ) -> None: ... + def __init__(self, iterable: Iterable[_T], *args: int | None) -> None: ... def __iter__(self) -> islice_extended[_T]: ... def __next__(self) -> _T: ... def __getitem__(self, index: slice) -> islice_extended[_T]: ... @@ -303,8 +412,8 @@ def difference( iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, - initial: None = ... -) -> Iterator[Union[_T, _U]]: ... + initial: None = ..., +) -> Iterator[_T | _U]: ... @overload def difference( iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U @@ -320,7 +429,7 @@ class SequenceView(Generic[_T], Sequence[_T]): class seekable(Generic[_T], Iterator[_T]): def __init__( - self, iterable: Iterable[_T], maxlen: Optional[int] = ... + self, iterable: Iterable[_T], maxlen: int | None = ... ) -> None: ... def __iter__(self) -> seekable[_T]: ... def __next__(self) -> _T: ... @@ -328,20 +437,21 @@ class seekable(Generic[_T], Iterator[_T]): @overload def peek(self) -> _T: ... @overload - def peek(self, default: _U) -> Union[_T, _U]: ... + def peek(self, default: _U) -> _T | _U: ... def elements(self) -> SequenceView[_T]: ... def seek(self, index: int) -> None: ... + def relative_seek(self, count: int) -> None: ... class run_length: @staticmethod - def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ... 
+ def encode(iterable: Iterable[_T]) -> Iterator[tuple[_T, int]]: ... @staticmethod - def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ... + def decode(iterable: Iterable[tuple[_T, int]]) -> Iterator[_T]: ... def exactly_n( iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ... ) -> bool: ... -def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ... +def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ... def make_decorator( wrapping_func: Callable[..., _U], result_index: int = ... ) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ... @@ -351,44 +461,44 @@ def map_reduce( keyfunc: Callable[[_T], _U], valuefunc: None = ..., reducefunc: None = ..., -) -> Dict[_U, List[_T]]: ... +) -> dict[_U, list[_T]]: ... @overload def map_reduce( iterable: Iterable[_T], keyfunc: Callable[[_T], _U], valuefunc: Callable[[_T], _V], reducefunc: None = ..., -) -> Dict[_U, List[_V]]: ... +) -> dict[_U, list[_V]]: ... @overload def map_reduce( iterable: Iterable[_T], keyfunc: Callable[[_T], _U], valuefunc: None = ..., - reducefunc: Callable[[List[_T]], _W] = ..., -) -> Dict[_U, _W]: ... + reducefunc: Callable[[list[_T]], _W] = ..., +) -> dict[_U, _W]: ... @overload def map_reduce( iterable: Iterable[_T], keyfunc: Callable[[_T], _U], valuefunc: Callable[[_T], _V], - reducefunc: Callable[[List[_V]], _W], -) -> Dict[_U, _W]: ... + reducefunc: Callable[[list[_V]], _W], +) -> dict[_U, _W]: ... def rlocate( iterable: Iterable[_T], pred: Callable[..., object] = ..., - window_size: Optional[int] = ..., + window_size: int | None = ..., ) -> Iterator[int]: ... def replace( iterable: Iterable[_T], pred: Callable[..., object], substitutes: Iterable[_U], - count: Optional[int] = ..., + count: int | None = ..., window_size: int = ..., -) -> Iterator[Union[_T, _U]]: ... -def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ... +) -> Iterator[_T | _U]: ... +def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ... def set_partitions( - iterable: Iterable[_T], k: Optional[int] = ... -) -> Iterator[List[List[_T]]]: ... + iterable: Iterable[_T], k: int | None = ... +) -> Iterator[list[list[_T]]]: ... class time_limited(Generic[_T], Iterator[_T]): def __init__( @@ -399,35 +509,42 @@ class time_limited(Generic[_T], Iterator[_T]): @overload def only( - iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ... -) -> Optional[_T]: ... + iterable: Iterable[_T], *, too_long: _Raisable | None = ... +) -> _T | None: ... @overload def only( - iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ... -) -> Union[_T, _U]: ... + iterable: Iterable[_T], default: _U, too_long: _Raisable | None = ... +) -> _T | _U: ... def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ... def distinct_combinations( iterable: Iterable[_T], r: int -) -> Iterator[Tuple[_T, ...]]: ... +) -> Iterator[tuple[_T, ...]]: ... def filter_except( validator: Callable[[Any], object], iterable: Iterable[_T], - *exceptions: Type[BaseException] + *exceptions: Type[BaseException], ) -> Iterator[_T]: ... def map_except( function: Callable[[Any], _U], iterable: Iterable[_T], - *exceptions: Type[BaseException] + *exceptions: Type[BaseException], ) -> Iterator[_U]: ... +def map_if( + iterable: Iterable[Any], + pred: Callable[[Any], bool], + func: Callable[[Any], Any], + func_else: Callable[[Any], Any] | None = ..., +) -> Iterator[Any]: ... 
def sample( iterable: Iterable[_T], k: int, - weights: Optional[Iterable[float]] = ..., -) -> List[_T]: ... + weights: Iterable[float] | None = ..., +) -> list[_T]: ... def is_sorted( iterable: Iterable[_T], - key: Optional[Callable[[_T], _U]] = ..., + key: Callable[[_T], _U] | None = ..., reverse: bool = False, + strict: bool = False, ) -> bool: ... class AbortThread(BaseException): @@ -443,10 +560,10 @@ class callback_iter(Generic[_T], Iterator[_T]): def __enter__(self) -> callback_iter[_T]: ... def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> Optional[bool]: ... + exc_type: Type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> bool | None: ... def __iter__(self) -> callback_iter[_T]: ... def __next__(self) -> _T: ... def _reader(self) -> Iterator[_T]: ... @@ -457,24 +574,122 @@ class callback_iter(Generic[_T], Iterator[_T]): def windowed_complete( iterable: Iterable[_T], n: int -) -> Iterator[Tuple[_T, ...]]: ... +) -> Iterator[tuple[_T, ...]]: ... def all_unique( - iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ... + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... ) -> bool: ... -def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ... +def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ... +def nth_combination_with_replacement( + iterable: Iterable[_T], r: int, index: int +) -> tuple[_T, ...]: ... def nth_permutation( iterable: Iterable[_T], r: int, index: int -) -> Tuple[_T, ...]: ... -def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ... +) -> tuple[_T, ...]: ... +def value_chain(*args: _T | Iterable[_T]) -> Iterable[_T]: ... def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ... def combination_index( element: Iterable[_T], iterable: Iterable[_T] ) -> int: ... +def combination_with_replacement_index( + element: Iterable[_T], iterable: Iterable[_T] +) -> int: ... def permutation_index( element: Iterable[_T], iterable: Iterable[_T] ) -> int: ... +def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ... class countable(Generic[_T], Iterator[_T]): def __init__(self, iterable: Iterable[_T]) -> None: ... def __iter__(self) -> countable[_T]: ... def __next__(self) -> _T: ... + +def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ... +def zip_broadcast( + *objects: _T | Iterable[_T], + scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ..., + strict: bool = ..., +) -> Iterable[tuple[_T, ...]]: ... +def unique_in_window( + iterable: Iterable[_T], n: int, key: Callable[[_T], _U] | None = ... +) -> Iterator[_T]: ... +def duplicates_everseen( + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... +) -> Iterator[_T]: ... +def duplicates_justseen( + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... +) -> Iterator[_T]: ... +def classify_unique( + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... +) -> Iterator[tuple[_T, bool, bool]]: ... + +class _SupportsLessThan(Protocol): + def __lt__(self, __other: Any) -> bool: ... + +_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan) + +@overload +def minmax( + iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None +) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ... +@overload +def minmax( + iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan] +) -> tuple[_T, _T]: ... 
+@overload +def minmax( + iterable_or_value: Iterable[_SupportsLessThanT], + *, + key: None = None, + default: _U, +) -> _U | tuple[_SupportsLessThanT, _SupportsLessThanT]: ... +@overload +def minmax( + iterable_or_value: Iterable[_T], + *, + key: Callable[[_T], _SupportsLessThan], + default: _U, +) -> _U | tuple[_T, _T]: ... +@overload +def minmax( + iterable_or_value: _SupportsLessThanT, + __other: _SupportsLessThanT, + *others: _SupportsLessThanT, +) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ... +@overload +def minmax( + iterable_or_value: _T, + __other: _T, + *others: _T, + key: Callable[[_T], _SupportsLessThan], +) -> tuple[_T, _T]: ... +def longest_common_prefix( + iterables: Iterable[Iterable[_T]], +) -> Iterator[_T]: ... +def iequals(*iterables: Iterable[Any]) -> bool: ... +def constrained_batches( + iterable: Iterable[_T], + max_size: int, + max_count: int | None = ..., + get_len: Callable[[_T], object] = ..., + strict: bool = ..., +) -> Iterator[tuple[_T]]: ... +def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... +def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... +def takewhile_inclusive( + predicate: Callable[[_T], bool], iterable: Iterable[_T] +) -> Iterator[_T]: ... +def outer_product( + func: Callable[[_T, _U], _V], + xs: Iterable[_T], + ys: Iterable[_U], + *args: Any, + **kwargs: Any, +) -> Iterator[tuple[_V, ...]]: ... +def iter_suppress( + iterable: Iterable[_T], + *exceptions: Type[BaseException], +) -> Iterator[_T]: ... +def filter_map( + func: Callable[[_T], _V | None], + iterable: Iterable[_T], +) -> Iterator[_V]: ... diff --git a/setuptools/_vendor/more_itertools/recipes.py b/setuptools/_vendor/more_itertools/recipes.py index 521abd7c2c..145e3cb5bd 100644 --- a/setuptools/_vendor/more_itertools/recipes.py +++ b/setuptools/_vendor/more_itertools/recipes.py @@ -7,32 +7,43 @@ .. [1] http://docs.python.org/library/itertools.html#recipes """ -import warnings +import math +import operator + from collections import deque +from collections.abc import Sized +from functools import partial, reduce from itertools import ( chain, combinations, + compress, count, cycle, groupby, islice, + product, repeat, starmap, tee, zip_longest, ) -import operator from random import randrange, sample, choice +from sys import hexversion __all__ = [ 'all_equal', + 'batched', + 'before_and_after', 'consume', 'convolve', 'dotproduct', 'first_true', + 'factor', 'flatten', 'grouper', 'iter_except', + 'iter_index', + 'matmul', 'ncycles', 'nth', 'nth_combination', @@ -40,22 +51,47 @@ 'pad_none', 'pairwise', 'partition', + 'polynomial_eval', + 'polynomial_from_roots', + 'polynomial_derivative', 'powerset', 'prepend', 'quantify', + 'reshape', 'random_combination_with_replacement', 'random_combination', 'random_permutation', 'random_product', 'repeatfunc', 'roundrobin', + 'sieve', + 'sliding_window', + 'subslices', + 'sum_of_squares', 'tabulate', 'tail', 'take', + 'totient', + 'transpose', + 'triplewise', 'unique_everseen', 'unique_justseen', ] +_marker = object() + + +# zip with strict is available for Python 3.10+ +try: + zip(strict=True) +except TypeError: + _zip_strict = zip +else: + _zip_strict = partial(zip, strict=True) + +# math.sumprod is available for Python 3.12+ +_sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y)) + def take(n, iterable): """Return first *n* items of the iterable as a list. 
@@ -99,7 +135,14 @@ def tail(n, iterable): ['E', 'F', 'G'] """ - return iter(deque(iterable, maxlen=n)) + # If the given iterable has a length, then we can use islice to get its + # final elements. Note that if the iterable is not actually Iterable, + # either islice or deque will throw a TypeError. This is why we don't + # check if it is Iterable. + if isinstance(iterable, Sized): + yield from islice(iterable, max(0, len(iterable) - n), None) + else: + yield from iter(deque(iterable, maxlen=n)) def consume(iterator, n=None): @@ -266,7 +309,7 @@ def _pairwise(iterable): """ a, b = tee(iterable) next(b, None) - yield from zip(a, b) + return zip(a, b) try: @@ -276,25 +319,84 @@ def _pairwise(iterable): else: def pairwise(iterable): - yield from itertools_pairwise(iterable) + return itertools_pairwise(iterable) pairwise.__doc__ = _pairwise.__doc__ -def grouper(iterable, n, fillvalue=None): - """Collect data into fixed-length chunks or blocks. +class UnequalIterablesError(ValueError): + def __init__(self, details=None): + msg = 'Iterables have different lengths' + if details is not None: + msg += (': index 0 has length {}; index {} has length {}').format( + *details + ) + + super().__init__(msg) + + +def _zip_equal_generator(iterables): + for combo in zip_longest(*iterables, fillvalue=_marker): + for val in combo: + if val is _marker: + raise UnequalIterablesError() + yield combo + + +def _zip_equal(*iterables): + # Check whether the iterables are all the same size. + try: + first_size = len(iterables[0]) + for i, it in enumerate(iterables[1:], 1): + size = len(it) + if size != first_size: + raise UnequalIterablesError(details=(first_size, i, size)) + # All sizes are equal, we can use the built-in zip. + return zip(*iterables) + # If any one of the iterables didn't have a length, start reading + # them until one runs out. + except TypeError: + return _zip_equal_generator(iterables) + + +def grouper(iterable, n, incomplete='fill', fillvalue=None): + """Group elements from *iterable* into fixed-length groups of length *n*. + + >>> list(grouper('ABCDEF', 3)) + [('A', 'B', 'C'), ('D', 'E', 'F')] - >>> list(grouper('ABCDEFG', 3, 'x')) + The keyword arguments *incomplete* and *fillvalue* control what happens for + iterables whose length is not a multiple of *n*. + + When *incomplete* is `'fill'`, the last group will contain instances of + *fillvalue*. + + >>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x')) [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] + When *incomplete* is `'ignore'`, the last group will not be emitted. + + >>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x')) + [('A', 'B', 'C'), ('D', 'E', 'F')] + + When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised. + + >>> it = grouper('ABCDEFG', 3, incomplete='strict') + >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... 
+ UnequalIterablesError + """ - if isinstance(iterable, int): - warnings.warn( - "grouper expects iterable as first parameter", DeprecationWarning - ) - n, iterable = iterable, n args = [iter(iterable)] * n - return zip_longest(fillvalue=fillvalue, *args) + if incomplete == 'fill': + return zip_longest(*args, fillvalue=fillvalue) + if incomplete == 'strict': + return _zip_equal(*args) + if incomplete == 'ignore': + return zip(*args) + else: + raise ValueError('Expected fill, strict, or ignore') def roundrobin(*iterables): @@ -343,12 +445,9 @@ def partition(pred, iterable): if pred is None: pred = bool - evaluations = ((pred(x), x) for x in iterable) - t1, t2 = tee(evaluations) - return ( - (x for (cond, x) in t1 if not cond), - (x for (cond, x) in t2 if cond), - ) + t1, t2, p = tee(iterable, 3) + p1, p2 = tee(map(pred, p)) + return (compress(t1, map(operator.not_, p1)), compress(t2, p2)) def powerset(iterable): @@ -396,7 +495,7 @@ def unique_everseen(iterable, key=None): >>> list(unique_everseen(iterable, key=tuple)) # Faster [[1, 2], [2, 3]] - Similary, you may want to convert unhashable ``set`` objects with + Similarly, you may want to convert unhashable ``set`` objects with ``key=frozenset``. For ``dict`` objects, ``key=lambda x: frozenset(x.items())`` can be used. @@ -428,6 +527,9 @@ def unique_justseen(iterable, key=None): ['A', 'B', 'C', 'A', 'D'] """ + if key is None: + return map(operator.itemgetter(0), groupby(iterable)) + return map(next, map(operator.itemgetter(1), groupby(iterable, key))) @@ -442,6 +544,16 @@ def iter_except(func, exception, first=None): >>> list(iter_except(l.pop, IndexError)) [2, 1, 0] + Multiple exceptions can be specified as a stopping condition: + + >>> l = [1, 2, 3, '...', 4, 5, 6] + >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) + [7, 6, 5] + >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) + [4, 3, 2] + >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) + [] + """ try: if first is not None: @@ -612,9 +724,289 @@ def convolve(signal, kernel): is immediately consumed and stored. """ + # This implementation intentionally doesn't match the one in the itertools + # documentation. kernel = tuple(kernel)[::-1] n = len(kernel) window = deque([0], maxlen=n) * n for x in chain(signal, repeat(0, n - 1)): window.append(x) - yield sum(map(operator.mul, kernel, window)) + yield _sumprod(kernel, window) + + +def before_and_after(predicate, it): + """A variant of :func:`takewhile` that allows complete access to the + remainder of the iterator. + + >>> it = iter('ABCdEfGhI') + >>> all_upper, remainder = before_and_after(str.isupper, it) + >>> ''.join(all_upper) + 'ABC' + >>> ''.join(remainder) # takewhile() would lose the 'd' + 'dEfGhI' + + Note that the first iterator must be fully consumed before the second + iterator can generate valid results. + """ + it = iter(it) + transition = [] + + def true_iterator(): + for elem in it: + if predicate(elem): + yield elem + else: + transition.append(elem) + return + + # Note: this is different from itertools recipes to allow nesting + # before_and_after remainders into before_and_after again. See tests + # for an example. + remainder_iterator = chain(transition, it) + + return true_iterator(), remainder_iterator + + +def triplewise(iterable): + """Return overlapping triplets from *iterable*. 
+ + >>> list(triplewise('ABCDE')) + [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')] + + """ + for (a, _), (b, c) in pairwise(pairwise(iterable)): + yield a, b, c + + +def sliding_window(iterable, n): + """Return a sliding window of width *n* over *iterable*. + + >>> list(sliding_window(range(6), 4)) + [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)] + + If *iterable* has fewer than *n* items, then nothing is yielded: + + >>> list(sliding_window(range(3), 4)) + [] + + For a variant with more features, see :func:`windowed`. + """ + it = iter(iterable) + window = deque(islice(it, n - 1), maxlen=n) + for x in it: + window.append(x) + yield tuple(window) + + +def subslices(iterable): + """Return all contiguous non-empty subslices of *iterable*. + + >>> list(subslices('ABC')) + [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']] + + This is similar to :func:`substrings`, but emits items in a different + order. + """ + seq = list(iterable) + slices = starmap(slice, combinations(range(len(seq) + 1), 2)) + return map(operator.getitem, repeat(seq), slices) + + +def polynomial_from_roots(roots): + """Compute a polynomial's coefficients from its roots. + + >>> roots = [5, -4, 3] # (x - 5) * (x + 4) * (x - 3) + >>> polynomial_from_roots(roots) # x^3 - 4 * x^2 - 17 * x + 60 + [1, -4, -17, 60] + """ + factors = zip(repeat(1), map(operator.neg, roots)) + return list(reduce(convolve, factors, [1])) + + +def iter_index(iterable, value, start=0, stop=None): + """Yield the index of each place in *iterable* that *value* occurs, + beginning with index *start* and ending before index *stop*. + + See :func:`locate` for a more general means of finding the indexes + associated with particular values. + + >>> list(iter_index('AABCADEAF', 'A')) + [0, 1, 4, 7] + >>> list(iter_index('AABCADEAF', 'A', 1)) # start index is inclusive + [1, 4, 7] + >>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive + [1, 4] + """ + seq_index = getattr(iterable, 'index', None) + if seq_index is None: + # Slow path for general iterables + it = islice(iterable, start, stop) + for i, element in enumerate(it, start): + if element is value or element == value: + yield i + else: + # Fast path for sequences + stop = len(iterable) if stop is None else stop + i = start - 1 + try: + while True: + yield (i := seq_index(value, i + 1, stop)) + except ValueError: + pass + + +def sieve(n): + """Yield the primes less than n. + + >>> list(sieve(30)) + [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] + """ + if n > 2: + yield 2 + start = 3 + data = bytearray((0, 1)) * (n // 2) + limit = math.isqrt(n) + 1 + for p in iter_index(data, 1, start, limit): + yield from iter_index(data, 1, start, p * p) + data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p))) + start = p * p + yield from iter_index(data, 1, start) + + +def _batched(iterable, n, *, strict=False): + """Batch data into tuples of length *n*. If the number of items in + *iterable* is not divisible by *n*: + * The last batch will be shorter if *strict* is ``False``. + * :exc:`ValueError` will be raised if *strict* is ``True``. + + >>> list(batched('ABCDEFG', 3)) + [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)] + + On Python 3.13 and above, this is an alias for :func:`itertools.batched`. 
+ """ + if n < 1: + raise ValueError('n must be at least one') + it = iter(iterable) + while batch := tuple(islice(it, n)): + if strict and len(batch) != n: + raise ValueError('batched(): incomplete batch') + yield batch + + +if hexversion >= 0x30D00A2: + from itertools import batched as itertools_batched + + def batched(iterable, n, *, strict=False): + return itertools_batched(iterable, n, strict=strict) + +else: + batched = _batched + + batched.__doc__ = _batched.__doc__ + + +def transpose(it): + """Swap the rows and columns of the input matrix. + + >>> list(transpose([(1, 2, 3), (11, 22, 33)])) + [(1, 11), (2, 22), (3, 33)] + + The caller should ensure that the dimensions of the input are compatible. + If the input is empty, no output will be produced. + """ + return _zip_strict(*it) + + +def reshape(matrix, cols): + """Reshape the 2-D input *matrix* to have a column count given by *cols*. + + >>> matrix = [(0, 1), (2, 3), (4, 5)] + >>> cols = 3 + >>> list(reshape(matrix, cols)) + [(0, 1, 2), (3, 4, 5)] + """ + return batched(chain.from_iterable(matrix), cols) + + +def matmul(m1, m2): + """Multiply two matrices. + + >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)])) + [(49, 80), (41, 60)] + + The caller should ensure that the dimensions of the input matrices are + compatible with each other. + """ + n = len(m2[0]) + return batched(starmap(_sumprod, product(m1, transpose(m2))), n) + + +def factor(n): + """Yield the prime factors of n. + + >>> list(factor(360)) + [2, 2, 2, 3, 3, 5] + """ + for prime in sieve(math.isqrt(n) + 1): + while not n % prime: + yield prime + n //= prime + if n == 1: + return + if n > 1: + yield n + + +def polynomial_eval(coefficients, x): + """Evaluate a polynomial at a specific value. + + Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5: + + >>> coefficients = [1, -4, -17, 60] + >>> x = 2.5 + >>> polynomial_eval(coefficients, x) + 8.125 + """ + n = len(coefficients) + if n == 0: + return x * 0 # coerce zero to the type of x + powers = map(pow, repeat(x), reversed(range(n))) + return _sumprod(coefficients, powers) + + +def sum_of_squares(it): + """Return the sum of the squares of the input values. + + >>> sum_of_squares([10, 20, 30]) + 1400 + """ + return _sumprod(*tee(it)) + + +def polynomial_derivative(coefficients): + """Compute the first derivative of a polynomial. + + Example: evaluating the derivative of x^3 - 4 * x^2 - 17 * x + 60 + + >>> coefficients = [1, -4, -17, 60] + >>> derivative_coefficients = polynomial_derivative(coefficients) + >>> derivative_coefficients + [3, -8, -17] + """ + n = len(coefficients) + powers = reversed(range(1, n)) + return list(map(operator.mul, coefficients, powers)) + + +def totient(n): + """Return the count of natural numbers up to *n* that are coprime with *n*. 
+ + >>> totient(9) + 6 + >>> totient(12) + 4 + """ + for p in unique_justseen(factor(n)): + n = n // p * (p - 1) + + return n diff --git a/setuptools/_vendor/more_itertools/recipes.pyi b/setuptools/_vendor/more_itertools/recipes.pyi index 5e39d96390..ed4c19db49 100644 --- a/setuptools/_vendor/more_itertools/recipes.pyi +++ b/setuptools/_vendor/more_itertools/recipes.pyi @@ -1,103 +1,128 @@ """Stubs for more_itertools.recipes""" +from __future__ import annotations + from typing import ( Any, Callable, Iterable, Iterator, - List, - Optional, - Tuple, + overload, + Sequence, + Type, TypeVar, - Union, ) -from typing_extensions import overload, Type # Type and type variable definitions _T = TypeVar('_T') +_T1 = TypeVar('_T1') +_T2 = TypeVar('_T2') _U = TypeVar('_U') -def take(n: int, iterable: Iterable[_T]) -> List[_T]: ... +def take(n: int, iterable: Iterable[_T]) -> list[_T]: ... def tabulate( function: Callable[[int], _T], start: int = ... ) -> Iterator[_T]: ... def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ... -def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ... +def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ... @overload -def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ... +def nth(iterable: Iterable[_T], n: int) -> _T | None: ... @overload -def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ... -def all_equal(iterable: Iterable[object]) -> bool: ... +def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... +def all_equal(iterable: Iterable[_T]) -> bool: ... def quantify( iterable: Iterable[_T], pred: Callable[[_T], bool] = ... ) -> int: ... -def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ... -def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ... +def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ... +def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ... def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ... -def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ... +def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ... def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ... def repeatfunc( - func: Callable[..., _U], times: Optional[int] = ..., *args: Any + func: Callable[..., _U], times: int | None = ..., *args: Any ) -> Iterator[_U]: ... -def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ... -@overload -def grouper( - iterable: Iterable[_T], n: int -) -> Iterator[Tuple[Optional[_T], ...]]: ... -@overload +def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ... def grouper( - iterable: Iterable[_T], n: int, fillvalue: _U -) -> Iterator[Tuple[Union[_T, _U], ...]]: ... -@overload -def grouper( # Deprecated interface - iterable: int, n: Iterable[_T] -) -> Iterator[Tuple[Optional[_T], ...]]: ... -@overload -def grouper( # Deprecated interface - iterable: int, n: Iterable[_T], fillvalue: _U -) -> Iterator[Tuple[Union[_T, _U], ...]]: ... + iterable: Iterable[_T], + n: int, + incomplete: str = ..., + fillvalue: _U = ..., +) -> Iterator[tuple[_T | _U, ...]]: ... def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ... def partition( - pred: Optional[Callable[[_T], object]], iterable: Iterable[_T] -) -> Tuple[Iterator[_T], Iterator[_T]]: ... -def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ... + pred: Callable[[_T], object] | None, iterable: Iterable[_T] +) -> tuple[Iterator[_T], Iterator[_T]]: ... 
+def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... def unique_everseen( - iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ... + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... ) -> Iterator[_T]: ... def unique_justseen( - iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ... + iterable: Iterable[_T], key: Callable[[_T], object] | None = ... ) -> Iterator[_T]: ... @overload def iter_except( - func: Callable[[], _T], exception: Type[BaseException], first: None = ... + func: Callable[[], _T], + exception: Type[BaseException] | tuple[Type[BaseException], ...], + first: None = ..., ) -> Iterator[_T]: ... @overload def iter_except( func: Callable[[], _T], - exception: Type[BaseException], + exception: Type[BaseException] | tuple[Type[BaseException], ...], first: Callable[[], _U], -) -> Iterator[Union[_T, _U]]: ... +) -> Iterator[_T | _U]: ... @overload def first_true( - iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ... -) -> Optional[_T]: ... + iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ... +) -> _T | None: ... @overload def first_true( iterable: Iterable[_T], default: _U, - pred: Optional[Callable[[_T], object]] = ..., -) -> Union[_T, _U]: ... + pred: Callable[[_T], object] | None = ..., +) -> _T | _U: ... def random_product( *args: Iterable[_T], repeat: int = ... -) -> Tuple[_T, ...]: ... +) -> tuple[_T, ...]: ... def random_permutation( - iterable: Iterable[_T], r: Optional[int] = ... -) -> Tuple[_T, ...]: ... -def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ... + iterable: Iterable[_T], r: int | None = ... +) -> tuple[_T, ...]: ... +def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ... def random_combination_with_replacement( iterable: Iterable[_T], r: int -) -> Tuple[_T, ...]: ... +) -> tuple[_T, ...]: ... def nth_combination( iterable: Iterable[_T], r: int, index: int -) -> Tuple[_T, ...]: ... -def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ... +) -> tuple[_T, ...]: ... +def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ... def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ... +def before_and_after( + predicate: Callable[[_T], bool], it: Iterable[_T] +) -> tuple[Iterator[_T], Iterator[_T]]: ... +def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ... +def sliding_window( + iterable: Iterable[_T], n: int +) -> Iterator[tuple[_T, ...]]: ... +def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ... +def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ... +def iter_index( + iterable: Iterable[_T], + value: Any, + start: int | None = ..., + stop: int | None = ..., +) -> Iterator[int]: ... +def sieve(n: int) -> Iterator[int]: ... +def batched( + iterable: Iterable[_T], n: int, *, strict: bool = False +) -> Iterator[tuple[_T]]: ... +def transpose( + it: Iterable[Iterable[_T]], +) -> Iterator[tuple[_T, ...]]: ... +def reshape( + matrix: Iterable[Iterable[_T]], cols: int +) -> Iterator[tuple[_T, ...]]: ... +def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ... +def factor(n: int) -> Iterator[int]: ... +def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ... +def sum_of_squares(it: Iterable[_T]) -> _T: ... +def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ... +def totient(n: int) -> int: ... 
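Side note (editor's illustration, not part of the patch): the recipes.pyi stubs above now cover the recipes added in this more_itertools release — grouper()'s incomplete= modes, batched(), sieve(), factor(), totient(), and so on. Below is a minimal sketch of how those calls behave, assuming the upstream more_itertools package (which these vendored files mirror) is importable; the expected outputs in the comments are taken from the doctests in recipes.py above.

# Illustrative only: exercises a few recipes whose signatures the updated stubs cover.
from more_itertools import grouper, batched, sieve, factor, totient

# grouper() now takes an `incomplete` mode instead of the old deprecated call form.
print(list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x')))
# [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
print(list(grouper('ABCDEFG', 3, incomplete='ignore')))
# [('A', 'B', 'C'), ('D', 'E', 'F')]

# batched() defers to itertools.batched on new enough Pythons and falls back otherwise.
print(list(batched(range(7), 3)))
# [(0, 1, 2), (3, 4, 5), (6,)]

# The number-theory helpers added in this version.
print(list(sieve(20)))    # [2, 3, 5, 7, 11, 13, 17, 19]
print(list(factor(360)))  # [2, 2, 2, 3, 3, 5]
print(totient(12))        # 4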
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA b/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA index 4c64d142b9..db6e12f2dc 100644 --- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA @@ -153,3 +153,5 @@ look up an entry by its index. OrderedSet is automatically tested on Python 2.7, 3.4, 3.5, 3.6, and 3.7. We've checked more informally that it works on PyPy and PyPy3. + + diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD index 3267872d45..ab60336edc 100644 --- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD @@ -1,9 +1,9 @@ -__pycache__/ordered_set.cpython-312.pyc,, +__pycache__/ordered_set.cpython-38.pyc,, ordered_set-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -ordered_set-3.1.1.dist-info/METADATA,sha256=qEaJM9CbGNixB_jvfohisKbXTUjcef6nCCcBJju6f4U,5357 +ordered_set-3.1.1.dist-info/METADATA,sha256=uGvfFaNmhcl69lGdHmyOXc30N3U6Jn8DByfh_VHEPpw,5359 ordered_set-3.1.1.dist-info/MIT-LICENSE,sha256=TvRE7qUSUBcd0ols7wgNf3zDEEJWW7kv7WDRySrMBBE,1071 ordered_set-3.1.1.dist-info/RECORD,, ordered_set-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -ordered_set-3.1.1.dist-info/WHEEL,sha256=DZajD4pwLWue70CAfc7YaxT1wLUciNBvN_TTcvXpltE,110 +ordered_set-3.1.1.dist-info/WHEEL,sha256=WzZ8cwjh8l0jtULNjYq1Hpr-WCqCRgPr--TX4P5I1Wo,110 ordered_set-3.1.1.dist-info/top_level.txt,sha256=NTY2_aDi1Do9fl3Z9EmWPxasFkUeW2dzO2D3RDx5CfM,12 ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130 diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL index 832be11132..b733a60d37 100644 --- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.43.0) +Generator: bdist_wheel (0.37.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/setuptools/_vendor/packaging-24.0.dist-info/RECORD b/setuptools/_vendor/packaging-24.0.dist-info/RECORD index bcf796c2f4..9abc2bcbe6 100644 --- a/setuptools/_vendor/packaging-24.0.dist-info/RECORD +++ b/setuptools/_vendor/packaging-24.0.dist-info/RECORD @@ -7,20 +7,20 @@ packaging-24.0.dist-info/RECORD,, packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496 -packaging/__pycache__/__init__.cpython-312.pyc,, -packaging/__pycache__/_elffile.cpython-312.pyc,, -packaging/__pycache__/_manylinux.cpython-312.pyc,, -packaging/__pycache__/_musllinux.cpython-312.pyc,, -packaging/__pycache__/_parser.cpython-312.pyc,, -packaging/__pycache__/_structures.cpython-312.pyc,, -packaging/__pycache__/_tokenizer.cpython-312.pyc,, -packaging/__pycache__/markers.cpython-312.pyc,, -packaging/__pycache__/metadata.cpython-312.pyc,, -packaging/__pycache__/requirements.cpython-312.pyc,, -packaging/__pycache__/specifiers.cpython-312.pyc,, -packaging/__pycache__/tags.cpython-312.pyc,, -packaging/__pycache__/utils.cpython-312.pyc,, -packaging/__pycache__/version.cpython-312.pyc,, +packaging/__pycache__/__init__.cpython-38.pyc,, +packaging/__pycache__/_elffile.cpython-38.pyc,, 
+packaging/__pycache__/_manylinux.cpython-38.pyc,, +packaging/__pycache__/_musllinux.cpython-38.pyc,, +packaging/__pycache__/_parser.cpython-38.pyc,, +packaging/__pycache__/_structures.cpython-38.pyc,, +packaging/__pycache__/_tokenizer.cpython-38.pyc,, +packaging/__pycache__/markers.cpython-38.pyc,, +packaging/__pycache__/metadata.cpython-38.pyc,, +packaging/__pycache__/requirements.cpython-38.pyc,, +packaging/__pycache__/specifiers.cpython-38.pyc,, +packaging/__pycache__/tags.cpython-38.pyc,, +packaging/__pycache__/utils.cpython-38.pyc,, +packaging/__pycache__/version.cpython-38.pyc,, packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590 packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676 diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py index 2d015bab59..e8378f33bf 100644 --- a/setuptools/_vendor/packaging/specifiers.py +++ b/setuptools/_vendor/packaging/specifiers.py @@ -4,8 +4,8 @@ """ .. testsetup:: - from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier - from packaging.version import Version + from setuptools.extern.packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from setuptools.extern.packaging.version import Version """ import abc diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py index 5faab9bd0d..5b984cebe2 100644 --- a/setuptools/_vendor/packaging/version.py +++ b/setuptools/_vendor/packaging/version.py @@ -4,7 +4,7 @@ """ .. testsetup:: - from packaging.version import parse, Version + from setuptools.extern.packaging.version import parse, Version """ import itertools diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD index 1db8063ec5..a0bb448150 100644 --- a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD @@ -5,10 +5,10 @@ tomli-2.0.1.dist-info/RECORD,, tomli-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 tomli-2.0.1.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81 tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396 -tomli/__pycache__/__init__.cpython-312.pyc,, -tomli/__pycache__/_parser.cpython-312.pyc,, -tomli/__pycache__/_re.cpython-312.pyc,, -tomli/__pycache__/_types.cpython-312.pyc,, +tomli/__pycache__/__init__.cpython-38.pyc,, +tomli/__pycache__/_parser.cpython-38.pyc,, +tomli/__pycache__/_re.cpython-38.pyc,, +tomli/__pycache__/_types.cpython-38.pyc,, tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633 tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943 tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD index adc797bc2e..29b415e827 100644 --- a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD @@ -1,9 +1,8 @@ -__pycache__/zipp.cpython-312.pyc,, +__pycache__/zipp.cpython-38.pyc,, zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261 zipp-3.7.0.dist-info/RECORD,, 
-zipp-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5 zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425 diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED b/setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index b0e646bc8c..8eb02ac6d3 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -77,15 +77,15 @@ def install(self): # cog.outl(f"names = (\n{names}\n)") # ]]] names = ( - 'packaging', - 'ordered_set', - 'more_itertools', - 'jaraco', - 'importlib_resources', + 'backports', 'importlib_metadata', - 'zipp', + 'importlib_resources', + 'jaraco', + 'more_itertools', + 'ordered_set', + 'packaging', 'tomli', - 'backports', + 'zipp', ) # [[[end]]] VendorImporter(__name__, names, 'setuptools._vendor').install()
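Side note (editor's illustration, not part of the patch): the reshuffled names tuple in setuptools/extern/__init__.py only alphabetizes the cog-generated list; no vendored package is added or removed. A quick sanity check, with both tuples copied verbatim from the hunk above:

# Illustrative only: the old and new `names` tuples contain the same entries.
old_names = (
    'packaging', 'ordered_set', 'more_itertools', 'jaraco',
    'importlib_resources', 'importlib_metadata', 'zipp', 'tomli', 'backports',
)
new_names = (
    'backports', 'importlib_metadata', 'importlib_resources', 'jaraco',
    'more_itertools', 'ordered_set', 'packaging', 'tomli', 'zipp',
)
assert set(old_names) == set(new_names)          # same packages
assert tuple(sorted(old_names)) == new_names     # new order is simply sorted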