diff --git a/build.py b/build.py index af75bcb57..7783ae80d 100755 --- a/build.py +++ b/build.py @@ -107,13 +107,16 @@ def initialize(project): "virtualenv>=20.0.0", "importlib-resources>=1.0", "importlib-metadata>=0.12", + "filelock<=3.4.1", + "platformdirs<=2.4.0", "typing-extensions", "colorama~=0.4.3" ]) project.set_property("vendorize_cleanup_globs", ["bin", "setuptools", - "easy_install.py"]) - project.set_property("vendorize_preserve_metadata", ["virtualenv*"]) + "easy_install.py", + "*.pth"]) + project.set_property("vendorize_preserve_metadata", ["virtualenv*", "importlib_metadata*"]) project.set_property("coverage_break_build", False) project.get_property("coverage_exceptions").extend(["pybuilder._vendor", diff --git a/src/main/python/pybuilder/_vendor/LICENSES b/src/main/python/pybuilder/_vendor/LICENSES index 9e288bfc6..283c18a2c 100644 --- a/src/main/python/pybuilder/_vendor/LICENSES +++ b/src/main/python/pybuilder/_vendor/LICENSES @@ -29,7 +29,7 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -importlib_metadata-4.8.1 +importlib_metadata-4.10.0 ========== Copyright 2017-2019 Jason R. Coombs, Barry Warsaw @@ -63,7 +63,7 @@ See the License for the specific language governing permissions and limitations under the License. -setuptools-58.5.2 +setuptools-60.5.0 ========== Copyright Jason R. Coombs @@ -131,7 +131,7 @@ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWIS THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -virtualenv-20.10.0 +virtualenv-20.13.0 ========== Copyright (c) 2020-202x The virtualenv developers @@ -155,30 +155,295 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -backports.entry_points_selectable-1.1.0 +distlib-0.3.4 ========== -Copyright Jason R. Coombs +A. HISTORY OF THE SOFTWARE +========================== -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
+In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.2 2.1.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2.1 2.2 2002 PSF yes + 2.2.2 2.2.1 2002 PSF yes + 2.2.3 2.2.2 2003 PSF yes + 2.3 2.2.2 2002-2003 PSF yes + 2.3.1 2.3 2002-2003 PSF yes + 2.3.2 2.3.1 2002-2003 PSF yes + 2.3.3 2.3.2 2002-2003 PSF yes + 2.3.4 2.3.3 2004 PSF yes + 2.3.5 2.3.4 2005 PSF yes + 2.4 2.3 2004 PSF yes + 2.4.1 2.4 2005 PSF yes + 2.4.2 2.4.1 2005 PSF yes + 2.4.3 2.4.2 2006 PSF yes + 2.4.4 2.4.3 2006 PSF yes + 2.5 2.4 2006 PSF yes + 2.5.1 2.5 2007 PSF yes + 2.5.2 2.5.1 2008 PSF yes + 2.5.3 2.5.2 2008 PSF yes + 2.6 2.5 2008 PSF yes + 2.6.1 2.6 2008 PSF yes + 2.6.2 2.6.1 2009 PSF yes + 2.6.3 2.6.2 2009 PSF yes + 2.6.4 2.6.3 2009 PSF yes + 2.6.5 2.6.4 2010 PSF yes + 3.0 2.6 2008 PSF yes + 3.0.1 3.0 2009 PSF yes + 3.1 3.0.1 2009 PSF yes + 3.1.1 3.1 2009 PSF yes + 3.1.2 3.1 2010 PSF yes + 3.2 3.1 2010 PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 +Python Software Foundation; All Rights Reserved" are retained in Python alone or +in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. 
BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -filelock-3.3.2 +filelock-3.4.1 ========== This is free and unencumbered software released into the public domain. @@ -254,7 +519,7 @@ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -zipp-3.6.0 +zipp-3.7.0 ========== Copyright Jason R. Coombs @@ -277,7 +542,7 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -typing_extensions-3.10.0.2 +typing_extensions-4.0.1 ========== A. 
HISTORY OF THE SOFTWARE ========================== diff --git a/src/main/python/pybuilder/_vendor/__init__.py b/src/main/python/pybuilder/_vendor/__init__.py index abe956a3e..5b7679faa 100644 --- a/src/main/python/pybuilder/_vendor/__init__.py +++ b/src/main/python/pybuilder/_vendor/__init__.py @@ -1 +1 @@ -__names__ = ['_distutils_hack', 'backports', 'colorama', 'distlib', 'filelock', 'importlib_metadata', 'importlib_resources', 'pkg_resources', 'platformdirs', 'six', 'tailer', 'tblib', 'typing_extensions', 'virtualenv', 'zipp'] +__names__ = ['_distutils_hack', 'colorama', 'distlib', 'filelock', 'importlib_metadata', 'importlib_resources', 'pkg_resources', 'platformdirs', 'six', 'tailer', 'tblib', 'typing_extensions', 'virtualenv', 'zipp'] diff --git a/src/main/python/pybuilder/_vendor/_distutils_hack/__init__.py b/src/main/python/pybuilder/_vendor/_distutils_hack/__init__.py index 5f40996a6..ab462f950 100644 --- a/src/main/python/pybuilder/_vendor/_distutils_hack/__init__.py +++ b/src/main/python/pybuilder/_vendor/_distutils_hack/__init__.py @@ -1,18 +1,11 @@ +# don't import any costly modules import sys import os -import re -import importlib -import warnings is_pypy = '__pypy__' in sys.builtin_module_names -warnings.filterwarnings('ignore', - r'.+ distutils\b.+ deprecated', - DeprecationWarning) - - def warn_distutils_present(): if 'distutils' not in sys.modules: return @@ -20,6 +13,7 @@ def warn_distutils_present(): # PyPy for 3.6 unconditionally imports distutils, so bypass the warning # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 return + import warnings warnings.warn( "Distutils was imported before Setuptools, but importing Setuptools " "also replaces the `distutils` module in `sys.modules`. This may lead " @@ -32,8 +26,12 @@ def warn_distutils_present(): def clear_distutils(): if 'distutils' not in sys.modules: return + import warnings warnings.warn("Setuptools is replacing distutils.") - mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + mods = [ + name for name in sys.modules + if name == "distutils" or name.startswith("distutils.") + ] for name in mods: del sys.modules[name] @@ -42,17 +40,21 @@ def enabled(): """ Allow selection of distutils by environment variable. """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') return which == 'local' def ensure_local_distutils(): + import importlib clear_distutils() - distutils = importlib.import_module('setuptools._distutils') - distutils.__name__ = 'distutils' - sys.modules['distutils'] = distutils - # sanity check that submodules load as expected + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. 
+ with shim(): + importlib.import_module('distutils') + + # check that submodules load as expected core = importlib.import_module('distutils.core') assert '_distutils' in core.__file__, core.__file__ @@ -69,6 +71,14 @@ def do_override(): ensure_local_distutils() +class _TrivialRe: + def __init__(self, *patterns): + self._patterns = patterns + + def match(self, string): + return all(pat in string for pat in self._patterns) + + class DistutilsMetaFinder: def find_spec(self, fullname, path, target=None): if path is not None: @@ -79,18 +89,45 @@ def find_spec(self, fullname, path, target=None): return method() def spec_for_distutils(self): + import importlib import importlib.abc import importlib.util + import warnings + + # warnings.filterwarnings() imports the re module + warnings._add_filter( + 'ignore', + _TrivialRe("distutils", "deprecated"), + DeprecationWarning, + None, + 0, + append=True + ) + + try: + mod = importlib.import_module('setuptools._distutils') + except Exception: + # There are a couple of cases where setuptools._distutils + # may not be present: + # - An older Setuptools without a local distutils is + # taking precedence. Ref #2957. + # - Path manipulation during sitecustomize removes + # setuptools from the path but only after the hook + # has been loaded. Ref #2980. + # In either case, fall back to stdlib behavior. + return class DistutilsLoader(importlib.abc.Loader): def create_module(self, spec): - return importlib.import_module('setuptools._distutils') + return mod def exec_module(self, module): pass - return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + return importlib.util.spec_from_loader( + 'distutils', DistutilsLoader(), origin=mod.__file__ + ) def spec_for_pip(self): """ @@ -99,25 +136,58 @@ def spec_for_pip(self): """ if self.pip_imported_during_build(): return + if self.is_get_pip(): + return clear_distutils() self.spec_for_distutils = lambda: None - @staticmethod - def pip_imported_during_build(): + @classmethod + def pip_imported_during_build(cls): """ Detect if pip is being imported in a build script. Ref #2355. """ import traceback return any( - frame.f_globals['__file__'].endswith('setup.py') + cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) ) + @classmethod + def is_get_pip(cls): + """ + Detect if get-pip is being invoked. Ref #2993. + """ + try: + import __main__ + return os.path.basename(__main__.__file__) == 'get-pip.py' + except AttributeError: + pass + + @staticmethod + def frame_file_is_setup(frame): + """ + Return True if the indicated frame suggests a setup.py file. 
+ """ + # some frames may not have __file__ (#2940) + return frame.f_globals.get('__file__', '').endswith('setup.py') + DISTUTILS_FINDER = DistutilsMetaFinder() def add_shim(): + DISTUTILS_FINDER in sys.meta_path or insert_shim() + + +class shim: + def __enter__(self): + insert_shim() + + def __exit__(self, exc, value, tb): + remove_shim() + + +def insert_shim(): sys.meta_path.insert(0, DISTUTILS_FINDER) diff --git a/src/main/python/pybuilder/_vendor/backports/__init__.py b/src/main/python/pybuilder/_vendor/backports/__init__.py deleted file mode 100644 index 0d1f7edf5..000000000 --- a/src/main/python/pybuilder/_vendor/backports/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/src/main/python/pybuilder/_vendor/backports/entry_points_selectable.py b/src/main/python/pybuilder/_vendor/backports/entry_points_selectable.py deleted file mode 100644 index 8afd7e10c..000000000 --- a/src/main/python/pybuilder/_vendor/backports/entry_points_selectable.py +++ /dev/null @@ -1,275 +0,0 @@ -""" ->>> hasattr(entry_points(), 'select') -True ->>> tuple(entry_points(group='console_scripts')) -(...) - -Some usage is deprecated and may emit deprecation warnings -on later versions. - ->>> import warnings ->>> warnings.filterwarnings('ignore', category=DeprecationWarning) - ->>> entry_points()['console_scripts'][0] -EntryPoint...(...) -""" - -import collections -import textwrap -import itertools -import operator -import functools - -try: - from itertools import filterfalse # type: ignore -except ImportError: - from itertools import ifilterfalse as filterfalse # type: ignore - - -try: - # prefer importlib_metadata if it has EntryPoints - from .. import importlib_metadata as metadata # type: ignore - - if not hasattr(metadata, 'EntryPoints'): - raise ImportError("package without EntryPoints") - from ..importlib_metadata import distributions, EntryPoint # type: ignore -except ImportError: - try: - import importlib.metadata as metadata # type: ignore - from importlib.metadata import distributions, EntryPoint # type: ignore - except ImportError: - from ..importlib_metadata import distributions, EntryPoint # type: ignore - - -__all__ = ['entry_points'] - - -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - - -class Pair(collections.namedtuple('Pair', 'name value')): - @classmethod - def parse(cls, text): - return cls(*map(str.strip, text.split("=", 1))) - - -class Sectioned: - """ - A simple entry point config parser for performance - - >>> for item in Sectioned.read(Sectioned._sample): - ... 
print(item) - Pair(name='sec1', value='# comments ignored') - Pair(name='sec1', value='a = 1') - Pair(name='sec1', value='b = 2') - Pair(name='sec2', value='a = 2') - - >>> res = Sectioned.section_pairs(Sectioned._sample) - >>> item = next(res) - >>> item.name - 'sec1' - >>> item.value - Pair(name='a', value='1') - >>> item = next(res) - >>> item.value - Pair(name='b', value='2') - >>> item = next(res) - >>> item.name - 'sec2' - >>> item.value - Pair(name='a', value='2') - >>> list(res) - [] - """ - - _sample = textwrap.dedent( - """ - [sec1] - # comments ignored - a = 1 - b = 2 - - [sec2] - a = 2 - """ - ).lstrip() - - @classmethod - def section_pairs(cls, text): - return ( - section._replace(value=Pair.parse(section.value)) - for section in cls.read(text, filter_=cls.valid) - if section.name is not None - ) - - @staticmethod - def read(text, filter_=None): - lines = filter(filter_, map(str.strip, text.splitlines())) - name = None - for value in lines: - section_match = value.startswith('[') and value.endswith(']') - if section_match: - name = value.strip('[]') - continue - yield Pair(name, value) - - @staticmethod - def valid(line): - return line and not line.startswith('#') - - -def compat_matches(ep, **params): - try: - return ep.matches(**params) - except AttributeError: - pass - attrs = (getattr(ep, param) for param in params) - return all(map(operator.eq, params.values(), attrs)) - - -class EntryPoints(list): - """ - An immutable collection of selectable EntryPoint objects. - """ - - __slots__ = () - - def __getitem__(self, name): # -> EntryPoint: - """ - Get the EntryPoint in self matching name. - """ - if isinstance(name, int): - return super(EntryPoints, self).__getitem__(name) - try: - return next(iter(self.select(name=name))) - except StopIteration: - raise KeyError(name) - - def select(self, **params): - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - return EntryPoints(ep for ep in self if compat_matches(ep, **params)) - - @property - def names(self): - """ - Return the set of all names of all entry points. - """ - return set(ep.name for ep in self) - - @property - def groups(self): - """ - Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set(...) - """ - return set(ep.group for ep in self) - - @classmethod - def _from_text_for(cls, text, dist): - return cls(ep._for(dist) for ep in cls._from_text(text)) - - @classmethod - def _from_text(cls, text): - return itertools.starmap(EntryPoint, cls._parse_groups(text or '')) - - @staticmethod - def _parse_groups(text): - return ( - (item.value.name, item.value.value, item.name) - for item in Sectioned.section_pairs(text) - ) - - -class SelectableGroups(dict): - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - - @classmethod - def load(cls, eps): - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return cls((group, EntryPoints(eps)) for group, eps in grouped) - - @property - def _all(self): - """ - Reconstruct a list of all entrypoints from the groups. - """ - return EntryPoints(itertools.chain.from_iterable(self.values())) - - @property - def groups(self): - return self._all.groups - - @property - def names(self): - """ - for coverage: - >>> SelectableGroups().names - set(...) 
- """ - return self._all.names - - def select(self, **params): - if not params: - return self - return self._all.select(**params) - - -def entry_points_compat(**params): - """Return EntryPoint objects for all installed packages. - - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. - """ - - def dist_name(dist): - return dist.metadata['Name'] - - unique = functools.partial(unique_everseen, key=dist_name) - eps = itertools.chain.from_iterable( - dist.entry_points for dist in unique(distributions()) - ) - return SelectableGroups.load(eps).select(**params) - - -needs_backport = not hasattr(metadata, 'EntryPoints') or issubclass( - metadata.EntryPoints, tuple -) - -entry_points = entry_points_compat if needs_backport else metadata.entry_points diff --git a/src/main/python/pybuilder/_vendor/distlib/__init__.py b/src/main/python/pybuilder/_vendor/distlib/__init__.py index 115494810..6878387a0 100644 --- a/src/main/python/pybuilder/_vendor/distlib/__init__.py +++ b/src/main/python/pybuilder/_vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.3.3' +__version__ = '0.3.4' class DistlibException(Exception): pass diff --git a/src/main/python/pybuilder/_vendor/distlib/_backport/__init__.py b/src/main/python/pybuilder/_vendor/distlib/_backport/__init__.py deleted file mode 100644 index f7dbf4c9a..000000000 --- a/src/main/python/pybuilder/_vendor/distlib/_backport/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Modules copied from Python 3 standard libraries, for internal use only. - -Individual classes and functions are found in d2._backport.misc. Intended -usage is to always import things missing from 3.1 from that module: the -built-in/stdlib objects will be used if found. -""" diff --git a/src/main/python/pybuilder/_vendor/distlib/_backport/misc.py b/src/main/python/pybuilder/_vendor/distlib/_backport/misc.py deleted file mode 100644 index cfb318d34..000000000 --- a/src/main/python/pybuilder/_vendor/distlib/_backport/misc.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -"""Backports for individual classes and functions.""" - -import os -import sys - -__all__ = ['cache_from_source', 'callable', 'fsencode'] - - -try: - from imp import cache_from_source -except ImportError: - def cache_from_source(py_file, debug=__debug__): - ext = debug and 'c' or 'o' - return py_file + ext - - -try: - callable = callable -except NameError: - from collections import Callable - - def callable(obj): - return isinstance(obj, Callable) - - -try: - fsencode = os.fsencode -except AttributeError: - def fsencode(filename): - if isinstance(filename, bytes): - return filename - elif isinstance(filename, str): - return filename.encode(sys.getfilesystemencoding()) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) diff --git a/src/main/python/pybuilder/_vendor/distlib/_backport/shutil.py b/src/main/python/pybuilder/_vendor/distlib/_backport/shutil.py deleted file mode 100644 index 10ed36253..000000000 --- a/src/main/python/pybuilder/_vendor/distlib/_backport/shutil.py +++ /dev/null @@ -1,764 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -"""Utility functions for copying and archiving files and directory trees. - -XXX The functions here don't copy the resource fork or other metadata on Mac. - -""" - -import os -import sys -import stat -from os.path import abspath -import fnmatch -try: - from collections.abc import Callable -except ImportError: - from collections import Callable -import errno -from . import tarfile - -try: - import bz2 - _BZ2_SUPPORTED = True -except ImportError: - _BZ2_SUPPORTED = False - -try: - from pwd import getpwnam -except ImportError: - getpwnam = None - -try: - from grp import getgrnam -except ImportError: - getgrnam = None - -__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", - "copytree", "move", "rmtree", "Error", "SpecialFileError", - "ExecError", "make_archive", "get_archive_formats", - "register_archive_format", "unregister_archive_format", - "get_unpack_formats", "register_unpack_format", - "unregister_unpack_format", "unpack_archive", "ignore_patterns"] - -class Error(EnvironmentError): - pass - -class SpecialFileError(EnvironmentError): - """Raised when trying to do a kind of operation (e.g. copying) which is - not supported on a special file (e.g. a named pipe)""" - -class ExecError(EnvironmentError): - """Raised when a command could not be executed""" - -class ReadError(EnvironmentError): - """Raised when an archive cannot be read""" - -class RegistryError(Exception): - """Raised when a registry operation with the archiving - and unpacking registries fails""" - - -try: - WindowsError -except NameError: - WindowsError = None - -def copyfileobj(fsrc, fdst, length=16*1024): - """copy data from file-like object fsrc to file-like object fdst""" - while 1: - buf = fsrc.read(length) - if not buf: - break - fdst.write(buf) - -def _samefile(src, dst): - # Macintosh, Unix. - if hasattr(os.path, 'samefile'): - try: - return os.path.samefile(src, dst) - except OSError: - return False - - # All other platforms: check for same pathname. 
- return (os.path.normcase(os.path.abspath(src)) == - os.path.normcase(os.path.abspath(dst))) - -def copyfile(src, dst): - """Copy data from src to dst""" - if _samefile(src, dst): - raise Error("`%s` and `%s` are the same file" % (src, dst)) - - for fn in [src, dst]: - try: - st = os.stat(fn) - except OSError: - # File most likely does not exist - pass - else: - # XXX What about other special files? (sockets, devices...) - if stat.S_ISFIFO(st.st_mode): - raise SpecialFileError("`%s` is a named pipe" % fn) - - with open(src, 'rb') as fsrc: - with open(dst, 'wb') as fdst: - copyfileobj(fsrc, fdst) - -def copymode(src, dst): - """Copy mode bits from src to dst""" - if hasattr(os, 'chmod'): - st = os.stat(src) - mode = stat.S_IMODE(st.st_mode) - os.chmod(dst, mode) - -def copystat(src, dst): - """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" - st = os.stat(src) - mode = stat.S_IMODE(st.st_mode) - if hasattr(os, 'utime'): - os.utime(dst, (st.st_atime, st.st_mtime)) - if hasattr(os, 'chmod'): - os.chmod(dst, mode) - if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): - try: - os.chflags(dst, st.st_flags) - except OSError as why: - if (not hasattr(errno, 'EOPNOTSUPP') or - why.errno != errno.EOPNOTSUPP): - raise - -def copy(src, dst): - """Copy data and mode bits ("cp src dst"). - - The destination may be a directory. - - """ - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - copyfile(src, dst) - copymode(src, dst) - -def copy2(src, dst): - """Copy data and all stat info ("cp -p src dst"). - - The destination may be a directory. - - """ - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - copyfile(src, dst) - copystat(src, dst) - -def ignore_patterns(*patterns): - """Function that can be used as copytree() ignore parameter. - - Patterns is a sequence of glob-style patterns - that are used to exclude files""" - def _ignore_patterns(path, names): - ignored_names = [] - for pattern in patterns: - ignored_names.extend(fnmatch.filter(names, pattern)) - return set(ignored_names) - return _ignore_patterns - -def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, - ignore_dangling_symlinks=False): - """Recursively copy a directory tree. - - The destination directory must not already exist. - If exception(s) occur, an Error is raised with a list of reasons. - - If the optional symlinks flag is true, symbolic links in the - source tree result in symbolic links in the destination tree; if - it is false, the contents of the files pointed to by symbolic - links are copied. If the file pointed by the symlink doesn't - exist, an exception will be added in the list of errors raised in - an Error exception at the end of the copy process. - - You can set the optional ignore_dangling_symlinks flag to true if you - want to silence this exception. Notice that this has no effect on - platforms that don't support os.symlink. - - The optional ignore argument is a callable. If given, it - is called with the `src` parameter, which is the directory - being visited by copytree(), and `names` which is the list of - `src` contents, as returned by os.listdir(): - - callable(src, names) -> ignored_names - - Since copytree() is called recursively, the callable will be - called once for each directory that is copied. It returns a - list of names relative to the `src` directory that should - not be copied. - - The optional copy_function argument is a callable that will be used - to copy each file. 
It will be called with the source path and the - destination path as arguments. By default, copy2() is used, but any - function that supports the same signature (like copy()) can be used. - - """ - names = os.listdir(src) - if ignore is not None: - ignored_names = ignore(src, names) - else: - ignored_names = set() - - os.makedirs(dst) - errors = [] - for name in names: - if name in ignored_names: - continue - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) - try: - if os.path.islink(srcname): - linkto = os.readlink(srcname) - if symlinks: - os.symlink(linkto, dstname) - else: - # ignore dangling symlink if the flag is on - if not os.path.exists(linkto) and ignore_dangling_symlinks: - continue - # otherwise let the copy occurs. copy2 will raise an error - copy_function(srcname, dstname) - elif os.path.isdir(srcname): - copytree(srcname, dstname, symlinks, ignore, copy_function) - else: - # Will raise a SpecialFileError for unsupported file types - copy_function(srcname, dstname) - # catch the Error from the recursive copytree so that we can - # continue with other files - except Error as err: - errors.extend(err.args[0]) - except EnvironmentError as why: - errors.append((srcname, dstname, str(why))) - try: - copystat(src, dst) - except OSError as why: - if WindowsError is not None and isinstance(why, WindowsError): - # Copying file access times may fail on Windows - pass - else: - errors.extend((src, dst, str(why))) - if errors: - raise Error(errors) - -def rmtree(path, ignore_errors=False, onerror=None): - """Recursively delete a directory tree. - - If ignore_errors is set, errors are ignored; otherwise, if onerror - is set, it is called to handle the error with arguments (func, - path, exc_info) where func is os.listdir, os.remove, or os.rmdir; - path is the argument to that function that caused it to fail; and - exc_info is a tuple returned by sys.exc_info(). If ignore_errors - is false and onerror is None, an exception is raised. - - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - try: - if os.path.islink(path): - # symlinks to directories are forbidden, see bug #1669 - raise OSError("Cannot call rmtree on a symbolic link") - except OSError: - onerror(os.path.islink, path, sys.exc_info()) - # can't continue even if onerror hook returns - return - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - - -def _basename(path): - # A basename() variant which first strips the trailing slash, if present. - # Thus we always get the last component of the path, even for directories. - return os.path.basename(path.rstrip(os.path.sep)) - -def move(src, dst): - """Recursively move a file or directory to another location. This is - similar to the Unix "mv" command. - - If the destination is a directory or a symlink to a directory, the source - is moved inside the directory. The destination path must not already - exist. - - If the destination already exists but is not a directory, it may be - overwritten depending on os.rename() semantics. 
- - If the destination is on our current filesystem, then rename() is used. - Otherwise, src is copied to the destination and then removed. - A lot more could be done here... A look at a mv.c shows a lot of - the issues this implementation glosses over. - - """ - real_dst = dst - if os.path.isdir(dst): - if _samefile(src, dst): - # We might be on a case insensitive filesystem, - # perform the rename anyway. - os.rename(src, dst) - return - - real_dst = os.path.join(dst, _basename(src)) - if os.path.exists(real_dst): - raise Error("Destination path '%s' already exists" % real_dst) - try: - os.rename(src, real_dst) - except OSError: - if os.path.isdir(src): - if _destinsrc(src, dst): - raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) - copytree(src, real_dst, symlinks=True) - rmtree(src) - else: - copy2(src, real_dst) - os.unlink(src) - -def _destinsrc(src, dst): - src = abspath(src) - dst = abspath(dst) - if not src.endswith(os.path.sep): - src += os.path.sep - if not dst.endswith(os.path.sep): - dst += os.path.sep - return dst.startswith(src) - -def _get_gid(name): - """Returns a gid, given a group name.""" - if getgrnam is None or name is None: - return None - try: - result = getgrnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _get_uid(name): - """Returns an uid, given a user name.""" - if getpwnam is None or name is None: - return None - try: - result = getpwnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None, logger=None): - """Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", or None. - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_name' + ".tar", possibly plus - the appropriate compression extension (".gz", or ".bz2"). - - Returns the output filename. 
- """ - tar_compression = {'gzip': 'gz', None: ''} - compress_ext = {'gzip': '.gz'} - - if _BZ2_SUPPORTED: - tar_compression['bzip2'] = 'bz2' - compress_ext['bzip2'] = '.bz2' - - # flags for compression program, each element of list will be an argument - if compress is not None and compress not in compress_ext: - raise ValueError("bad value for 'compress', or compression format not " - "supported : {0}".format(compress)) - - archive_name = base_name + '.tar' + compress_ext.get(compress, '') - archive_dir = os.path.dirname(archive_name) - - if not os.path.exists(archive_dir): - if logger is not None: - logger.info("creating %s", archive_dir) - if not dry_run: - os.makedirs(archive_dir) - - # creating the tarball - if logger is not None: - logger.info('Creating tar archive') - - uid = _get_uid(owner) - gid = _get_gid(group) - - def _set_uid_gid(tarinfo): - if gid is not None: - tarinfo.gid = gid - tarinfo.gname = group - if uid is not None: - tarinfo.uid = uid - tarinfo.uname = owner - return tarinfo - - if not dry_run: - tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) - try: - tar.add(base_dir, filter=_set_uid_gid) - finally: - tar.close() - - return archive_name - -def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): - # XXX see if we want to keep an external call here - if verbose: - zipoptions = "-r" - else: - zipoptions = "-rq" - from distutils.errors import DistutilsExecError - from distutils.spawn import spawn - try: - spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) - except DistutilsExecError: - # XXX really should distinguish between "couldn't find - # external 'zip' command" and "zip failed". - raise ExecError("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename - -def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): - """Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises ExecError. Returns the name of the output zip - file. - """ - zip_filename = base_name + ".zip" - archive_dir = os.path.dirname(base_name) - - if not os.path.exists(archive_dir): - if logger is not None: - logger.info("creating %s", archive_dir) - if not dry_run: - os.makedirs(archive_dir) - - # If zipfile module is not available, try spawning an external 'zip' - # command. 
- try: - import zipfile - except ImportError: - zipfile = None - - if zipfile is None: - _call_external_zip(base_dir, zip_filename, verbose, dry_run) - else: - if logger is not None: - logger.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) - - if not dry_run: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) - - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - if os.path.isfile(path): - zip.write(path, path) - if logger is not None: - logger.info("adding '%s'", path) - zip.close() - - return zip_filename - -_ARCHIVE_FORMATS = { - 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), - 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), - 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (_make_zipfile, [], "ZIP file"), - } - -if _BZ2_SUPPORTED: - _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], - "bzip2'ed tar-file") - -def get_archive_formats(): - """Returns a list of supported formats for archiving and unarchiving. - - Each element of the returned sequence is a tuple (name, description) - """ - formats = [(name, registry[2]) for name, registry in - _ARCHIVE_FORMATS.items()] - formats.sort() - return formats - -def register_archive_format(name, function, extra_args=None, description=''): - """Registers an archive format. - - name is the name of the format. function is the callable that will be - used to create archives. If provided, extra_args is a sequence of - (name, value) tuples that will be passed as arguments to the callable. - description can be provided to describe the format, and will be returned - by the get_archive_formats() function. - """ - if extra_args is None: - extra_args = [] - if not isinstance(function, Callable): - raise TypeError('The %s object is not callable' % function) - if not isinstance(extra_args, (tuple, list)): - raise TypeError('extra_args needs to be a sequence') - for element in extra_args: - if not isinstance(element, (tuple, list)) or len(element) !=2: - raise TypeError('extra_args elements are : (arg_name, value)') - - _ARCHIVE_FORMATS[name] = (function, extra_args, description) - -def unregister_archive_format(name): - del _ARCHIVE_FORMATS[name] - -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None, logger=None): - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "bztar" - or "gztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. 
- """ - save_cwd = os.getcwd() - if root_dir is not None: - if logger is not None: - logger.debug("changing into '%s'", root_dir) - base_name = os.path.abspath(base_name) - if not dry_run: - os.chdir(root_dir) - - if base_dir is None: - base_dir = os.curdir - - kwargs = {'dry_run': dry_run, 'logger': logger} - - try: - format_info = _ARCHIVE_FORMATS[format] - except KeyError: - raise ValueError("unknown archive format '%s'" % format) - - func = format_info[0] - for arg, val in format_info[1]: - kwargs[arg] = val - - if format != 'zip': - kwargs['owner'] = owner - kwargs['group'] = group - - try: - filename = func(base_name, base_dir, **kwargs) - finally: - if root_dir is not None: - if logger is not None: - logger.debug("changing back to '%s'", save_cwd) - os.chdir(save_cwd) - - return filename - - -def get_unpack_formats(): - """Returns a list of supported formats for unpacking. - - Each element of the returned sequence is a tuple - (name, extensions, description) - """ - formats = [(name, info[0], info[3]) for name, info in - _UNPACK_FORMATS.items()] - formats.sort() - return formats - -def _check_unpack_options(extensions, function, extra_args): - """Checks what gets registered as an unpacker.""" - # first make sure no other unpacker is registered for this extension - existing_extensions = {} - for name, info in _UNPACK_FORMATS.items(): - for ext in info[0]: - existing_extensions[ext] = name - - for extension in extensions: - if extension in existing_extensions: - msg = '%s is already registered for "%s"' - raise RegistryError(msg % (extension, - existing_extensions[extension])) - - if not isinstance(function, Callable): - raise TypeError('The registered function must be a callable') - - -def register_unpack_format(name, extensions, function, extra_args=None, - description=''): - """Registers an unpack format. - - `name` is the name of the format. `extensions` is a list of extensions - corresponding to the format. - - `function` is the callable that will be - used to unpack archives. The callable will receive archives to unpack. - If it's unable to handle an archive, it needs to raise a ReadError - exception. - - If provided, `extra_args` is a sequence of - (name, value) tuples that will be passed as arguments to the callable. - description can be provided to describe the format, and will be returned - by the get_unpack_formats() function. - """ - if extra_args is None: - extra_args = [] - _check_unpack_options(extensions, function, extra_args) - _UNPACK_FORMATS[name] = extensions, function, extra_args, description - -def unregister_unpack_format(name): - """Removes the pack format from the registry.""" - del _UNPACK_FORMATS[name] - -def _ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def _unpack_zipfile(filename, extract_dir): - """Unpack zip `filename` to `extract_dir` - """ - try: - import zipfile - except ImportError: - raise ReadError('zlib not supported, cannot unpack this archive.') - - if not zipfile.is_zipfile(filename): - raise ReadError("%s is not a zip file" % filename) - - zip = zipfile.ZipFile(filename) - try: - for info in zip.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' 
in name: - continue - - target = os.path.join(extract_dir, *name.split('/')) - if not target: - continue - - _ensure_directory(target) - if not name.endswith('/'): - # file - data = zip.read(info.filename) - f = open(target, 'wb') - try: - f.write(data) - finally: - f.close() - del data - finally: - zip.close() - -def _unpack_tarfile(filename, extract_dir): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise ReadError( - "%s is not a compressed or uncompressed tar file" % filename) - try: - tarobj.extractall(extract_dir) - finally: - tarobj.close() - -_UNPACK_FORMATS = { - 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), - 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), - 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") - } - -if _BZ2_SUPPORTED: - _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], - "bzip2'ed tar-file") - -def _find_unpack_format(filename): - for name, info in _UNPACK_FORMATS.items(): - for extension in info[0]: - if filename.endswith(extension): - return name - return None - -def unpack_archive(filename, extract_dir=None, format=None): - """Unpack an archive. - - `filename` is the name of the archive. - - `extract_dir` is the name of the target directory, where the archive - is unpacked. If not provided, the current working directory is used. - - `format` is the archive format: one of "zip", "tar", or "gztar". Or any - other registered format. If not provided, unpack_archive will use the - filename extension and see if an unpacker was registered for that - extension. - - In case none is found, a ValueError is raised. - """ - if extract_dir is None: - extract_dir = os.getcwd() - - if format is not None: - try: - format_info = _UNPACK_FORMATS[format] - except KeyError: - raise ValueError("Unknown unpack format '{0}'".format(format)) - - func = format_info[1] - func(filename, extract_dir, **dict(format_info[2])) - else: - # we need to look at the registered unpackers supported extensions - format = _find_unpack_format(filename) - if format is None: - raise ReadError("Unknown archive format '{0}'".format(filename)) - - func = _UNPACK_FORMATS[format][1] - kwargs = dict(_UNPACK_FORMATS[format][2]) - func(filename, extract_dir, **kwargs) diff --git a/src/main/python/pybuilder/_vendor/distlib/_backport/sysconfig.cfg b/src/main/python/pybuilder/_vendor/distlib/_backport/sysconfig.cfg deleted file mode 100644 index 1746bd01c..000000000 --- a/src/main/python/pybuilder/_vendor/distlib/_backport/sysconfig.cfg +++ /dev/null @@ -1,84 +0,0 @@ -[posix_prefix] -# Configuration directories. Some of these come straight out of the -# configure script. They are for implementing the other variables, not to -# be used directly in [resource_locations]. 
-confdir = /etc -datadir = /usr/share -libdir = /usr/lib -statedir = /var -# User resource directory -local = ~/.local/{distribution.name} - -stdlib = {base}/lib/python{py_version_short} -platstdlib = {platbase}/lib/python{py_version_short} -purelib = {base}/lib/python{py_version_short}/site-packages -platlib = {platbase}/lib/python{py_version_short}/site-packages -include = {base}/include/python{py_version_short}{abiflags} -platinclude = {platbase}/include/python{py_version_short}{abiflags} -data = {base} - -[posix_home] -stdlib = {base}/lib/python -platstdlib = {base}/lib/python -purelib = {base}/lib/python -platlib = {base}/lib/python -include = {base}/include/python -platinclude = {base}/include/python -scripts = {base}/bin -data = {base} - -[nt] -stdlib = {base}/Lib -platstdlib = {base}/Lib -purelib = {base}/Lib/site-packages -platlib = {base}/Lib/site-packages -include = {base}/Include -platinclude = {base}/Include -scripts = {base}/Scripts -data = {base} - -[os2] -stdlib = {base}/Lib -platstdlib = {base}/Lib -purelib = {base}/Lib/site-packages -platlib = {base}/Lib/site-packages -include = {base}/Include -platinclude = {base}/Include -scripts = {base}/Scripts -data = {base} - -[os2_home] -stdlib = {userbase}/lib/python{py_version_short} -platstdlib = {userbase}/lib/python{py_version_short} -purelib = {userbase}/lib/python{py_version_short}/site-packages -platlib = {userbase}/lib/python{py_version_short}/site-packages -include = {userbase}/include/python{py_version_short} -scripts = {userbase}/bin -data = {userbase} - -[nt_user] -stdlib = {userbase}/Python{py_version_nodot} -platstdlib = {userbase}/Python{py_version_nodot} -purelib = {userbase}/Python{py_version_nodot}/site-packages -platlib = {userbase}/Python{py_version_nodot}/site-packages -include = {userbase}/Python{py_version_nodot}/Include -scripts = {userbase}/Scripts -data = {userbase} - -[posix_user] -stdlib = {userbase}/lib/python{py_version_short} -platstdlib = {userbase}/lib/python{py_version_short} -purelib = {userbase}/lib/python{py_version_short}/site-packages -platlib = {userbase}/lib/python{py_version_short}/site-packages -include = {userbase}/include/python{py_version_short} -scripts = {userbase}/bin -data = {userbase} - -[osx_framework_user] -stdlib = {userbase}/lib/python -platstdlib = {userbase}/lib/python -purelib = {userbase}/lib/python/site-packages -platlib = {userbase}/lib/python/site-packages -include = {userbase}/include -scripts = {userbase}/bin -data = {userbase} diff --git a/src/main/python/pybuilder/_vendor/distlib/_backport/sysconfig.py b/src/main/python/pybuilder/_vendor/distlib/_backport/sysconfig.py deleted file mode 100644 index b470a373c..000000000 --- a/src/main/python/pybuilder/_vendor/distlib/_backport/sysconfig.py +++ /dev/null @@ -1,786 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -"""Access to Python's configuration information.""" - -import codecs -import os -import re -import sys -from os.path import pardir, realpath -try: - import configparser -except ImportError: - import ConfigParser as configparser - - -__all__ = [ - 'get_config_h_filename', - 'get_config_var', - 'get_config_vars', - 'get_makefile_filename', - 'get_path', - 'get_path_names', - 'get_paths', - 'get_platform', - 'get_python_version', - 'get_scheme_names', - 'parse_config_h', -] - - -def _safe_realpath(path): - try: - return realpath(path) - except OSError: - return path - - -if sys.executable: - _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) -else: - # sys.executable can be empty if argv[0] has been changed and Python is - # unable to retrieve the real program name - _PROJECT_BASE = _safe_realpath(os.getcwd()) - -if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) -# PC/VS7.1 -if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) -# PC/AMD64 -if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) - - -def is_python_build(): - for fn in ("Setup.dist", "Setup.local"): - if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): - return True - return False - -_PYTHON_BUILD = is_python_build() - -_cfg_read = False - -def _ensure_cfg_read(): - global _cfg_read - if not _cfg_read: - from ..resources import finder - backport_package = __name__.rsplit('.', 1)[0] - _finder = finder(backport_package) - _cfgfile = _finder.find('sysconfig.cfg') - assert _cfgfile, 'sysconfig.cfg exists' - with _cfgfile.as_stream() as s: - _SCHEMES.readfp(s) - if _PYTHON_BUILD: - for scheme in ('posix_prefix', 'posix_home'): - _SCHEMES.set(scheme, 'include', '{srcdir}/Include') - _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') - - _cfg_read = True - - -_SCHEMES = configparser.RawConfigParser() -_VAR_REPL = re.compile(r'\{([^{]*?)\}') - -def _expand_globals(config): - _ensure_cfg_read() - if config.has_section('globals'): - globals = config.items('globals') - else: - globals = tuple() - - sections = config.sections() - for section in sections: - if section == 'globals': - continue - for option, value in globals: - if config.has_option(section, option): - continue - config.set(section, option, value) - config.remove_section('globals') - - # now expanding local variables defined in the cfg file - # - for section in config.sections(): - variables = dict(config.items(section)) - - def _replacer(matchobj): - name = matchobj.group(1) - if name in variables: - return variables[name] - return matchobj.group(0) - - for option, value in config.items(section): - config.set(section, option, _VAR_REPL.sub(_replacer, value)) - -#_expand_globals(_SCHEMES) - -_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] -_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] -_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] -_PREFIX = os.path.normpath(sys.prefix) -_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) -_CONFIG_VARS = None -_USER_BASE = None - - -def _subst_vars(path, local_vars): - """In the string `path`, replace tokens like {some.thing} with the - corresponding value from the map `local_vars`. - - If there is no corresponding value, leave the token unchanged. 
- """ - def _replacer(matchobj): - name = matchobj.group(1) - if name in local_vars: - return local_vars[name] - elif name in os.environ: - return os.environ[name] - return matchobj.group(0) - return _VAR_REPL.sub(_replacer, path) - - -def _extend_dict(target_dict, other_dict): - target_keys = target_dict.keys() - for key, value in other_dict.items(): - if key in target_keys: - continue - target_dict[key] = value - - -def _expand_vars(scheme, vars): - res = {} - if vars is None: - vars = {} - _extend_dict(vars, get_config_vars()) - - for key, value in _SCHEMES.items(scheme): - if os.name in ('posix', 'nt'): - value = os.path.expanduser(value) - res[key] = os.path.normpath(_subst_vars(value, vars)) - return res - - -def format_value(value, vars): - def _replacer(matchobj): - name = matchobj.group(1) - if name in vars: - return vars[name] - return matchobj.group(0) - return _VAR_REPL.sub(_replacer, value) - - -def _get_default_scheme(): - if os.name == 'posix': - # the default scheme for posix is posix_prefix - return 'posix_prefix' - return os.name - - -def _getuserbase(): - env_base = os.environ.get("PYTHONUSERBASE", None) - - def joinuser(*args): - return os.path.expanduser(os.path.join(*args)) - - # what about 'os2emx', 'riscos' ? - if os.name == "nt": - base = os.environ.get("APPDATA") or "~" - if env_base: - return env_base - else: - return joinuser(base, "Python") - - if sys.platform == "darwin": - framework = get_config_var("PYTHONFRAMEWORK") - if framework: - if env_base: - return env_base - else: - return joinuser("~", "Library", framework, "%d.%d" % - sys.version_info[:2]) - - if env_base: - return env_base - else: - return joinuser("~", ".local") - - -def _parse_makefile(filename, vars=None): - """Parse a Makefile-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - # Regexes needed for parsing Makefile (and similar syntaxes, - # like old-style Setup files). - _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") - _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") - _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") - - if vars is None: - vars = {} - done = {} - notdone = {} - - with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: - lines = f.readlines() - - for line in lines: - if line.startswith('#') or line.strip() == '': - continue - m = _variable_rx.match(line) - if m: - n, v = m.group(1, 2) - v = v.strip() - # `$$' is a literal `$' in make - tmpv = v.replace('$$', '') - - if "$" in tmpv: - notdone[n] = v - else: - try: - v = int(v) - except ValueError: - # insert literal `$' - done[n] = v.replace('$$', '$') - else: - done[n] = v - - # do variable interpolation here - variables = list(notdone.keys()) - - # Variables with a 'PY_' prefix in the makefile. These need to - # be made available without that prefix through sysconfig. - # Special care is needed to ensure that variable expansion works, even - # if the expansion uses the name without a prefix. 
- renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') - - while len(variables) > 0: - for name in tuple(variables): - value = notdone[name] - m = _findvar1_rx.search(value) or _findvar2_rx.search(value) - if m is not None: - n = m.group(1) - found = True - if n in done: - item = str(done[n]) - elif n in notdone: - # get it on a subsequent round - found = False - elif n in os.environ: - # do it like make: fall back to environment - item = os.environ[n] - - elif n in renamed_variables: - if (name.startswith('PY_') and - name[3:] in renamed_variables): - item = "" - - elif 'PY_' + n in notdone: - found = False - - else: - item = str(done['PY_' + n]) - - else: - done[n] = item = "" - - if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: - notdone[name] = value - else: - try: - value = int(value) - except ValueError: - done[name] = value.strip() - else: - done[name] = value - variables.remove(name) - - if (name.startswith('PY_') and - name[3:] in renamed_variables): - - name = name[3:] - if name not in done: - done[name] = value - - else: - # bogus variable reference (e.g. "prefix=$/opt/python"); - # just drop it since we can't deal - done[name] = value - variables.remove(name) - - # strip spurious spaces - for k, v in done.items(): - if isinstance(v, str): - done[k] = v.strip() - - # save the results in the global dictionary - vars.update(done) - return vars - - -def get_makefile_filename(): - """Return the path of the Makefile.""" - if _PYTHON_BUILD: - return os.path.join(_PROJECT_BASE, "Makefile") - if hasattr(sys, 'abiflags'): - config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) - else: - config_dir_name = 'config' - return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') - - -def _init_posix(vars): - """Initialize the module as appropriate for POSIX systems.""" - # load the installed Makefile: - makefile = get_makefile_filename() - try: - _parse_makefile(makefile, vars) - except IOError as e: - msg = "invalid Python installation: unable to open %s" % makefile - if hasattr(e, "strerror"): - msg = msg + " (%s)" % e.strerror - raise IOError(msg) - # load the installed pyconfig.h: - config_h = get_config_h_filename() - try: - with open(config_h) as f: - parse_config_h(f, vars) - except IOError as e: - msg = "invalid Python installation: unable to open %s" % config_h - if hasattr(e, "strerror"): - msg = msg + " (%s)" % e.strerror - raise IOError(msg) - # On AIX, there are wrong paths to the linker scripts in the Makefile - # -- these paths are relative to the Python source, but when installed - # the scripts are in another directory. - if _PYTHON_BUILD: - vars['LDSHARED'] = vars['BLDSHARED'] - - -def _init_non_posix(vars): - """Initialize the module as appropriate for NT""" - # set basic install directories - vars['LIBDEST'] = get_path('stdlib') - vars['BINLIBDEST'] = get_path('platstdlib') - vars['INCLUDEPY'] = get_path('include') - vars['SO'] = '.pyd' - vars['EXE'] = '.exe' - vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT - vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) - -# -# public APIs -# - - -def parse_config_h(fp, vars=None): - """Parse a config.h-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. 
- """ - if vars is None: - vars = {} - define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") - undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") - - while True: - line = fp.readline() - if not line: - break - m = define_rx.match(line) - if m: - n, v = m.group(1, 2) - try: - v = int(v) - except ValueError: - pass - vars[n] = v - else: - m = undef_rx.match(line) - if m: - vars[m.group(1)] = 0 - return vars - - -def get_config_h_filename(): - """Return the path of pyconfig.h.""" - if _PYTHON_BUILD: - if os.name == "nt": - inc_dir = os.path.join(_PROJECT_BASE, "PC") - else: - inc_dir = _PROJECT_BASE - else: - inc_dir = get_path('platinclude') - return os.path.join(inc_dir, 'pyconfig.h') - - -def get_scheme_names(): - """Return a tuple containing the schemes names.""" - return tuple(sorted(_SCHEMES.sections())) - - -def get_path_names(): - """Return a tuple containing the paths names.""" - # xxx see if we want a static list - return _SCHEMES.options('posix_prefix') - - -def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): - """Return a mapping containing an install scheme. - - ``scheme`` is the install scheme name. If not provided, it will - return the default scheme for the current platform. - """ - _ensure_cfg_read() - if expand: - return _expand_vars(scheme, vars) - else: - return dict(_SCHEMES.items(scheme)) - - -def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): - """Return a path corresponding to the scheme. - - ``scheme`` is the install scheme name. - """ - return get_paths(scheme, vars, expand)[name] - - -def get_config_vars(*args): - """With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows and Mac OS it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - global _CONFIG_VARS - if _CONFIG_VARS is None: - _CONFIG_VARS = {} - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # distutils2 module. - _CONFIG_VARS['prefix'] = _PREFIX - _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX - _CONFIG_VARS['py_version'] = _PY_VERSION - _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT - _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] - _CONFIG_VARS['base'] = _PREFIX - _CONFIG_VARS['platbase'] = _EXEC_PREFIX - _CONFIG_VARS['projectbase'] = _PROJECT_BASE - try: - _CONFIG_VARS['abiflags'] = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. - _CONFIG_VARS['abiflags'] = '' - - if os.name in ('nt', 'os2'): - _init_non_posix(_CONFIG_VARS) - if os.name == 'posix': - _init_posix(_CONFIG_VARS) - # Setting 'userbase' is done below the call to the - # init function to enable using 'get_config_var' in - # the init-function. - if sys.version >= '2.6': - _CONFIG_VARS['userbase'] = _getuserbase() - - if 'srcdir' not in _CONFIG_VARS: - _CONFIG_VARS['srcdir'] = _PROJECT_BASE - else: - _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. 
- if _PYTHON_BUILD and os.name == "posix": - base = _PROJECT_BASE - try: - cwd = os.getcwd() - except OSError: - cwd = None - if (not os.path.isabs(_CONFIG_VARS['srcdir']) and - base != cwd): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) - _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) - - if sys.platform == 'darwin': - kernel_version = os.uname()[2] # Kernel version (8.4.3) - major_version = int(kernel_version.split('.')[0]) - - if major_version < 8: - # On Mac OS X before 10.4, check if -arch and -isysroot - # are in CFLAGS or LDFLAGS and remove them if they are. - # This is needed when building extensions on a 10.3 system - # using a universal build of python. - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - flags = _CONFIG_VARS[key] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags) - flags = re.sub('-isysroot [^ \t]*', ' ', flags) - _CONFIG_VARS[key] = flags - else: - # Allow the user to override the architecture flags using - # an environment variable. - # NOTE: This name was introduced by Apple in OSX 10.5 and - # is used by several scripting languages distributed with - # that OS release. - if 'ARCHFLAGS' in os.environ: - arch = os.environ['ARCHFLAGS'] - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - - flags = _CONFIG_VARS[key] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags) - flags = flags + ' ' + arch - _CONFIG_VARS[key] = flags - - # If we're on OSX 10.5 or later and the user tries to - # compiles an extension using an SDK that is not present - # on the current machine it is better to not use an SDK - # than to fail. - # - # The major usecase for this is users using a Python.org - # binary installer on OSX 10.6: that installer uses - # the 10.4u SDK, but that SDK is not installed by default - # when you install Xcode. - # - CFLAGS = _CONFIG_VARS.get('CFLAGS', '') - m = re.search(r'-isysroot\s+(\S+)', CFLAGS) - if m is not None: - sdk = m.group(1) - if not os.path.exists(sdk): - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - - flags = _CONFIG_VARS[key] - flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) - _CONFIG_VARS[key] = flags - - if args: - vals = [] - for name in args: - vals.append(_CONFIG_VARS.get(name)) - return vals - else: - return _CONFIG_VARS - - -def get_config_var(name): - """Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - return get_config_vars().get(name) - - -def get_platform(): - """Return a string that identifies the current platform. - - This is used mainly to distinguish platform-specific build directories and - platform-specific built distributions. Typically includes the OS name - and version and the architecture (as supplied by 'os.uname()'), - although the exact information included depends on the OS; eg. for IRIX - the architecture isn't particularly important (IRIX only runs on SGI - hardware), but for Linux the kernel version isn't particularly - important. - - Examples of returned values: - linux-i586 - linux-alpha (?) 
-       solaris-2.6-sun4u
-       irix-5.3
-       irix64-6.2
-
-    Windows will return one of:
-       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
-       win-ia64 (64bit Windows on Itanium)
-       win32 (all others - specifically, sys.platform is returned)
-
-    For other non-POSIX platforms, currently just returns 'sys.platform'.
-    """
-    if os.name == 'nt':
-        # sniff sys.version for architecture.
-        prefix = " bit ("
-        i = sys.version.find(prefix)
-        if i == -1:
-            return sys.platform
-        j = sys.version.find(")", i)
-        look = sys.version[i+len(prefix):j].lower()
-        if look == 'amd64':
-            return 'win-amd64'
-        if look == 'itanium':
-            return 'win-ia64'
-        return sys.platform
-
-    if os.name != "posix" or not hasattr(os, 'uname'):
-        # XXX what about the architecture? NT is Intel or Alpha,
-        # Mac OS is M68k or PPC, etc.
-        return sys.platform
-
-    # Try to distinguish various flavours of Unix
-    osname, host, release, version, machine = os.uname()
-
-    # Convert the OS name to lowercase, remove '/' characters
-    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
-    osname = osname.lower().replace('/', '')
-    machine = machine.replace(' ', '_')
-    machine = machine.replace('/', '-')
-
-    if osname[:5] == "linux":
-        # At least on Linux/Intel, 'machine' is the processor --
-        # i386, etc.
-        # XXX what about Alpha, SPARC, etc?
-        return "%s-%s" % (osname, machine)
-    elif osname[:5] == "sunos":
-        if release[0] >= "5":           # SunOS 5 == Solaris 2
-            osname = "solaris"
-            release = "%d.%s" % (int(release[0]) - 3, release[2:])
-        # fall through to standard osname-release-machine representation
-    elif osname[:4] == "irix":          # could be "irix64"!
-        return "%s-%s" % (osname, release)
-    elif osname[:3] == "aix":
-        return "%s-%s.%s" % (osname, version, release)
-    elif osname[:6] == "cygwin":
-        osname = "cygwin"
-        rel_re = re.compile(r'[\d.]+')
-        m = rel_re.match(release)
-        if m:
-            release = m.group()
-    elif osname[:6] == "darwin":
-        #
-        # For our purposes, we'll assume that the system version from
-        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
-        # to. This makes the compatibility story a bit more sane because the
-        # machine is going to compile and link as if it were
-        # MACOSX_DEPLOYMENT_TARGET.
-        cfgvars = get_config_vars()
-        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
-
-        if True:
-            # Always calculate the release of the running machine,
-            # needed to determine if we can build fat binaries or not.
-
-            macrelease = macver
-            # Get the system version. Reading this plist is a documented
-            # way to get the system version (see the documentation for
-            # the Gestalt Manager)
-            try:
-                f = open('/System/Library/CoreServices/SystemVersion.plist')
-            except IOError:
-                # We're on a plain darwin box, fall back to the default
-                # behaviour.
-                pass
-            else:
-                try:
-                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
-                                  r'<string>(.*?)</string>', f.read())
-                finally:
-                    f.close()
-                if m is not None:
-                    macrelease = '.'.join(m.group(1).split('.')[:2])
-                # else: fall back to the default behaviour
-
-            if not macver:
-                macver = macrelease
-
-        if macver:
-            release = macver
-            osname = "macosx"
-
-            if ((macrelease + '.') >= '10.4.' and
-                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
-                # The universal build will build fat binaries, but not on
-                # systems before 10.4
-                #
-                # Try to detect 4-way universal builds, those have machine-type
-                # 'universal' instead of 'fat'.
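For generic POSIX systems, the function above boils down to an "osname-release-machine" triple built from os.uname(), with Linux dropping the kernel release. A minimal sketch of that base case (posix_platform is an invented name; none of the Solaris/IRIX/AIX/darwin special-casing is reproduced):

    import os

    def posix_platform():
        osname, _, release, _, machine = os.uname()
        osname = osname.lower().replace("/", "")
        machine = machine.replace(" ", "_").replace("/", "-")
        if osname.startswith("linux"):
            # the kernel release is not significant for built distributions
            return "%s-%s" % (osname, machine)
        return "%s-%s-%s" % (osname, release, machine)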
- - machine = 'fat' - cflags = get_config_vars().get('CFLAGS') - - archs = re.findall(r'-arch\s+(\S+)', cflags) - archs = tuple(sorted(set(archs))) - - if len(archs) == 1: - machine = archs[0] - elif archs == ('i386', 'ppc'): - machine = 'fat' - elif archs == ('i386', 'x86_64'): - machine = 'intel' - elif archs == ('i386', 'ppc', 'x86_64'): - machine = 'fat3' - elif archs == ('ppc64', 'x86_64'): - machine = 'fat64' - elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): - machine = 'universal' - else: - raise ValueError( - "Don't know machine value for archs=%r" % (archs,)) - - elif machine == 'i386': - # On OSX the machine type returned by uname is always the - # 32-bit variant, even if the executable architecture is - # the 64-bit variant - if sys.maxsize >= 2**32: - machine = 'x86_64' - - elif machine in ('PowerPC', 'Power_Macintosh'): - # Pick a sane name for the PPC architecture. - # See 'i386' case - if sys.maxsize >= 2**32: - machine = 'ppc64' - else: - machine = 'ppc' - - return "%s-%s-%s" % (osname, release, machine) - - -def get_python_version(): - return _PY_VERSION_SHORT - - -def _print_dict(title, data): - for index, (key, value) in enumerate(sorted(data.items())): - if index == 0: - print('%s: ' % (title)) - print('\t%s = "%s"' % (key, value)) - - -def _main(): - """Display all information sysconfig detains.""" - print('Platform: "%s"' % get_platform()) - print('Python version: "%s"' % get_python_version()) - print('Current installation scheme: "%s"' % _get_default_scheme()) - print() - _print_dict('Paths', get_paths()) - print() - _print_dict('Variables', get_config_vars()) - - -if __name__ == '__main__': - _main() diff --git a/src/main/python/pybuilder/_vendor/distlib/_backport/tarfile.py b/src/main/python/pybuilder/_vendor/distlib/_backport/tarfile.py deleted file mode 100644 index d66d85663..000000000 --- a/src/main/python/pybuilder/_vendor/distlib/_backport/tarfile.py +++ /dev/null @@ -1,2607 +0,0 @@ -#------------------------------------------------------------------- -# tarfile.py -#------------------------------------------------------------------- -# Copyright (C) 2002 Lars Gustaebel -# All rights reserved. -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -"""Read from and write to tar format archives. 
-""" - -__version__ = "$Revision$" - -version = "0.9.0" -__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" -__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" -__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" -__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." - -#--------- -# Imports -#--------- -import sys -import os -import stat -import errno -import time -import struct -import copy -import re - -try: - import grp, pwd -except ImportError: - grp = pwd = None - -# os.symlink on Windows prior to 6.0 raises NotImplementedError -symlink_exception = (AttributeError, NotImplementedError) -try: - # WindowsError (1314) will be raised if the caller does not hold the - # SeCreateSymbolicLinkPrivilege privilege - symlink_exception += (WindowsError,) -except NameError: - pass - -# from tarfile import * -__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] - -if sys.version_info[0] < 3: - import __builtin__ as builtins -else: - import builtins - -_open = builtins.open # Since 'open' is TarFile.open - -#--------------------------------------------------------- -# tar constants -#--------------------------------------------------------- -NUL = b"\0" # the null character -BLOCKSIZE = 512 # length of processing blocks -RECORDSIZE = BLOCKSIZE * 20 # length of records -GNU_MAGIC = b"ustar \0" # magic gnu tar string -POSIX_MAGIC = b"ustar\x0000" # magic posix tar string - -LENGTH_NAME = 100 # maximum length of a filename -LENGTH_LINK = 100 # maximum length of a linkname -LENGTH_PREFIX = 155 # maximum length of the prefix field - -REGTYPE = b"0" # regular file -AREGTYPE = b"\0" # regular file -LNKTYPE = b"1" # link (inside tarfile) -SYMTYPE = b"2" # symbolic link -CHRTYPE = b"3" # character special device -BLKTYPE = b"4" # block special device -DIRTYPE = b"5" # directory -FIFOTYPE = b"6" # fifo special device -CONTTYPE = b"7" # contiguous file - -GNUTYPE_LONGNAME = b"L" # GNU tar longname -GNUTYPE_LONGLINK = b"K" # GNU tar longlink -GNUTYPE_SPARSE = b"S" # GNU tar sparse file - -XHDTYPE = b"x" # POSIX.1-2001 extended header -XGLTYPE = b"g" # POSIX.1-2001 global header -SOLARIS_XHDTYPE = b"X" # Solaris extended header - -USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format -GNU_FORMAT = 1 # GNU tar format -PAX_FORMAT = 2 # POSIX.1-2001 (pax) format -DEFAULT_FORMAT = GNU_FORMAT - -#--------------------------------------------------------- -# tarfile constants -#--------------------------------------------------------- -# File types that tarfile supports: -SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, - SYMTYPE, DIRTYPE, FIFOTYPE, - CONTTYPE, CHRTYPE, BLKTYPE, - GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, - GNUTYPE_SPARSE) - -# File types that will be treated as a regular file. -REGULAR_TYPES = (REGTYPE, AREGTYPE, - CONTTYPE, GNUTYPE_SPARSE) - -# File types that are part of the GNU tar format. -GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, - GNUTYPE_SPARSE) - -# Fields from a pax header that override a TarInfo attribute. -PAX_FIELDS = ("path", "linkpath", "size", "mtime", - "uid", "gid", "uname", "gname") - -# Fields from a pax header that are affected by hdrcharset. -PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) - -# Fields in a pax header that are numbers, all other fields -# are treated as strings. 
-PAX_NUMBER_FIELDS = { - "atime": float, - "ctime": float, - "mtime": float, - "uid": int, - "gid": int, - "size": int -} - -#--------------------------------------------------------- -# Bits used in the mode field, values in octal. -#--------------------------------------------------------- -S_IFLNK = 0o120000 # symbolic link -S_IFREG = 0o100000 # regular file -S_IFBLK = 0o060000 # block device -S_IFDIR = 0o040000 # directory -S_IFCHR = 0o020000 # character device -S_IFIFO = 0o010000 # fifo - -TSUID = 0o4000 # set UID on execution -TSGID = 0o2000 # set GID on execution -TSVTX = 0o1000 # reserved - -TUREAD = 0o400 # read by owner -TUWRITE = 0o200 # write by owner -TUEXEC = 0o100 # execute/search by owner -TGREAD = 0o040 # read by group -TGWRITE = 0o020 # write by group -TGEXEC = 0o010 # execute/search by group -TOREAD = 0o004 # read by other -TOWRITE = 0o002 # write by other -TOEXEC = 0o001 # execute/search by other - -#--------------------------------------------------------- -# initialization -#--------------------------------------------------------- -if os.name in ("nt", "ce"): - ENCODING = "utf-8" -else: - ENCODING = sys.getfilesystemencoding() - -#--------------------------------------------------------- -# Some useful functions -#--------------------------------------------------------- - -def stn(s, length, encoding, errors): - """Convert a string to a null-terminated bytes object. - """ - s = s.encode(encoding, errors) - return s[:length] + (length - len(s)) * NUL - -def nts(s, encoding, errors): - """Convert a null-terminated bytes object to a string. - """ - p = s.find(b"\0") - if p != -1: - s = s[:p] - return s.decode(encoding, errors) - -def nti(s): - """Convert a number field to a python number. - """ - # There are two possible encodings for a number field, see - # itn() below. - if s[0] != chr(0o200): - try: - n = int(nts(s, "ascii", "strict") or "0", 8) - except ValueError: - raise InvalidHeaderError("invalid header") - else: - n = 0 - for i in range(len(s) - 1): - n <<= 8 - n += ord(s[i + 1]) - return n - -def itn(n, digits=8, format=DEFAULT_FORMAT): - """Convert a python number to a number field. - """ - # POSIX 1003.1-1988 requires numbers to be encoded as a string of - # octal digits followed by a null-byte, this allows values up to - # (8**(digits-1))-1. GNU tar allows storing numbers greater than - # that if necessary. A leading 0o200 byte indicates this particular - # encoding, the following digits-1 bytes are a big-endian - # representation. This allows values up to (256**(digits-1))-1. - if 0 <= n < 8 ** (digits - 1): - s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL - else: - if format != GNU_FORMAT or n >= 256 ** (digits - 1): - raise ValueError("overflow in number field") - - if n < 0: - # XXX We mimic GNU tar's behaviour with negative numbers, - # this could raise OverflowError. - n = struct.unpack("L", struct.pack("l", n))[0] - - s = bytearray() - for i in range(digits - 1): - s.insert(0, n & 0o377) - n >>= 8 - s.insert(0, 0o200) - return s - -def calc_chksums(buf): - """Calculate the checksum for a member's header by summing up all - characters except for the chksum field which is treated as if - it was filled with spaces. According to the GNU tar sources, - some tars (Sun and NeXT) calculate chksum with signed char, - which will be different if there are chars in the buffer with - the high bit set. So we calculate two checksums, unsigned and - signed. 
- """ - unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) - signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) - return unsigned_chksum, signed_chksum - -def copyfileobj(src, dst, length=None): - """Copy length bytes from fileobj src to fileobj dst. - If length is None, copy the entire content. - """ - if length == 0: - return - if length is None: - while True: - buf = src.read(16*1024) - if not buf: - break - dst.write(buf) - return - - BUFSIZE = 16 * 1024 - blocks, remainder = divmod(length, BUFSIZE) - for b in range(blocks): - buf = src.read(BUFSIZE) - if len(buf) < BUFSIZE: - raise IOError("end of file reached") - dst.write(buf) - - if remainder != 0: - buf = src.read(remainder) - if len(buf) < remainder: - raise IOError("end of file reached") - dst.write(buf) - return - -filemode_table = ( - ((S_IFLNK, "l"), - (S_IFREG, "-"), - (S_IFBLK, "b"), - (S_IFDIR, "d"), - (S_IFCHR, "c"), - (S_IFIFO, "p")), - - ((TUREAD, "r"),), - ((TUWRITE, "w"),), - ((TUEXEC|TSUID, "s"), - (TSUID, "S"), - (TUEXEC, "x")), - - ((TGREAD, "r"),), - ((TGWRITE, "w"),), - ((TGEXEC|TSGID, "s"), - (TSGID, "S"), - (TGEXEC, "x")), - - ((TOREAD, "r"),), - ((TOWRITE, "w"),), - ((TOEXEC|TSVTX, "t"), - (TSVTX, "T"), - (TOEXEC, "x")) -) - -def filemode(mode): - """Convert a file's mode to a string of the form - -rwxrwxrwx. - Used by TarFile.list() - """ - perm = [] - for table in filemode_table: - for bit, char in table: - if mode & bit == bit: - perm.append(char) - break - else: - perm.append("-") - return "".join(perm) - -class TarError(Exception): - """Base exception.""" - pass -class ExtractError(TarError): - """General exception for extract errors.""" - pass -class ReadError(TarError): - """Exception for unreadable tar archives.""" - pass -class CompressionError(TarError): - """Exception for unavailable compression methods.""" - pass -class StreamError(TarError): - """Exception for unsupported operations on stream-like TarFiles.""" - pass -class HeaderError(TarError): - """Base exception for header errors.""" - pass -class EmptyHeaderError(HeaderError): - """Exception for empty headers.""" - pass -class TruncatedHeaderError(HeaderError): - """Exception for truncated headers.""" - pass -class EOFHeaderError(HeaderError): - """Exception for end of file headers.""" - pass -class InvalidHeaderError(HeaderError): - """Exception for invalid headers.""" - pass -class SubsequentHeaderError(HeaderError): - """Exception for missing and invalid extended headers.""" - pass - -#--------------------------- -# internal stream interface -#--------------------------- -class _LowLevelFile(object): - """Low-level file object. Supports reading and writing. - It is used instead of a regular file object for streaming - access. - """ - - def __init__(self, name, mode): - mode = { - "r": os.O_RDONLY, - "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, - }[mode] - if hasattr(os, "O_BINARY"): - mode |= os.O_BINARY - self.fd = os.open(name, mode, 0o666) - - def close(self): - os.close(self.fd) - - def read(self, size): - return os.read(self.fd, size) - - def write(self, s): - os.write(self.fd, s) - -class _Stream(object): - """Class that serves as an adapter between TarFile and - a stream-like object. The stream-like object only - needs to have a read() or write() method and is accessed - blockwise. Use of gzip or bzip2 compression is possible. - A stream-like object could be for example: sys.stdin, - sys.stdout, a socket, a tape device etc. 
-
-       _Stream is intended to be used only internally.
-    """
-
-    def __init__(self, name, mode, comptype, fileobj, bufsize):
-        """Construct a _Stream object.
-        """
-        self._extfileobj = True
-        if fileobj is None:
-            fileobj = _LowLevelFile(name, mode)
-            self._extfileobj = False
-
-        if comptype == '*':
-            # Enable transparent compression detection for the
-            # stream interface
-            fileobj = _StreamProxy(fileobj)
-            comptype = fileobj.getcomptype()
-
-        self.name = name or ""
-        self.mode = mode
-        self.comptype = comptype
-        self.fileobj = fileobj
-        self.bufsize = bufsize
-        self.buf = b""
-        self.pos = 0
-        self.closed = False
-
-        try:
-            if comptype == "gz":
-                try:
-                    import zlib
-                except ImportError:
-                    raise CompressionError("zlib module is not available")
-                self.zlib = zlib
-                self.crc = zlib.crc32(b"")
-                if mode == "r":
-                    self._init_read_gz()
-                else:
-                    self._init_write_gz()
-
-            if comptype == "bz2":
-                try:
-                    import bz2
-                except ImportError:
-                    raise CompressionError("bz2 module is not available")
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = bz2.BZ2Decompressor()
-                else:
-                    self.cmp = bz2.BZ2Compressor()
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    def __del__(self):
-        if hasattr(self, "closed") and not self.closed:
-            self.close()
-
-    def _init_write_gz(self):
-        """Initialize for writing with gzip compression.
-        """
-        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
-                                         -self.zlib.MAX_WBITS,
-                                         self.zlib.DEF_MEM_LEVEL,
-                                         0)
-        timestamp = struct.pack("<L", int(time.time()))
-        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
-        if self.name.endswith(".gz"):
-            self.name = self.name[:-3]
-        # RFC1952 says the FNAME field must be Latin-1. Do not
-        # allow arbitrary bytes in the name.
-        self.name = self.name.encode("iso-8859-1", "replace") + NUL
-        self.__write(self.name)
-
-    def write(self, s):
-        """Write string s to the stream.
-        """
-        if self.comptype == "gz":
-            self.crc = self.zlib.crc32(s, self.crc)
-        self.pos += len(s)
-        if self.comptype != "tar":
-            s = self.cmp.compress(s)
-        self.__write(s)
-
-    def __write(self, s):
-        """Write string s to the stream if a whole new block
-           is ready to be written.
-        """
-        self.buf += s
-        while len(self.buf) > self.bufsize:
-            self.fileobj.write(self.buf[:self.bufsize])
-            self.buf = self.buf[self.bufsize:]
-
-    def close(self):
-        """Close the _Stream object. No operation should be
-           done on it afterwards.
-        """
-        if self.closed:
-            return
-
-        if self.mode == "w" and self.comptype != "tar":
-            self.buf += self.cmp.flush()
-
-        if self.mode == "w" and self.buf:
-            self.fileobj.write(self.buf)
-            self.buf = b""
-            if self.comptype == "gz":
-                # The native zlib crc is an unsigned 32-bit integer, but
-                # the Python wrapper implicitly casts that to a signed C
-                # long. So, on a 32-bit box self.crc may "look negative",
-                # while the same crc on a 64-bit box may "look positive".
-                # To avoid irksome warnings from the `struct` module, force
-                # it to look positive on all boxes.
-                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
-                self.fileobj.write(struct.pack("<L", self.pos & 0o7777777777))
-
-        if not self._extfileobj:
-            self.fileobj.close()
-
-        self.closed = True
-
-    def _init_read_gz(self):
-        """Initialize for reading a gzip compressed fileobj.
-        """
-        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
-        self.dbuf = b""
-
-        # taken from gzip.GzipFile with some alterations
-        if self.__read(2) != b"\037\213":
-            raise ReadError("not a gzip file")
-        if self.__read(1) != b"\010":
-            raise CompressionError("unsupported compression method")
-
-        flag = ord(self.__read(1))
-        self.__read(6)
-
-        if flag & 4:
-            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
-            self.read(xlen)
-        if flag & 8:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 16:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 2:
-            self.__read(2)
-
-    def tell(self):
-        """Return the stream's file pointer position.
-        """
-        return self.pos
-
-    def seek(self, pos=0):
-        """Set the stream's file pointer to pos. Negative seeking
-           is forbidden.
-        """
-        if pos - self.pos >= 0:
-            blocks, remainder = divmod(pos - self.pos, self.bufsize)
-            for i in range(blocks):
-                self.read(self.bufsize)
-            self.read(remainder)
-        else:
-            raise StreamError("seeking backwards is not allowed")
-        return self.pos
-
-    def read(self, size=None):
-        """Return the next size number of bytes from the stream.
-           If size is not defined, return all bytes of the stream
-           up to EOF.
-        """
-        if size is None:
-            t = []
-            while True:
-                buf = self._read(self.bufsize)
-                if not buf:
-                    break
-                t.append(buf)
-            buf = "".join(t)
-        else:
-            buf = self._read(size)
-        self.pos += len(buf)
-        return buf
-
-    def _read(self, size):
-        """Return size bytes from the stream.
-        """
-        if self.comptype == "tar":
-            return self.__read(size)
-
-        c = len(self.dbuf)
-        while c < size:
-            buf = self.__read(self.bufsize)
-            if not buf:
-                break
-            try:
-                buf = self.cmp.decompress(buf)
-            except IOError:
-                raise ReadError("invalid compressed data")
-            self.dbuf += buf
-            c += len(buf)
-        buf = self.dbuf[:size]
-        self.dbuf = self.dbuf[size:]
-        return buf
-
-    def __read(self, size):
-        """Return size bytes from stream. If internal buffer is empty,
-           read another block from the stream.
- """ - c = len(self.buf) - while c < size: - buf = self.fileobj.read(self.bufsize) - if not buf: - break - self.buf += buf - c += len(buf) - buf = self.buf[:size] - self.buf = self.buf[size:] - return buf -# class _Stream - -class _StreamProxy(object): - """Small proxy class that enables transparent compression - detection for the Stream interface (mode 'r|*'). - """ - - def __init__(self, fileobj): - self.fileobj = fileobj - self.buf = self.fileobj.read(BLOCKSIZE) - - def read(self, size): - self.read = self.fileobj.read - return self.buf - - def getcomptype(self): - if self.buf.startswith(b"\037\213\010"): - return "gz" - if self.buf.startswith(b"BZh91"): - return "bz2" - return "tar" - - def close(self): - self.fileobj.close() -# class StreamProxy - -class _BZ2Proxy(object): - """Small proxy class that enables external file object - support for "r:bz2" and "w:bz2" modes. This is actually - a workaround for a limitation in bz2 module's BZ2File - class which (unlike gzip.GzipFile) has no support for - a file object argument. - """ - - blocksize = 16 * 1024 - - def __init__(self, fileobj, mode): - self.fileobj = fileobj - self.mode = mode - self.name = getattr(self.fileobj, "name", None) - self.init() - - def init(self): - import bz2 - self.pos = 0 - if self.mode == "r": - self.bz2obj = bz2.BZ2Decompressor() - self.fileobj.seek(0) - self.buf = b"" - else: - self.bz2obj = bz2.BZ2Compressor() - - def read(self, size): - x = len(self.buf) - while x < size: - raw = self.fileobj.read(self.blocksize) - if not raw: - break - data = self.bz2obj.decompress(raw) - self.buf += data - x += len(data) - - buf = self.buf[:size] - self.buf = self.buf[size:] - self.pos += len(buf) - return buf - - def seek(self, pos): - if pos < self.pos: - self.init() - self.read(pos - self.pos) - - def tell(self): - return self.pos - - def write(self, data): - self.pos += len(data) - raw = self.bz2obj.compress(data) - self.fileobj.write(raw) - - def close(self): - if self.mode == "w": - raw = self.bz2obj.flush() - self.fileobj.write(raw) -# class _BZ2Proxy - -#------------------------ -# Extraction file object -#------------------------ -class _FileInFile(object): - """A thin wrapper around an existing file object that - provides a part of its data as an individual file - object. - """ - - def __init__(self, fileobj, offset, size, blockinfo=None): - self.fileobj = fileobj - self.offset = offset - self.size = size - self.position = 0 - - if blockinfo is None: - blockinfo = [(0, size)] - - # Construct a map with data and zero blocks. - self.map_index = 0 - self.map = [] - lastpos = 0 - realpos = self.offset - for offset, size in blockinfo: - if offset > lastpos: - self.map.append((False, lastpos, offset, None)) - self.map.append((True, offset, offset + size, realpos)) - realpos += size - lastpos = offset + size - if lastpos < self.size: - self.map.append((False, lastpos, self.size, None)) - - def seekable(self): - if not hasattr(self.fileobj, "seekable"): - # XXX gzip.GzipFile and bz2.BZ2File - return True - return self.fileobj.seekable() - - def tell(self): - """Return the current file position. - """ - return self.position - - def seek(self, position): - """Seek to a position in the file. - """ - self.position = position - - def read(self, size=None): - """Read data from the file. 
- """ - if size is None: - size = self.size - self.position - else: - size = min(size, self.size - self.position) - - buf = b"" - while size > 0: - while True: - data, start, stop, offset = self.map[self.map_index] - if start <= self.position < stop: - break - else: - self.map_index += 1 - if self.map_index == len(self.map): - self.map_index = 0 - length = min(size, stop - self.position) - if data: - self.fileobj.seek(offset + (self.position - start)) - buf += self.fileobj.read(length) - else: - buf += NUL * length - size -= length - self.position += length - return buf -#class _FileInFile - - -class ExFileObject(object): - """File-like object for reading an archive member. - Is returned by TarFile.extractfile(). - """ - blocksize = 1024 - - def __init__(self, tarfile, tarinfo): - self.fileobj = _FileInFile(tarfile.fileobj, - tarinfo.offset_data, - tarinfo.size, - tarinfo.sparse) - self.name = tarinfo.name - self.mode = "r" - self.closed = False - self.size = tarinfo.size - - self.position = 0 - self.buffer = b"" - - def readable(self): - return True - - def writable(self): - return False - - def seekable(self): - return self.fileobj.seekable() - - def read(self, size=None): - """Read at most size bytes from the file. If size is not - present or None, read all data until EOF is reached. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - buf = b"" - if self.buffer: - if size is None: - buf = self.buffer - self.buffer = b"" - else: - buf = self.buffer[:size] - self.buffer = self.buffer[size:] - - if size is None: - buf += self.fileobj.read() - else: - buf += self.fileobj.read(size - len(buf)) - - self.position += len(buf) - return buf - - # XXX TextIOWrapper uses the read1() method. - read1 = read - - def readline(self, size=-1): - """Read one entire line from the file. If size is present - and non-negative, return a string with at most that - size, which may be an incomplete line. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - pos = self.buffer.find(b"\n") + 1 - if pos == 0: - # no newline found. - while True: - buf = self.fileobj.read(self.blocksize) - self.buffer += buf - if not buf or b"\n" in buf: - pos = self.buffer.find(b"\n") + 1 - if pos == 0: - # no newline found. - pos = len(self.buffer) - break - - if size != -1: - pos = min(size, pos) - - buf = self.buffer[:pos] - self.buffer = self.buffer[pos:] - self.position += len(buf) - return buf - - def readlines(self): - """Return a list with all remaining lines. - """ - result = [] - while True: - line = self.readline() - if not line: break - result.append(line) - return result - - def tell(self): - """Return the current file position. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - return self.position - - def seek(self, pos, whence=os.SEEK_SET): - """Seek to a position in the file. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - if whence == os.SEEK_SET: - self.position = min(max(pos, 0), self.size) - elif whence == os.SEEK_CUR: - if pos < 0: - self.position = max(self.position + pos, 0) - else: - self.position = min(self.position + pos, self.size) - elif whence == os.SEEK_END: - self.position = max(min(self.size + pos, self.size), 0) - else: - raise ValueError("Invalid argument") - - self.buffer = b"" - self.fileobj.seek(self.position) - - def close(self): - """Close the file object. - """ - self.closed = True - - def __iter__(self): - """Get an iterator over the file's lines. 
- """ - while True: - line = self.readline() - if not line: - break - yield line -#class ExFileObject - -#------------------ -# Exported Classes -#------------------ -class TarInfo(object): - """Informational class which holds the details about an - archive member given by a tar header block. - TarInfo objects are returned by TarFile.getmember(), - TarFile.getmembers() and TarFile.gettarinfo() and are - usually created internally. - """ - - __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", - "chksum", "type", "linkname", "uname", "gname", - "devmajor", "devminor", - "offset", "offset_data", "pax_headers", "sparse", - "tarfile", "_sparse_structs", "_link_target") - - def __init__(self, name=""): - """Construct a TarInfo object. name is the optional name - of the member. - """ - self.name = name # member name - self.mode = 0o644 # file permissions - self.uid = 0 # user id - self.gid = 0 # group id - self.size = 0 # file size - self.mtime = 0 # modification time - self.chksum = 0 # header checksum - self.type = REGTYPE # member type - self.linkname = "" # link name - self.uname = "" # user name - self.gname = "" # group name - self.devmajor = 0 # device major number - self.devminor = 0 # device minor number - - self.offset = 0 # the tar header starts here - self.offset_data = 0 # the file's data starts here - - self.sparse = None # sparse member information - self.pax_headers = {} # pax header information - - # In pax headers the "name" and "linkname" field are called - # "path" and "linkpath". - def _getpath(self): - return self.name - def _setpath(self, name): - self.name = name - path = property(_getpath, _setpath) - - def _getlinkpath(self): - return self.linkname - def _setlinkpath(self, linkname): - self.linkname = linkname - linkpath = property(_getlinkpath, _setlinkpath) - - def __repr__(self): - return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) - - def get_info(self): - """Return the TarInfo's attributes as a dictionary. - """ - info = { - "name": self.name, - "mode": self.mode & 0o7777, - "uid": self.uid, - "gid": self.gid, - "size": self.size, - "mtime": self.mtime, - "chksum": self.chksum, - "type": self.type, - "linkname": self.linkname, - "uname": self.uname, - "gname": self.gname, - "devmajor": self.devmajor, - "devminor": self.devminor - } - - if info["type"] == DIRTYPE and not info["name"].endswith("/"): - info["name"] += "/" - - return info - - def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): - """Return a tar header as a string of 512 byte blocks. - """ - info = self.get_info() - - if format == USTAR_FORMAT: - return self.create_ustar_header(info, encoding, errors) - elif format == GNU_FORMAT: - return self.create_gnu_header(info, encoding, errors) - elif format == PAX_FORMAT: - return self.create_pax_header(info, encoding) - else: - raise ValueError("invalid format") - - def create_ustar_header(self, info, encoding, errors): - """Return the object as a ustar header block. - """ - info["magic"] = POSIX_MAGIC - - if len(info["linkname"]) > LENGTH_LINK: - raise ValueError("linkname is too long") - - if len(info["name"]) > LENGTH_NAME: - info["prefix"], info["name"] = self._posix_split_name(info["name"]) - - return self._create_header(info, USTAR_FORMAT, encoding, errors) - - def create_gnu_header(self, info, encoding, errors): - """Return the object as a GNU header block sequence. 
- """ - info["magic"] = GNU_MAGIC - - buf = b"" - if len(info["linkname"]) > LENGTH_LINK: - buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) - - if len(info["name"]) > LENGTH_NAME: - buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) - - return buf + self._create_header(info, GNU_FORMAT, encoding, errors) - - def create_pax_header(self, info, encoding): - """Return the object as a ustar header block. If it cannot be - represented this way, prepend a pax extended header sequence - with supplement information. - """ - info["magic"] = POSIX_MAGIC - pax_headers = self.pax_headers.copy() - - # Test string fields for values that exceed the field length or cannot - # be represented in ASCII encoding. - for name, hname, length in ( - ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), - ("uname", "uname", 32), ("gname", "gname", 32)): - - if hname in pax_headers: - # The pax header has priority. - continue - - # Try to encode the string as ASCII. - try: - info[name].encode("ascii", "strict") - except UnicodeEncodeError: - pax_headers[hname] = info[name] - continue - - if len(info[name]) > length: - pax_headers[hname] = info[name] - - # Test number fields for values that exceed the field limit or values - # that like to be stored as float. - for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): - if name in pax_headers: - # The pax header has priority. Avoid overflow. - info[name] = 0 - continue - - val = info[name] - if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): - pax_headers[name] = str(val) - info[name] = 0 - - # Create a pax extended header if necessary. - if pax_headers: - buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) - else: - buf = b"" - - return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") - - @classmethod - def create_pax_global_header(cls, pax_headers): - """Return the object as a pax global header block sequence. - """ - return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") - - def _posix_split_name(self, name): - """Split a name longer than 100 chars into a prefix - and a name part. - """ - prefix = name[:LENGTH_PREFIX + 1] - while prefix and prefix[-1] != "/": - prefix = prefix[:-1] - - name = name[len(prefix):] - prefix = prefix[:-1] - - if not prefix or len(name) > LENGTH_NAME: - raise ValueError("name is too long") - return prefix, name - - @staticmethod - def _create_header(info, format, encoding, errors): - """Return a header block. info is a dictionary with file - information, format must be one of the *_FORMAT constants. 
- """ - parts = [ - stn(info.get("name", ""), 100, encoding, errors), - itn(info.get("mode", 0) & 0o7777, 8, format), - itn(info.get("uid", 0), 8, format), - itn(info.get("gid", 0), 8, format), - itn(info.get("size", 0), 12, format), - itn(info.get("mtime", 0), 12, format), - b" ", # checksum field - info.get("type", REGTYPE), - stn(info.get("linkname", ""), 100, encoding, errors), - info.get("magic", POSIX_MAGIC), - stn(info.get("uname", ""), 32, encoding, errors), - stn(info.get("gname", ""), 32, encoding, errors), - itn(info.get("devmajor", 0), 8, format), - itn(info.get("devminor", 0), 8, format), - stn(info.get("prefix", ""), 155, encoding, errors) - ] - - buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) - chksum = calc_chksums(buf[-BLOCKSIZE:])[0] - buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] - return buf - - @staticmethod - def _create_payload(payload): - """Return the string payload filled with zero bytes - up to the next 512 byte border. - """ - blocks, remainder = divmod(len(payload), BLOCKSIZE) - if remainder > 0: - payload += (BLOCKSIZE - remainder) * NUL - return payload - - @classmethod - def _create_gnu_long_header(cls, name, type, encoding, errors): - """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence - for name. - """ - name = name.encode(encoding, errors) + NUL - - info = {} - info["name"] = "././@LongLink" - info["type"] = type - info["size"] = len(name) - info["magic"] = GNU_MAGIC - - # create extended header + name blocks. - return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ - cls._create_payload(name) - - @classmethod - def _create_pax_generic_header(cls, pax_headers, type, encoding): - """Return a POSIX.1-2008 extended or global header sequence - that contains a list of keyword, value pairs. The values - must be strings. - """ - # Check if one of the fields contains surrogate characters and thereby - # forces hdrcharset=BINARY, see _proc_pax() for more information. - binary = False - for keyword, value in pax_headers.items(): - try: - value.encode("utf8", "strict") - except UnicodeEncodeError: - binary = True - break - - records = b"" - if binary: - # Put the hdrcharset field at the beginning of the header. - records += b"21 hdrcharset=BINARY\n" - - for keyword, value in pax_headers.items(): - keyword = keyword.encode("utf8") - if binary: - # Try to restore the original byte representation of `value'. - # Needless to say, that the encoding must match the string. - value = value.encode(encoding, "surrogateescape") - else: - value = value.encode("utf8") - - l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' - n = p = 0 - while True: - n = l + len(str(p)) - if n == p: - break - p = n - records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" - - # We use a hardcoded "././@PaxHeader" name like star does - # instead of the one that POSIX recommends. - info = {} - info["name"] = "././@PaxHeader" - info["type"] = type - info["size"] = len(records) - info["magic"] = POSIX_MAGIC - - # Create pax header + record blocks. - return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ - cls._create_payload(records) - - @classmethod - def frombuf(cls, buf, encoding, errors): - """Construct a TarInfo object from a 512 byte bytes object. 
- """ - if len(buf) == 0: - raise EmptyHeaderError("empty header") - if len(buf) != BLOCKSIZE: - raise TruncatedHeaderError("truncated header") - if buf.count(NUL) == BLOCKSIZE: - raise EOFHeaderError("end of file header") - - chksum = nti(buf[148:156]) - if chksum not in calc_chksums(buf): - raise InvalidHeaderError("bad checksum") - - obj = cls() - obj.name = nts(buf[0:100], encoding, errors) - obj.mode = nti(buf[100:108]) - obj.uid = nti(buf[108:116]) - obj.gid = nti(buf[116:124]) - obj.size = nti(buf[124:136]) - obj.mtime = nti(buf[136:148]) - obj.chksum = chksum - obj.type = buf[156:157] - obj.linkname = nts(buf[157:257], encoding, errors) - obj.uname = nts(buf[265:297], encoding, errors) - obj.gname = nts(buf[297:329], encoding, errors) - obj.devmajor = nti(buf[329:337]) - obj.devminor = nti(buf[337:345]) - prefix = nts(buf[345:500], encoding, errors) - - # Old V7 tar format represents a directory as a regular - # file with a trailing slash. - if obj.type == AREGTYPE and obj.name.endswith("/"): - obj.type = DIRTYPE - - # The old GNU sparse format occupies some of the unused - # space in the buffer for up to 4 sparse structures. - # Save the them for later processing in _proc_sparse(). - if obj.type == GNUTYPE_SPARSE: - pos = 386 - structs = [] - for i in range(4): - try: - offset = nti(buf[pos:pos + 12]) - numbytes = nti(buf[pos + 12:pos + 24]) - except ValueError: - break - structs.append((offset, numbytes)) - pos += 24 - isextended = bool(buf[482]) - origsize = nti(buf[483:495]) - obj._sparse_structs = (structs, isextended, origsize) - - # Remove redundant slashes from directories. - if obj.isdir(): - obj.name = obj.name.rstrip("/") - - # Reconstruct a ustar longname. - if prefix and obj.type not in GNU_TYPES: - obj.name = prefix + "/" + obj.name - return obj - - @classmethod - def fromtarfile(cls, tarfile): - """Return the next TarInfo object from TarFile object - tarfile. - """ - buf = tarfile.fileobj.read(BLOCKSIZE) - obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) - obj.offset = tarfile.fileobj.tell() - BLOCKSIZE - return obj._proc_member(tarfile) - - #-------------------------------------------------------------------------- - # The following are methods that are called depending on the type of a - # member. The entry point is _proc_member() which can be overridden in a - # subclass to add custom _proc_*() methods. A _proc_*() method MUST - # implement the following - # operations: - # 1. Set self.offset_data to the position where the data blocks begin, - # if there is data that follows. - # 2. Set tarfile.offset to the position where the next member's header will - # begin. - # 3. Return self or another valid TarInfo object. - def _proc_member(self, tarfile): - """Choose the right processing method depending on - the type and call it. - """ - if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): - return self._proc_gnulong(tarfile) - elif self.type == GNUTYPE_SPARSE: - return self._proc_sparse(tarfile) - elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): - return self._proc_pax(tarfile) - else: - return self._proc_builtin(tarfile) - - def _proc_builtin(self, tarfile): - """Process a builtin type or an unknown type which - will be treated as a regular file. - """ - self.offset_data = tarfile.fileobj.tell() - offset = self.offset_data - if self.isreg() or self.type not in SUPPORTED_TYPES: - # Skip the following data blocks. - offset += self._block(self.size) - tarfile.offset = offset - - # Patch the TarInfo object with saved global - # header information. 
- self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) - - return self - - def _proc_gnulong(self, tarfile): - """Process the blocks that hold a GNU longname - or longlink member. - """ - buf = tarfile.fileobj.read(self._block(self.size)) - - # Fetch the next header and process it. - try: - next = self.fromtarfile(tarfile) - except HeaderError: - raise SubsequentHeaderError("missing or bad subsequent header") - - # Patch the TarInfo object from the next header with - # the longname information. - next.offset = self.offset - if self.type == GNUTYPE_LONGNAME: - next.name = nts(buf, tarfile.encoding, tarfile.errors) - elif self.type == GNUTYPE_LONGLINK: - next.linkname = nts(buf, tarfile.encoding, tarfile.errors) - - return next - - def _proc_sparse(self, tarfile): - """Process a GNU sparse header plus extra headers. - """ - # We already collected some sparse structures in frombuf(). - structs, isextended, origsize = self._sparse_structs - del self._sparse_structs - - # Collect sparse structures from extended header blocks. - while isextended: - buf = tarfile.fileobj.read(BLOCKSIZE) - pos = 0 - for i in range(21): - try: - offset = nti(buf[pos:pos + 12]) - numbytes = nti(buf[pos + 12:pos + 24]) - except ValueError: - break - if offset and numbytes: - structs.append((offset, numbytes)) - pos += 24 - isextended = bool(buf[504]) - self.sparse = structs - - self.offset_data = tarfile.fileobj.tell() - tarfile.offset = self.offset_data + self._block(self.size) - self.size = origsize - return self - - def _proc_pax(self, tarfile): - """Process an extended or global header as described in - POSIX.1-2008. - """ - # Read the header information. - buf = tarfile.fileobj.read(self._block(self.size)) - - # A pax header stores supplemental information for either - # the following file (extended) or all following files - # (global). - if self.type == XGLTYPE: - pax_headers = tarfile.pax_headers - else: - pax_headers = tarfile.pax_headers.copy() - - # Check if the pax header contains a hdrcharset field. This tells us - # the encoding of the path, linkpath, uname and gname fields. Normally, - # these fields are UTF-8 encoded but since POSIX.1-2008 tar - # implementations are allowed to store them as raw binary strings if - # the translation to UTF-8 fails. - match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) - if match is not None: - pax_headers["hdrcharset"] = match.group(1).decode("utf8") - - # For the time being, we don't care about anything other than "BINARY". - # The only other value that is currently allowed by the standard is - # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. - hdrcharset = pax_headers.get("hdrcharset") - if hdrcharset == "BINARY": - encoding = tarfile.encoding - else: - encoding = "utf8" - - # Parse pax header information. A record looks like that: - # "%d %s=%s\n" % (length, keyword, value). length is the size - # of the complete record including the length field itself and - # the newline. keyword and value are both UTF-8 encoded strings. - regex = re.compile(br"(\d+) ([^=]+)=") - pos = 0 - while True: - match = regex.match(buf, pos) - if not match: - break - - length, keyword = match.groups() - length = int(length) - value = buf[match.end(2) + 1:match.start(1) + length - 1] - - # Normally, we could just use "utf8" as the encoding and "strict" - # as the error handler, but we better not take the risk. 
For - # example, GNU tar <= 1.23 is known to store filenames it cannot - # translate to UTF-8 as raw strings (unfortunately without a - # hdrcharset=BINARY header). - # We first try the strict standard encoding, and if that fails we - # fall back on the user's encoding and error handler. - keyword = self._decode_pax_field(keyword, "utf8", "utf8", - tarfile.errors) - if keyword in PAX_NAME_FIELDS: - value = self._decode_pax_field(value, encoding, tarfile.encoding, - tarfile.errors) - else: - value = self._decode_pax_field(value, "utf8", "utf8", - tarfile.errors) - - pax_headers[keyword] = value - pos += length - - # Fetch the next header. - try: - next = self.fromtarfile(tarfile) - except HeaderError: - raise SubsequentHeaderError("missing or bad subsequent header") - - # Process GNU sparse information. - if "GNU.sparse.map" in pax_headers: - # GNU extended sparse format version 0.1. - self._proc_gnusparse_01(next, pax_headers) - - elif "GNU.sparse.size" in pax_headers: - # GNU extended sparse format version 0.0. - self._proc_gnusparse_00(next, pax_headers, buf) - - elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": - # GNU extended sparse format version 1.0. - self._proc_gnusparse_10(next, pax_headers, tarfile) - - if self.type in (XHDTYPE, SOLARIS_XHDTYPE): - # Patch the TarInfo object with the extended header info. - next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) - next.offset = self.offset - - if "size" in pax_headers: - # If the extended header replaces the size field, - # we need to recalculate the offset where the next - # header starts. - offset = next.offset_data - if next.isreg() or next.type not in SUPPORTED_TYPES: - offset += next._block(next.size) - tarfile.offset = offset - - return next - - def _proc_gnusparse_00(self, next, pax_headers, buf): - """Process a GNU tar extended sparse header, version 0.0. - """ - offsets = [] - for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): - offsets.append(int(match.group(1))) - numbytes = [] - for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): - numbytes.append(int(match.group(1))) - next.sparse = list(zip(offsets, numbytes)) - - def _proc_gnusparse_01(self, next, pax_headers): - """Process a GNU tar extended sparse header, version 0.1. - """ - sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] - next.sparse = list(zip(sparse[::2], sparse[1::2])) - - def _proc_gnusparse_10(self, next, pax_headers, tarfile): - """Process a GNU tar extended sparse header, version 1.0. - """ - fields = None - sparse = [] - buf = tarfile.fileobj.read(BLOCKSIZE) - fields, buf = buf.split(b"\n", 1) - fields = int(fields) - while len(sparse) < fields * 2: - if b"\n" not in buf: - buf += tarfile.fileobj.read(BLOCKSIZE) - number, buf = buf.split(b"\n", 1) - sparse.append(int(number)) - next.offset_data = tarfile.fileobj.tell() - next.sparse = list(zip(sparse[::2], sparse[1::2])) - - def _apply_pax_info(self, pax_headers, encoding, errors): - """Replace fields with supplemental information from a previous - pax extended or global header. 
- """ - for keyword, value in pax_headers.items(): - if keyword == "GNU.sparse.name": - setattr(self, "path", value) - elif keyword == "GNU.sparse.size": - setattr(self, "size", int(value)) - elif keyword == "GNU.sparse.realsize": - setattr(self, "size", int(value)) - elif keyword in PAX_FIELDS: - if keyword in PAX_NUMBER_FIELDS: - try: - value = PAX_NUMBER_FIELDS[keyword](value) - except ValueError: - value = 0 - if keyword == "path": - value = value.rstrip("/") - setattr(self, keyword, value) - - self.pax_headers = pax_headers.copy() - - def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): - """Decode a single field from a pax record. - """ - try: - return value.decode(encoding, "strict") - except UnicodeDecodeError: - return value.decode(fallback_encoding, fallback_errors) - - def _block(self, count): - """Round up a byte count by BLOCKSIZE and return it, - e.g. _block(834) => 1024. - """ - blocks, remainder = divmod(count, BLOCKSIZE) - if remainder: - blocks += 1 - return blocks * BLOCKSIZE - - def isreg(self): - return self.type in REGULAR_TYPES - def isfile(self): - return self.isreg() - def isdir(self): - return self.type == DIRTYPE - def issym(self): - return self.type == SYMTYPE - def islnk(self): - return self.type == LNKTYPE - def ischr(self): - return self.type == CHRTYPE - def isblk(self): - return self.type == BLKTYPE - def isfifo(self): - return self.type == FIFOTYPE - def issparse(self): - return self.sparse is not None - def isdev(self): - return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) -# class TarInfo - -class TarFile(object): - """The TarFile Class provides an interface to tar archives. - """ - - debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) - - dereference = False # If true, add content of linked file to the - # tar file, else the link. - - ignore_zeros = False # If true, skips empty or invalid blocks and - # continues processing. - - errorlevel = 1 # If 0, fatal errors only appear in debug - # messages (if debug >= 0). If > 0, errors - # are passed to the caller as exceptions. - - format = DEFAULT_FORMAT # The format to use when creating an archive. - - encoding = ENCODING # Encoding for 8-bit character strings. - - errors = None # Error handler for unicode conversion. - - tarinfo = TarInfo # The default TarInfo class to use. - - fileobject = ExFileObject # The default ExFileObject class to use. - - def __init__(self, name=None, mode="r", fileobj=None, format=None, - tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, - errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): - """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. `mode' - defaults to 'r'. - If `fileobj' is given, it is used for reading or writing data. If it - can be determined, `mode' is overridden by `fileobj's mode. - `fileobj' is not closed, when TarFile is closed. - """ - if len(mode) > 1 or mode not in "raw": - raise ValueError("mode must be 'r', 'a' or 'w'") - self.mode = mode - self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] - - if not fileobj: - if self.mode == "a" and not os.path.exists(name): - # Create nonexistent files in append mode. 
- self.mode = "w" - self._mode = "wb" - fileobj = bltn_open(name, self._mode) - self._extfileobj = False - else: - if name is None and hasattr(fileobj, "name"): - name = fileobj.name - if hasattr(fileobj, "mode"): - self._mode = fileobj.mode - self._extfileobj = True - self.name = os.path.abspath(name) if name else None - self.fileobj = fileobj - - # Init attributes. - if format is not None: - self.format = format - if tarinfo is not None: - self.tarinfo = tarinfo - if dereference is not None: - self.dereference = dereference - if ignore_zeros is not None: - self.ignore_zeros = ignore_zeros - if encoding is not None: - self.encoding = encoding - self.errors = errors - - if pax_headers is not None and self.format == PAX_FORMAT: - self.pax_headers = pax_headers - else: - self.pax_headers = {} - - if debug is not None: - self.debug = debug - if errorlevel is not None: - self.errorlevel = errorlevel - - # Init datastructures. - self.closed = False - self.members = [] # list of members as TarInfo objects - self._loaded = False # flag if all members have been read - self.offset = self.fileobj.tell() - # current position in the archive file - self.inodes = {} # dictionary caching the inodes of - # archive members already added - - try: - if self.mode == "r": - self.firstmember = None - self.firstmember = self.next() - - if self.mode == "a": - # Move to the end of the archive, - # before the first empty block. - while True: - self.fileobj.seek(self.offset) - try: - tarinfo = self.tarinfo.fromtarfile(self) - self.members.append(tarinfo) - except EOFHeaderError: - self.fileobj.seek(self.offset) - break - except HeaderError as e: - raise ReadError(str(e)) - - if self.mode in "aw": - self._loaded = True - - if self.pax_headers: - buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) - self.fileobj.write(buf) - self.offset += len(buf) - except: - if not self._extfileobj: - self.fileobj.close() - self.closed = True - raise - - #-------------------------------------------------------------------------- - # Below are the classmethods which act as alternate constructors to the - # TarFile class. The open() method is the only one that is needed for - # public use; it is the "super"-constructor and is able to select an - # adequate "sub"-constructor for a particular compression using the mapping - # from OPEN_METH. - # - # This concept allows one to subclass TarFile without losing the comfort of - # the super-constructor. A sub-constructor is registered and made available - # by adding it to the mapping in OPEN_METH. - - @classmethod - def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): - """Open a tar archive for reading, writing or appending. Return - an appropriate TarFile class. 
- - mode: - 'r' or 'r:*' open for reading with transparent compression - 'r:' open for reading exclusively uncompressed - 'r:gz' open for reading with gzip compression - 'r:bz2' open for reading with bzip2 compression - 'a' or 'a:' open for appending, creating the file if necessary - 'w' or 'w:' open for writing without compression - 'w:gz' open for writing with gzip compression - 'w:bz2' open for writing with bzip2 compression - - 'r|*' open a stream of tar blocks with transparent compression - 'r|' open an uncompressed stream of tar blocks for reading - 'r|gz' open a gzip compressed stream of tar blocks - 'r|bz2' open a bzip2 compressed stream of tar blocks - 'w|' open an uncompressed stream for writing - 'w|gz' open a gzip compressed stream for writing - 'w|bz2' open a bzip2 compressed stream for writing - """ - - if not name and not fileobj: - raise ValueError("nothing to open") - - if mode in ("r", "r:*"): - # Find out which *open() is appropriate for opening the file. - for comptype in cls.OPEN_METH: - func = getattr(cls, cls.OPEN_METH[comptype]) - if fileobj is not None: - saved_pos = fileobj.tell() - try: - return func(name, "r", fileobj, **kwargs) - except (ReadError, CompressionError) as e: - if fileobj is not None: - fileobj.seek(saved_pos) - continue - raise ReadError("file could not be opened successfully") - - elif ":" in mode: - filemode, comptype = mode.split(":", 1) - filemode = filemode or "r" - comptype = comptype or "tar" - - # Select the *open() function according to - # given compression. - if comptype in cls.OPEN_METH: - func = getattr(cls, cls.OPEN_METH[comptype]) - else: - raise CompressionError("unknown compression type %r" % comptype) - return func(name, filemode, fileobj, **kwargs) - - elif "|" in mode: - filemode, comptype = mode.split("|", 1) - filemode = filemode or "r" - comptype = comptype or "tar" - - if filemode not in "rw": - raise ValueError("mode must be 'r' or 'w'") - - stream = _Stream(name, filemode, comptype, fileobj, bufsize) - try: - t = cls(name, filemode, stream, **kwargs) - except: - stream.close() - raise - t._extfileobj = False - return t - - elif mode in "aw": - return cls.taropen(name, mode, fileobj, **kwargs) - - raise ValueError("undiscernible mode") - - @classmethod - def taropen(cls, name, mode="r", fileobj=None, **kwargs): - """Open uncompressed tar archive name for reading or writing. - """ - if len(mode) > 1 or mode not in "raw": - raise ValueError("mode must be 'r', 'a' or 'w'") - return cls(name, mode, fileobj, **kwargs) - - @classmethod - def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): - """Open gzip compressed tar archive name for reading or writing. - Appending is not allowed. - """ - if len(mode) > 1 or mode not in "rw": - raise ValueError("mode must be 'r' or 'w'") - - try: - import gzip - gzip.GzipFile - except (ImportError, AttributeError): - raise CompressionError("gzip module is not available") - - extfileobj = fileobj is not None - try: - fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) - t = cls.taropen(name, mode, fileobj, **kwargs) - except IOError: - if not extfileobj and fileobj is not None: - fileobj.close() - if fileobj is None: - raise - raise ReadError("not a gzip file") - except: - if not extfileobj and fileobj is not None: - fileobj.close() - raise - t._extfileobj = extfileobj - return t - - @classmethod - def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): - """Open bzip2 compressed tar archive name for reading or writing. 
- Appending is not allowed. - """ - if len(mode) > 1 or mode not in "rw": - raise ValueError("mode must be 'r' or 'w'.") - - try: - import bz2 - except ImportError: - raise CompressionError("bz2 module is not available") - - if fileobj is not None: - fileobj = _BZ2Proxy(fileobj, mode) - else: - fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) - - try: - t = cls.taropen(name, mode, fileobj, **kwargs) - except (IOError, EOFError): - fileobj.close() - raise ReadError("not a bzip2 file") - t._extfileobj = False - return t - - # All *open() methods are registered here. - OPEN_METH = { - "tar": "taropen", # uncompressed tar - "gz": "gzopen", # gzip compressed tar - "bz2": "bz2open" # bzip2 compressed tar - } - - #-------------------------------------------------------------------------- - # The public methods which TarFile provides: - - def close(self): - """Close the TarFile. In write-mode, two finishing zero blocks are - appended to the archive. - """ - if self.closed: - return - - if self.mode in "aw": - self.fileobj.write(NUL * (BLOCKSIZE * 2)) - self.offset += (BLOCKSIZE * 2) - # fill up the end with zero-blocks - # (like option -b20 for tar does) - blocks, remainder = divmod(self.offset, RECORDSIZE) - if remainder > 0: - self.fileobj.write(NUL * (RECORDSIZE - remainder)) - - if not self._extfileobj: - self.fileobj.close() - self.closed = True - - def getmember(self, name): - """Return a TarInfo object for member `name'. If `name' can not be - found in the archive, KeyError is raised. If a member occurs more - than once in the archive, its last occurrence is assumed to be the - most up-to-date version. - """ - tarinfo = self._getmember(name) - if tarinfo is None: - raise KeyError("filename %r not found" % name) - return tarinfo - - def getmembers(self): - """Return the members of the archive as a list of TarInfo objects. The - list has the same order as the members in the archive. - """ - self._check() - if not self._loaded: # if we want to obtain a list of - self._load() # all members, we first have to - # scan the whole archive. - return self.members - - def getnames(self): - """Return the members of the archive as a list of their names. It has - the same order as the list returned by getmembers(). - """ - return [tarinfo.name for tarinfo in self.getmembers()] - - def gettarinfo(self, name=None, arcname=None, fileobj=None): - """Create a TarInfo object for either the file `name' or the file - object `fileobj' (using os.fstat on its file descriptor). You can - modify some of the TarInfo's attributes before you add it using - addfile(). If given, `arcname' specifies an alternative name for the - file in the archive. - """ - self._check("aw") - - # When fileobj is given, replace name by - # fileobj's real name. - if fileobj is not None: - name = fileobj.name - - # Building the name of the member in the archive. - # Backward slashes are converted to forward slashes, - # Absolute paths are turned to relative paths. - if arcname is None: - arcname = name - drv, arcname = os.path.splitdrive(arcname) - arcname = arcname.replace(os.sep, "/") - arcname = arcname.lstrip("/") - - # Now, fill the TarInfo object with - # information specific for the file. - tarinfo = self.tarinfo() - tarinfo.tarfile = self - - # Use os.stat or os.lstat, depending on platform - # and if symlinks shall be resolved. 
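
close() above finishes a written archive in two steps: it appends two zero blocks as the end-of-archive marker, then pads the file to a multiple of RECORDSIZE (20 blocks, i.e. 10240 bytes, matching tar's -b20 default). A sketch of that arithmetic (helper name is illustrative):

    BLOCKSIZE = 512
    RECORDSIZE = BLOCKSIZE * 20          # 10240 bytes, tar's -b20 default

    def trailer_size(offset):
        """NUL bytes written after `offset` bytes of headers and data."""
        size = 2 * BLOCKSIZE             # end-of-archive marker
        remainder = (offset + size) % RECORDSIZE
        if remainder:
            size += RECORDSIZE - remainder   # pad to a full record
        return size

    # One header block plus one data block pads out to a single record:
    assert 1024 + trailer_size(1024) == RECORDSIZE
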
- if fileobj is None: - if hasattr(os, "lstat") and not self.dereference: - statres = os.lstat(name) - else: - statres = os.stat(name) - else: - statres = os.fstat(fileobj.fileno()) - linkname = "" - - stmd = statres.st_mode - if stat.S_ISREG(stmd): - inode = (statres.st_ino, statres.st_dev) - if not self.dereference and statres.st_nlink > 1 and \ - inode in self.inodes and arcname != self.inodes[inode]: - # Is it a hardlink to an already - # archived file? - type = LNKTYPE - linkname = self.inodes[inode] - else: - # The inode is added only if its valid. - # For win32 it is always 0. - type = REGTYPE - if inode[0]: - self.inodes[inode] = arcname - elif stat.S_ISDIR(stmd): - type = DIRTYPE - elif stat.S_ISFIFO(stmd): - type = FIFOTYPE - elif stat.S_ISLNK(stmd): - type = SYMTYPE - linkname = os.readlink(name) - elif stat.S_ISCHR(stmd): - type = CHRTYPE - elif stat.S_ISBLK(stmd): - type = BLKTYPE - else: - return None - - # Fill the TarInfo object with all - # information we can get. - tarinfo.name = arcname - tarinfo.mode = stmd - tarinfo.uid = statres.st_uid - tarinfo.gid = statres.st_gid - if type == REGTYPE: - tarinfo.size = statres.st_size - else: - tarinfo.size = 0 - tarinfo.mtime = statres.st_mtime - tarinfo.type = type - tarinfo.linkname = linkname - if pwd: - try: - tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] - except KeyError: - pass - if grp: - try: - tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] - except KeyError: - pass - - if type in (CHRTYPE, BLKTYPE): - if hasattr(os, "major") and hasattr(os, "minor"): - tarinfo.devmajor = os.major(statres.st_rdev) - tarinfo.devminor = os.minor(statres.st_rdev) - return tarinfo - - def list(self, verbose=True): - """Print a table of contents to sys.stdout. If `verbose' is False, only - the names of the members are printed. If it is True, an `ls -l'-like - output is produced. - """ - self._check() - - for tarinfo in self: - if verbose: - print(filemode(tarinfo.mode), end=' ') - print("%s/%s" % (tarinfo.uname or tarinfo.uid, - tarinfo.gname or tarinfo.gid), end=' ') - if tarinfo.ischr() or tarinfo.isblk(): - print("%10s" % ("%d,%d" \ - % (tarinfo.devmajor, tarinfo.devminor)), end=' ') - else: - print("%10d" % tarinfo.size, end=' ') - print("%d-%02d-%02d %02d:%02d:%02d" \ - % time.localtime(tarinfo.mtime)[:6], end=' ') - - print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') - - if verbose: - if tarinfo.issym(): - print("->", tarinfo.linkname, end=' ') - if tarinfo.islnk(): - print("link to", tarinfo.linkname, end=' ') - print() - - def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): - """Add the file `name' to the archive. `name' may be any type of file - (directory, fifo, symbolic link, etc.). If given, `arcname' - specifies an alternative name for the file in the archive. - Directories are added recursively by default. This can be avoided by - setting `recursive' to False. `exclude' is a function that should - return True for each filename to be excluded. `filter' is a function - that expects a TarInfo object argument and returns the changed - TarInfo object, if it returns None the TarInfo object will be - excluded from the archive. - """ - self._check("aw") - - if arcname is None: - arcname = name - - # Exclude pathnames. - if exclude is not None: - import warnings - warnings.warn("use the filter argument instead", - DeprecationWarning, 2) - if exclude(name): - self._dbg(2, "tarfile: Excluded %r" % name) - return - - # Skip if somebody tries to archive the archive... 
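
The stat branch of gettarinfo() above is also where hard links are recognized: the first path seen for an inode is archived as a regular file and remembered in self.inodes, and any later path with the same (st_ino, st_dev) and a link count above one becomes a LNKTYPE member pointing at that first name. A simplified sketch of just that decision (regular files only, type names as plain strings):

    import os

    def classify(path, arcname, inodes):
        st = os.lstat(path)
        inode = (st.st_ino, st.st_dev)
        if st.st_nlink > 1 and inode in inodes and arcname != inodes[inode]:
            # Hard link to a file that is already in the archive.
            return "LNKTYPE", inodes[inode]
        if inode[0]:             # st_ino is 0 on win32, so don't cache it
            inodes[inode] = arcname
        return "REGTYPE", ""
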
- if self.name is not None and os.path.abspath(name) == self.name: - self._dbg(2, "tarfile: Skipped %r" % name) - return - - self._dbg(1, name) - - # Create a TarInfo object from the file. - tarinfo = self.gettarinfo(name, arcname) - - if tarinfo is None: - self._dbg(1, "tarfile: Unsupported type %r" % name) - return - - # Change or exclude the TarInfo object. - if filter is not None: - tarinfo = filter(tarinfo) - if tarinfo is None: - self._dbg(2, "tarfile: Excluded %r" % name) - return - - # Append the tar header and data to the archive. - if tarinfo.isreg(): - f = bltn_open(name, "rb") - self.addfile(tarinfo, f) - f.close() - - elif tarinfo.isdir(): - self.addfile(tarinfo) - if recursive: - for f in os.listdir(name): - self.add(os.path.join(name, f), os.path.join(arcname, f), - recursive, exclude, filter=filter) - - else: - self.addfile(tarinfo) - - def addfile(self, tarinfo, fileobj=None): - """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is - given, tarinfo.size bytes are read from it and added to the archive. - You can create TarInfo objects using gettarinfo(). - On Windows platforms, `fileobj' should always be opened with mode - 'rb' to avoid irritation about the file size. - """ - self._check("aw") - - tarinfo = copy.copy(tarinfo) - - buf = tarinfo.tobuf(self.format, self.encoding, self.errors) - self.fileobj.write(buf) - self.offset += len(buf) - - # If there's data to follow, append it. - if fileobj is not None: - copyfileobj(fileobj, self.fileobj, tarinfo.size) - blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) - if remainder > 0: - self.fileobj.write(NUL * (BLOCKSIZE - remainder)) - blocks += 1 - self.offset += blocks * BLOCKSIZE - - self.members.append(tarinfo) - - def extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. `members' is optional and must be a subset of the - list returned by getmembers(). - """ - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. - directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 0o700 - # Do not set_attrs directories, as we will do that further down - self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) - - # Reverse sort directories. - directories.sort(key=lambda a: a.name) - directories.reverse() - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - def extract(self, member, path="", set_attrs=True): - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. `member' may be a filename or a TarInfo object. You can - specify a different directory using `path'. File attributes (owner, - mtime, mode) are set unless `set_attrs' is False. - """ - self._check("r") - - if isinstance(member, str): - tarinfo = self.getmember(member) - else: - tarinfo = member - - # Prepare the link target for makelink(). 
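
The exclude parameter of add() above is already deprecated in favour of filter, which can both veto and rewrite members. Since this vendored class keeps the stdlib tarfile interface, the hook can be illustrated with the standard module; the archive and directory names here are only examples:

    import tarfile

    def strip_owner(info: tarfile.TarInfo):
        if info.name.endswith(".pyc"):
            return None                  # returning None excludes the member
        info.uid = info.gid = 0          # normalize ownership
        info.uname = info.gname = "root"
        return info

    with tarfile.open("example.tar.gz", "w:gz") as tar:
        tar.add("src", arcname="src", filter=strip_owner)
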
- if tarinfo.islnk(): - tarinfo._link_target = os.path.join(path, tarinfo.linkname) - - try: - self._extract_member(tarinfo, os.path.join(path, tarinfo.name), - set_attrs=set_attrs) - except EnvironmentError as e: - if self.errorlevel > 0: - raise - else: - if e.filename is None: - self._dbg(1, "tarfile: %s" % e.strerror) - else: - self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) - except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - def extractfile(self, member): - """Extract a member from the archive as a file object. `member' may be - a filename or a TarInfo object. If `member' is a regular file, a - file-like object is returned. If `member' is a link, a file-like - object is constructed from the link's target. If `member' is none of - the above, None is returned. - The file-like object is read-only and provides the following - methods: read(), readline(), readlines(), seek() and tell() - """ - self._check("r") - - if isinstance(member, str): - tarinfo = self.getmember(member) - else: - tarinfo = member - - if tarinfo.isreg(): - return self.fileobject(self, tarinfo) - - elif tarinfo.type not in SUPPORTED_TYPES: - # If a member's type is unknown, it is treated as a - # regular file. - return self.fileobject(self, tarinfo) - - elif tarinfo.islnk() or tarinfo.issym(): - if isinstance(self.fileobj, _Stream): - # A small but ugly workaround for the case that someone tries - # to extract a (sym)link as a file-object from a non-seekable - # stream of tar blocks. - raise StreamError("cannot extract (sym)link as file object") - else: - # A (sym)link's file object is its target's file object. - return self.extractfile(self._find_link_target(tarinfo)) - else: - # If there's no data associated with the member (directory, chrdev, - # blkdev, etc.), return None instead of a file object. - return None - - def _extract_member(self, tarinfo, targetpath, set_attrs=True): - """Extract the TarInfo object tarinfo to a physical - file called targetpath. - """ - # Fetch the TarInfo object for the given name - # and build the destination pathname, replacing - # forward slashes to platform specific separators. - targetpath = targetpath.rstrip("/") - targetpath = targetpath.replace("/", os.sep) - - # Create all upper directories. - upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - # Create directories that are not part of the archive with - # default permissions. - os.makedirs(upperdirs) - - if tarinfo.islnk() or tarinfo.issym(): - self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) - else: - self._dbg(1, tarinfo.name) - - if tarinfo.isreg(): - self.makefile(tarinfo, targetpath) - elif tarinfo.isdir(): - self.makedir(tarinfo, targetpath) - elif tarinfo.isfifo(): - self.makefifo(tarinfo, targetpath) - elif tarinfo.ischr() or tarinfo.isblk(): - self.makedev(tarinfo, targetpath) - elif tarinfo.islnk() or tarinfo.issym(): - self.makelink(tarinfo, targetpath) - elif tarinfo.type not in SUPPORTED_TYPES: - self.makeunknown(tarinfo, targetpath) - else: - self.makefile(tarinfo, targetpath) - - if set_attrs: - self.chown(tarinfo, targetpath) - if not tarinfo.issym(): - self.chmod(tarinfo, targetpath) - self.utime(tarinfo, targetpath) - - #-------------------------------------------------------------------------- - # Below are the different file methods. They are called via - # _extract_member() when extract() is called. They can be replaced in a - # subclass to implement other functionality. 
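
extractfile() above returns a read-only file object for regular members (and for links, by resolving them to their targets), but None for members that carry no data. The interface again matches the stdlib module, so a usage sketch looks like this (archive name is illustrative):

    import tarfile

    with tarfile.open("example.tar", "r") as tar:
        for info in tar:
            f = tar.extractfile(info)
            if f is None:
                continue                 # directory, device node, fifo, ...
            data = f.read()
            print(info.name, len(data))
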
- - def makedir(self, tarinfo, targetpath): - """Make a directory called targetpath. - """ - try: - # Use a safe mode for the directory, the real mode is set - # later in _extract_member(). - os.mkdir(targetpath, 0o700) - except EnvironmentError as e: - if e.errno != errno.EEXIST: - raise - - def makefile(self, tarinfo, targetpath): - """Make a file called targetpath. - """ - source = self.fileobj - source.seek(tarinfo.offset_data) - target = bltn_open(targetpath, "wb") - if tarinfo.sparse is not None: - for offset, size in tarinfo.sparse: - target.seek(offset) - copyfileobj(source, target, size) - else: - copyfileobj(source, target, tarinfo.size) - target.seek(tarinfo.size) - target.truncate() - target.close() - - def makeunknown(self, tarinfo, targetpath): - """Make a file from a TarInfo object with an unknown type - at targetpath. - """ - self.makefile(tarinfo, targetpath) - self._dbg(1, "tarfile: Unknown file type %r, " \ - "extracted as regular file." % tarinfo.type) - - def makefifo(self, tarinfo, targetpath): - """Make a fifo called targetpath. - """ - if hasattr(os, "mkfifo"): - os.mkfifo(targetpath) - else: - raise ExtractError("fifo not supported by system") - - def makedev(self, tarinfo, targetpath): - """Make a character or block device called targetpath. - """ - if not hasattr(os, "mknod") or not hasattr(os, "makedev"): - raise ExtractError("special devices not supported by system") - - mode = tarinfo.mode - if tarinfo.isblk(): - mode |= stat.S_IFBLK - else: - mode |= stat.S_IFCHR - - os.mknod(targetpath, mode, - os.makedev(tarinfo.devmajor, tarinfo.devminor)) - - def makelink(self, tarinfo, targetpath): - """Make a (symbolic) link called targetpath. If it cannot be created - (platform limitation), we try to make a copy of the referenced file - instead of a link. - """ - try: - # For systems that support symbolic and hard links. - if tarinfo.issym(): - os.symlink(tarinfo.linkname, targetpath) - else: - # See extract(). - if os.path.exists(tarinfo._link_target): - os.link(tarinfo._link_target, targetpath) - else: - self._extract_member(self._find_link_target(tarinfo), - targetpath) - except symlink_exception: - if tarinfo.issym(): - linkpath = os.path.join(os.path.dirname(tarinfo.name), - tarinfo.linkname) - else: - linkpath = tarinfo.linkname - else: - try: - self._extract_member(self._find_link_target(tarinfo), - targetpath) - except KeyError: - raise ExtractError("unable to resolve link inside archive") - - def chown(self, tarinfo, targetpath): - """Set owner of targetpath according to tarinfo. - """ - if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: - # We have to be root to do so. - try: - g = grp.getgrnam(tarinfo.gname)[2] - except KeyError: - g = tarinfo.gid - try: - u = pwd.getpwnam(tarinfo.uname)[2] - except KeyError: - u = tarinfo.uid - try: - if tarinfo.issym() and hasattr(os, "lchown"): - os.lchown(targetpath, u, g) - else: - if sys.platform != "os2emx": - os.chown(targetpath, u, g) - except EnvironmentError as e: - raise ExtractError("could not change owner") - - def chmod(self, tarinfo, targetpath): - """Set file permissions of targetpath according to tarinfo. - """ - if hasattr(os, 'chmod'): - try: - os.chmod(targetpath, tarinfo.mode) - except EnvironmentError as e: - raise ExtractError("could not change mode") - - def utime(self, tarinfo, targetpath): - """Set modification time of targetpath according to tarinfo. 
- """ - if not hasattr(os, 'utime'): - return - try: - os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) - except EnvironmentError as e: - raise ExtractError("could not change modification time") - - #-------------------------------------------------------------------------- - def next(self): - """Return the next member of the archive as a TarInfo object, when - TarFile is opened for reading. Return None if there is no more - available. - """ - self._check("ra") - if self.firstmember is not None: - m = self.firstmember - self.firstmember = None - return m - - # Read the next block. - self.fileobj.seek(self.offset) - tarinfo = None - while True: - try: - tarinfo = self.tarinfo.fromtarfile(self) - except EOFHeaderError as e: - if self.ignore_zeros: - self._dbg(2, "0x%X: %s" % (self.offset, e)) - self.offset += BLOCKSIZE - continue - except InvalidHeaderError as e: - if self.ignore_zeros: - self._dbg(2, "0x%X: %s" % (self.offset, e)) - self.offset += BLOCKSIZE - continue - elif self.offset == 0: - raise ReadError(str(e)) - except EmptyHeaderError: - if self.offset == 0: - raise ReadError("empty file") - except TruncatedHeaderError as e: - if self.offset == 0: - raise ReadError(str(e)) - except SubsequentHeaderError as e: - raise ReadError(str(e)) - break - - if tarinfo is not None: - self.members.append(tarinfo) - else: - self._loaded = True - - return tarinfo - - #-------------------------------------------------------------------------- - # Little helper methods: - - def _getmember(self, name, tarinfo=None, normalize=False): - """Find an archive member by name from bottom to top. - If tarinfo is given, it is used as the starting point. - """ - # Ensure that all members have been loaded. - members = self.getmembers() - - # Limit the member search list up to tarinfo. - if tarinfo is not None: - members = members[:members.index(tarinfo)] - - if normalize: - name = os.path.normpath(name) - - for member in reversed(members): - if normalize: - member_name = os.path.normpath(member.name) - else: - member_name = member.name - - if name == member_name: - return member - - def _load(self): - """Read through the entire archive file and look for readable - members. - """ - while True: - tarinfo = self.next() - if tarinfo is None: - break - self._loaded = True - - def _check(self, mode=None): - """Check if TarFile is still open, and if the operation's mode - corresponds to TarFile's mode. - """ - if self.closed: - raise IOError("%s is closed" % self.__class__.__name__) - if mode is not None and self.mode not in mode: - raise IOError("bad operation for mode %r" % self.mode) - - def _find_link_target(self, tarinfo): - """Find the target member of a symlink or hardlink member in the - archive. - """ - if tarinfo.issym(): - # Always search the entire archive. - linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname - limit = None - else: - # Search the archive before the link, because a hard link is - # just a reference to an already archived file. - linkname = tarinfo.linkname - limit = tarinfo - - member = self._getmember(linkname, tarinfo=limit, normalize=True) - if member is None: - raise KeyError("linkname %r not found" % linkname) - return member - - def __iter__(self): - """Provide an iterator object. - """ - if self._loaded: - return iter(self.members) - else: - return TarIter(self) - - def _dbg(self, level, msg): - """Write debugging output to sys.stderr. 
- """ - if level <= self.debug: - print(msg, file=sys.stderr) - - def __enter__(self): - self._check() - return self - - def __exit__(self, type, value, traceback): - if type is None: - self.close() - else: - # An exception occurred. We must not call close() because - # it would try to write end-of-archive blocks and padding. - if not self._extfileobj: - self.fileobj.close() - self.closed = True -# class TarFile - -class TarIter(object): - """Iterator Class. - - for tarinfo in TarFile(...): - suite... - """ - - def __init__(self, tarfile): - """Construct a TarIter object. - """ - self.tarfile = tarfile - self.index = 0 - def __iter__(self): - """Return iterator object. - """ - return self - - def __next__(self): - """Return the next item using TarFile's next() method. - When all members have been read, set TarFile as _loaded. - """ - # Fix for SF #1100429: Under rare circumstances it can - # happen that getmembers() is called during iteration, - # which will cause TarIter to stop prematurely. - if not self.tarfile._loaded: - tarinfo = self.tarfile.next() - if not tarinfo: - self.tarfile._loaded = True - raise StopIteration - else: - try: - tarinfo = self.tarfile.members[self.index] - except IndexError: - raise StopIteration - self.index += 1 - return tarinfo - - next = __next__ # for Python 2.x - -#-------------------- -# exported functions -#-------------------- -def is_tarfile(name): - """Return True if name points to a tar archive that we - are able to handle, else return False. - """ - try: - t = open(name) - t.close() - return True - except TarError: - return False - -bltn_open = open -open = TarFile.open diff --git a/src/main/python/pybuilder/_vendor/distlib/compat.py b/src/main/python/pybuilder/_vendor/distlib/compat.py index e59410695..1fe3d225a 100644 --- a/src/main/python/pybuilder/_vendor/distlib/compat.py +++ b/src/main/python/pybuilder/_vendor/distlib/compat.py @@ -22,7 +22,6 @@ from types import FileType as file_type import __builtin__ as builtins import ConfigParser as configparser - from ._backport import shutil from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, pathname2url, ContentTooShortError, splittype) @@ -313,10 +312,8 @@ def python_implementation(): return 'IronPython' return 'CPython' -try: - import sysconfig -except ImportError: # pragma: no cover - from ._backport import sysconfig +import shutil +import sysconfig try: callable = callable @@ -618,18 +615,15 @@ def clear(self): try: from importlib.util import cache_from_source # Python >= 3.4 except ImportError: # pragma: no cover - try: - from imp import cache_from_source - except ImportError: # pragma: no cover - def cache_from_source(path, debug_override=None): - assert path.endswith('.py') - if debug_override is None: - debug_override = __debug__ - if debug_override: - suffix = 'c' - else: - suffix = 'o' - return path + suffix + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix try: from collections import OrderedDict diff --git a/src/main/python/pybuilder/_vendor/distlib/database.py b/src/main/python/pybuilder/_vendor/distlib/database.py index 0a90c300b..f48699441 100644 --- a/src/main/python/pybuilder/_vendor/distlib/database.py +++ b/src/main/python/pybuilder/_vendor/distlib/database.py @@ -132,29 +132,35 @@ def _yield_distributions(self): r = 
finder.find(entry) if not r or r.path in seen: continue - if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [METADATA_FILENAME, - WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME] - for metadata_filename in possible_filenames: - metadata_path = posixpath.join(entry, metadata_filename) - pydist = finder.find(metadata_path) - if pydist: - break - else: - continue + try: + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue - with contextlib.closing(pydist.as_stream()) as stream: - metadata = Metadata(fileobj=stream, scheme='legacy') - logger.debug('Found %s', r.path) - seen.add(r.path) - yield new_dist_class(r.path, metadata=metadata, - env=self) - elif self._include_egg and entry.endswith(('.egg-info', - '.egg')): - logger.debug('Found %s', r.path) - seen.add(r.path) - yield old_dist_class(r.path, self) + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + except Exception as e: + msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s' + logger.warning(msg, r.path, e) + import warnings + warnings.warn(msg % (r.path, e), stacklevel=2) def _generate_cache(self): """ diff --git a/src/main/python/pybuilder/_vendor/distlib/locators.py b/src/main/python/pybuilder/_vendor/distlib/locators.py index 0c7d63914..c78bc9e23 100644 --- a/src/main/python/pybuilder/_vendor/distlib/locators.py +++ b/src/main/python/pybuilder/_vendor/distlib/locators.py @@ -633,7 +633,7 @@ def _prepare_threads(self): self._threads = [] for i in range(self.num_workers): t = threading.Thread(target=self._fetch) - t.setDaemon(True) + t.daemon = True t.start() self._threads.append(t) diff --git a/src/main/python/pybuilder/_vendor/distlib/markers.py b/src/main/python/pybuilder/_vendor/distlib/markers.py index b43136fa1..9dc684103 100644 --- a/src/main/python/pybuilder/_vendor/distlib/markers.py +++ b/src/main/python/pybuilder/_vendor/distlib/markers.py @@ -90,6 +90,8 @@ def evaluate(self, expr, context): result = self.operations[op](lhs, rhs) return result +_DIGITS = re.compile(r'\d+\.\d+') + def default_context(): def format_full_version(info): version = '%s.%s.%s' % (info.major, info.minor, info.micro) @@ -105,6 +107,9 @@ def format_full_version(info): implementation_version = '0' implementation_name = '' + ppv = platform.python_version() + m = _DIGITS.match(ppv) + pv = m.group(0) result = { 'implementation_name': implementation_name, 'implementation_version': implementation_version, @@ -115,8 +120,8 @@ def format_full_version(info): 'platform_system': platform.system(), 'platform_version': platform.version(), 'platform_in_venv': str(in_venv()), - 'python_full_version': platform.python_version(), - 'python_version': platform.python_version()[:3], + 'python_full_version': ppv, + 'python_version': pv, 'sys_platform': sys.platform, } return result diff --git a/src/main/python/pybuilder/_vendor/distlib/t32.exe b/src/main/python/pybuilder/_vendor/distlib/t32.exe index 
8932a18e4..31baab619 100644 Binary files a/src/main/python/pybuilder/_vendor/distlib/t32.exe and b/src/main/python/pybuilder/_vendor/distlib/t32.exe differ diff --git a/src/main/python/pybuilder/_vendor/distlib/t64-arm.exe b/src/main/python/pybuilder/_vendor/distlib/t64-arm.exe index c5df4869d..3acd5ef9e 100644 Binary files a/src/main/python/pybuilder/_vendor/distlib/t64-arm.exe and b/src/main/python/pybuilder/_vendor/distlib/t64-arm.exe differ diff --git a/src/main/python/pybuilder/_vendor/distlib/t64.exe b/src/main/python/pybuilder/_vendor/distlib/t64.exe index 325b8057c..ab825cc41 100644 Binary files a/src/main/python/pybuilder/_vendor/distlib/t64.exe and b/src/main/python/pybuilder/_vendor/distlib/t64.exe differ diff --git a/src/main/python/pybuilder/_vendor/distlib/util.py b/src/main/python/pybuilder/_vendor/distlib/util.py index 80bfc864b..dd01849d9 100644 --- a/src/main/python/pybuilder/_vendor/distlib/util.py +++ b/src/main/python/pybuilder/_vendor/distlib/util.py @@ -1432,29 +1432,19 @@ def connect(self): self.sock = sock self._tunnel() - if not hasattr(ssl, 'SSLContext'): - # For 2.x - if self.ca_certs: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, - cert_reqs=cert_reqs, - ssl_version=ssl.PROTOCOL_SSLv23, - ca_certs=self.ca_certs) - else: # pragma: no cover - context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - if hasattr(ssl, 'OP_NO_SSLv2'): - context.options |= ssl.OP_NO_SSLv2 - if self.cert_file: - context.load_cert_chain(self.cert_file, self.key_file) - kwargs = {} - if self.ca_certs: - context.verify_mode = ssl.CERT_REQUIRED - context.load_verify_locations(cafile=self.ca_certs) - if getattr(ssl, 'HAS_SNI', False): - kwargs['server_hostname'] = self.host - self.sock = context.wrap_socket(sock, **kwargs) + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + + self.sock = context.wrap_socket(sock, **kwargs) if self.ca_certs and self.check_domain: try: match_hostname(self.sock.getpeercert(), self.host) @@ -1513,25 +1503,6 @@ def http_open(self, req): # # XML-RPC with timeouts # - -_ver_info = sys.version_info[:2] - -if _ver_info == (2, 6): - class HTTP(httplib.HTTP): - def __init__(self, host='', port=None, **kwargs): - if port == 0: # 0 means use port 0, not the default port - port = None - self._setup(self._connection_class(host, port, **kwargs)) - - - if ssl: - class HTTPS(httplib.HTTPS): - def __init__(self, host='', port=None, **kwargs): - if port == 0: # 0 means use port 0, not the default port - port = None - self._setup(self._connection_class(host, port, **kwargs)) - - class Transport(xmlrpclib.Transport): def __init__(self, timeout, use_datetime=0): self.timeout = timeout @@ -1539,14 +1510,10 @@ def __init__(self, timeout, use_datetime=0): def make_connection(self, host): h, eh, x509 = self.get_host_info(host) - if _ver_info == (2, 6): - result = HTTP(h, timeout=self.timeout) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPConnection(h) - result = self._connection[1] - return result + if not self._connection or host != self._connection[0]: + 
self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + return self._connection[1] if ssl: class SafeTransport(xmlrpclib.SafeTransport): @@ -1559,15 +1526,11 @@ def make_connection(self, host): if not kwargs: kwargs = {} kwargs['timeout'] = self.timeout - if _ver_info == (2, 6): - result = HTTPS(host, None, **kwargs) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection(h, None, - **kwargs) - result = self._connection[1] - return result + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + return self._connection[1] class ServerProxy(xmlrpclib.ServerProxy): diff --git a/src/main/python/pybuilder/_vendor/distlib/w32.exe b/src/main/python/pybuilder/_vendor/distlib/w32.exe index e6439e9e4..56827c1c4 100644 Binary files a/src/main/python/pybuilder/_vendor/distlib/w32.exe and b/src/main/python/pybuilder/_vendor/distlib/w32.exe differ diff --git a/src/main/python/pybuilder/_vendor/distlib/w64-arm.exe b/src/main/python/pybuilder/_vendor/distlib/w64-arm.exe index 70a2ec268..8eeb01199 100644 Binary files a/src/main/python/pybuilder/_vendor/distlib/w64-arm.exe and b/src/main/python/pybuilder/_vendor/distlib/w64-arm.exe differ diff --git a/src/main/python/pybuilder/_vendor/distlib/w64.exe b/src/main/python/pybuilder/_vendor/distlib/w64.exe index 46139dbf9..5b82eff39 100644 Binary files a/src/main/python/pybuilder/_vendor/distlib/w64.exe and b/src/main/python/pybuilder/_vendor/distlib/w64.exe differ diff --git a/src/main/python/pybuilder/_vendor/distutils-precedence.pth b/src/main/python/pybuilder/_vendor/distutils-precedence.pth deleted file mode 100644 index 6de4198fc..000000000 --- a/src/main/python/pybuilder/_vendor/distutils-precedence.pth +++ /dev/null @@ -1 +0,0 @@ -import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/src/main/python/pybuilder/_vendor/filelock/_api.py b/src/main/python/pybuilder/_vendor/filelock/_api.py index 1fd003336..95956480f 100644 --- a/src/main/python/pybuilder/_vendor/filelock/_api.py +++ b/src/main/python/pybuilder/_vendor/filelock/_api.py @@ -1,6 +1,7 @@ import logging import os import time +import warnings from abc import ABC, abstractmethod from threading import Lock from types import TracebackType @@ -106,13 +107,20 @@ def is_locked(self) -> bool: """ return self._lock_file_fd is not None - def acquire(self, timeout: Optional[float] = None, poll_intervall: float = 0.05) -> AcquireReturnProxy: + def acquire( + self, + timeout: Optional[float] = None, + poll_interval: float = 0.05, + *, + poll_intervall: Optional[float] = None, + ) -> AcquireReturnProxy: """ Try to acquire the file lock. 
:param timeout: maximum wait time for acquiring the lock, ``None`` means use the default :attr:`~timeout` is and if ``timeout < 0``, there is no timeout and this method will block until the lock could be acquired - :param poll_intervall: interval of trying to acquire the lock file + :param poll_interval: interval of trying to acquire the lock file + :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead :raises Timeout: if fails to acquire lock within the timeout period :return: a context object that will unlock the file when the context is exited @@ -134,12 +142,16 @@ def acquire(self, timeout: Optional[float] = None, poll_intervall: float = 0.05) This method returns now a *proxy* object instead of *self*, so that it can be used in a with statement without side effects. - """ # Use the default timeout, if no timeout is provided. if timeout is None: timeout = self.timeout + if poll_intervall is not None: + msg = "use poll_interval instead of poll_intervall" + warnings.warn(msg, DeprecationWarning, stacklevel=2) + poll_interval = poll_intervall + # Increment the number right at the beginning. We can still undo it, if something fails. with self._thread_lock: self._lock_counter += 1 @@ -162,8 +174,8 @@ def acquire(self, timeout: Optional[float] = None, poll_intervall: float = 0.05) raise Timeout(self._lock_file) else: msg = "Lock %s not acquired on %s, waiting %s seconds ..." - _LOGGER.debug(msg, lock_id, lock_filename, poll_intervall) - time.sleep(poll_intervall) + _LOGGER.debug(msg, lock_id, lock_filename, poll_interval) + time.sleep(poll_interval) except BaseException: # Something did go wrong, so decrement the counter. with self._thread_lock: self._lock_counter = max(0, self._lock_counter - 1) diff --git a/src/main/python/pybuilder/_vendor/filelock/_unix.py b/src/main/python/pybuilder/_vendor/filelock/_unix.py index 19af2de75..02e24ffcb 100644 --- a/src/main/python/pybuilder/_vendor/filelock/_unix.py +++ b/src/main/python/pybuilder/_vendor/filelock/_unix.py @@ -12,7 +12,6 @@ class UnixFileLock(BaseFileLock, ABC): """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems.""" - else: # pragma: win32 no cover try: import fcntl diff --git a/src/main/python/pybuilder/_vendor/filelock/_windows.py b/src/main/python/pybuilder/_vendor/filelock/_windows.py index 2bb206c24..a163202a1 100644 --- a/src/main/python/pybuilder/_vendor/filelock/_windows.py +++ b/src/main/python/pybuilder/_vendor/filelock/_windows.py @@ -45,7 +45,6 @@ def _release(self) -> None: except OSError: pass - else: # pragma: win32 no cover class WindowsFileLock(BaseFileLock, ABC): diff --git a/src/main/python/pybuilder/_vendor/filelock/version.py b/src/main/python/pybuilder/_vendor/filelock/version.py index 667ff63ca..5cbf9b173 100644 --- a/src/main/python/pybuilder/_vendor/filelock/version.py +++ b/src/main/python/pybuilder/_vendor/filelock/version.py @@ -1,5 +1,5 @@ # coding: utf-8 # file generated by setuptools_scm # don't change, don't track in version control -version = '3.3.2' -version_tuple = (3, 3, 2) +version = '3.4.1' +version_tuple = (3, 4, 1) diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/INSTALLER b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/INSTALLER similarity index 100% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/INSTALLER rename to src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/INSTALLER diff --git 
a/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/LICENSE b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/LICENSE new file mode 100644 index 000000000..be7e092b0 --- /dev/null +++ b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Jason R. Coombs, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/METADATA b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/METADATA new file mode 100644 index 000000000..4c16e71b5 --- /dev/null +++ b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 4.10.0 +Summary: Read metadata from Python packages +Home-page: https://github.com/python/importlib_metadata +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: zipp (>=0.5) +Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pyfakefs ; extra == 'testing' +Requires-Dist: flufl.flake8 ; extra == 'testing' +Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/importlib_metadata + +.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. 
image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + +Library to access the metadata for a Python package. + +This package supplies third-party access to the functionality of +`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_ +including improvements added to subsequent Python versions. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_metadata + - stdlib + * - 4.8 + - 3.11 + * - 4.4 + - 3.10 + * - 1.4 + - 3.8 + + +Usage +===== + +See the `online documentation <https://importlib_metadata.readthedocs.io/>`_ +for usage details. + +`Finder authors +<https://importlib_metadata.readthedocs.io/en/latest/using.html#extending-the-search-algorithm>`_ can +also add support for custom package installers. See the above documentation +for details. + + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. +- Packages installed without metadata. + +Project details +=============== + + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib_metadata.readthedocs.io/ + + diff --git a/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/RECORD b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/RECORD new file mode 100644 index 000000000..10a5c17e3 --- /dev/null +++ b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/RECORD @@ -0,0 +1,24 @@ +importlib_metadata-4.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_metadata-4.10.0.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 +importlib_metadata-4.10.0.dist-info/METADATA,sha256=N1FtYY7tzLu7LlsAS0HnBccOwldt0hm8rUNHZEZcVtM,3989 +importlib_metadata-4.10.0.dist-info/RECORD,, +importlib_metadata-4.10.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata-4.10.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +importlib_metadata-4.10.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata/__init__.py,sha256=70l2jeZjz3YeOXwk1GVI9HKxFbeCDNQnAp0dWZ-ccCs,30173 +importlib_metadata/__pycache__/__init__.cpython-39.pyc,, +importlib_metadata/__pycache__/_adapters.cpython-39.pyc,, +importlib_metadata/__pycache__/_collections.cpython-39.pyc,, +importlib_metadata/__pycache__/_compat.cpython-39.pyc,, +importlib_metadata/__pycache__/_functools.cpython-39.pyc,, +importlib_metadata/__pycache__/_itertools.cpython-39.pyc,, +importlib_metadata/__pycache__/_meta.cpython-39.pyc,, +importlib_metadata/__pycache__/_text.cpython-39.pyc,, +importlib_metadata/_adapters.py,sha256=B6fCi5-8mLVDFUZj3krI5nAo-mKp1dH_qIavyIyFrJs,1862 +importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 +importlib_metadata/_compat.py,sha256=EU2XCFBPFByuI0Of6XkAuBYbzqSyjwwwwqmsK4ccna0,1826 +importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
+importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 +importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO23qEygbVdF9r-6qh-hjE,1154 +importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 +importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/REQUESTED b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/REQUESTED similarity index 100% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/REQUESTED rename to src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/REQUESTED diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/WHEEL b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/WHEEL similarity index 83% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/WHEEL rename to src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/WHEEL index b733a60d3..5bad85fdc 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/WHEEL +++ b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/WHEEL @@ -1,6 +1,5 @@ Wheel-Version: 1.0 Generator: bdist_wheel (0.37.0) Root-Is-Purelib: true -Tag: py2-none-any Tag: py3-none-any diff --git a/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/top_level.txt b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/top_level.txt new file mode 100644 index 000000000..bbb07547a --- /dev/null +++ b/src/main/python/pybuilder/_vendor/importlib_metadata-4.10.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/src/main/python/pybuilder/_vendor/importlib_metadata/__init__.py b/src/main/python/pybuilder/_vendor/importlib_metadata/__init__.py index 951c12c5b..0de5aa084 100644 --- a/src/main/python/pybuilder/_vendor/importlib_metadata/__init__.py +++ b/src/main/python/pybuilder/_vendor/importlib_metadata/__init__.py @@ -21,7 +21,7 @@ install, pypy_partial, ) -from ._functools import method_cache +from ._functools import method_cache, pass_none from ._itertools import always_iterable, unique_everseen from ._meta import PackageMetadata, SimplePath @@ -290,37 +290,26 @@ class DeprecatedList(list): stacklevel=pypy_partial(2), ) - def __setitem__(self, *args, **kwargs): - self._warn() - return super().__setitem__(*args, **kwargs) - - def __delitem__(self, *args, **kwargs): - self._warn() - return super().__delitem__(*args, **kwargs) - - def append(self, *args, **kwargs): - self._warn() - return super().append(*args, **kwargs) - - def reverse(self, *args, **kwargs): - self._warn() - return super().reverse(*args, **kwargs) - - def extend(self, *args, **kwargs): - self._warn() - return super().extend(*args, **kwargs) - - def pop(self, *args, **kwargs): - self._warn() - return super().pop(*args, **kwargs) - - def remove(self, *args, **kwargs): - self._warn() - return super().remove(*args, **kwargs) - - def __iadd__(self, *args, **kwargs): - self._warn() - return super().__iadd__(*args, **kwargs) + def _wrap_deprecated_method(method_name: str): # type: ignore + def wrapped(self, *args, **kwargs): + self._warn() + return getattr(super(), method_name)(*args, **kwargs) + + return wrapped + + for method_name in [ + '__setitem__', + '__delitem__', + 'append', + 'reverse', + 'extend', + 'pop', + 'remove', + '__iadd__', + 'insert', + 'sort', + ]: + locals()[method_name] = 
_wrap_deprecated_method(method_name) def __add__(self, other): if not isinstance(other, tuple): @@ -328,14 +317,6 @@ def __add__(self, other): other = tuple(other) return self.__class__(tuple(self) + other) - def insert(self, *args, **kwargs): - self._warn() - return super().insert(*args, **kwargs) - - def sort(self, *args, **kwargs): - self._warn() - return super().sort(*args, **kwargs) - def __eq__(self, other): if not isinstance(other, tuple): self._warn() @@ -595,18 +576,6 @@ def _discover_resolvers(): ) return filter(None, declared) - @classmethod - def _local(cls, root='.'): - from pep517 import build, meta - - system = build.compat_system(root) - builder = functools.partial( - meta.build, - source_dir=root, - system=system, - ) - return PathDistribution(zipp.Path(meta.build_as_zip(builder))) - @property def metadata(self) -> _meta.PackageMetadata: """Return the parsed metadata for this Distribution. @@ -654,7 +623,6 @@ def files(self): missing. Result may be empty if the metadata exists but is empty. """ - file_lines = self._read_files_distinfo() or self._read_files_egginfo() def make_file(name, hash=None, size_str=None): result = PackagePath(name) @@ -663,7 +631,11 @@ def make_file(name, hash=None, size_str=None): result.dist = self return result - return file_lines and list(starmap(make_file, csv.reader(file_lines))) + @pass_none + def make_files(lines): + return list(starmap(make_file, csv.reader(lines))) + + return make_files(self._read_files_distinfo() or self._read_files_egginfo()) def _read_files_distinfo(self): """ @@ -712,7 +684,7 @@ def _convert_egg_info_reqs_to_simple_reqs(sections): def make_condition(name): return name and f'extra == "{name}"' - def parse_condition(section): + def quoted_marker(section): section = section or '' extra, sep, markers = section.partition(':') if extra and markers: @@ -720,8 +692,17 @@ def parse_condition(section): conditions = list(filter(None, [markers, make_condition(extra)])) return '; ' + ' and '.join(conditions) if conditions else '' + def url_req_space(req): + """ + PEP 508 requires a space between the url_spec and the quoted_marker. + Ref python/importlib_metadata#357. + """ + # '@' is uniquely indicative of a url_req. + return ' ' * ('@' in req) + for section in sections: - yield section.value + parse_condition(section.name) + space = url_req_space(section.value) + yield section.value + space + quoted_marker(section.name) class DistributionFinder(MetaPathFinder): @@ -776,6 +757,9 @@ class FastPath: """ Micro-optimized class for searching a path for children. 
+ + >>> FastPath('').children() + ['...'] """ @functools.lru_cache() # type: ignore @@ -790,7 +774,7 @@ def joinpath(self, child): def children(self): with suppress(Exception): - return os.listdir(self.root or '') + return os.listdir(self.root or '.') with suppress(Exception): return self.zip_children() return [] diff --git a/src/main/python/pybuilder/_vendor/importlib_metadata/_compat.py b/src/main/python/pybuilder/_vendor/importlib_metadata/_compat.py index 90971339d..8fe4e4e3c 100644 --- a/src/main/python/pybuilder/_vendor/importlib_metadata/_compat.py +++ b/src/main/python/pybuilder/_vendor/importlib_metadata/_compat.py @@ -8,12 +8,7 @@ try: from typing import Protocol except ImportError: # pragma: no cover - """ - pytest-mypy complains here because: - error: Incompatible import of "Protocol" (imported name has type - "typing_extensions._SpecialForm", local name has type "typing._SpecialForm") - """ - from ..typing_extensions import Protocol # type: ignore + from typing_extensions import Protocol # type: ignore def install(cls): diff --git a/src/main/python/pybuilder/_vendor/importlib_metadata/_functools.py b/src/main/python/pybuilder/_vendor/importlib_metadata/_functools.py index 73f50d00b..71f66bd03 100644 --- a/src/main/python/pybuilder/_vendor/importlib_metadata/_functools.py +++ b/src/main/python/pybuilder/_vendor/importlib_metadata/_functools.py @@ -83,3 +83,22 @@ def wrapper(self, *args, **kwargs): wrapper.cache_clear = lambda: None return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/__init__.py b/src/main/python/pybuilder/_vendor/pkg_resources/__init__.py index e81c46674..10ca86b0f 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/__init__.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/__init__.py @@ -1484,7 +1484,7 @@ def _fn(self, base, resource_name): def _validate_resource_path(path): """ Validate the resource paths according to the docs. - https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access + https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access >>> warned = getfixture('recwarn') >>> warnings.simplefilter('always') @@ -2015,7 +2015,7 @@ def _by_version_descending(names): >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] @@ -2023,13 +2023,22 @@ def _by_version_descending(names): >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] """ + def try_parse(name): + """ + Attempt to parse as a version or return a null version. 
+ """ + try: + return packaging.version.Version(name) + except Exception: + return packaging.version.Version('0') + def _by_version(name): """ Parse each component of the filename """ name, ext = os.path.splitext(name) parts = itertools.chain(name.split('-'), [ext]) - return [packaging.version.parse(part) for part in parts] + return [try_parse(part) for part in parts] return sorted(names, key=_by_version, reverse=True) @@ -2387,18 +2396,19 @@ def _set_parent_ns(packageName): setattr(sys.modules[parent], name, sys.modules[packageName]) -def yield_lines(strs): - """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, str): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s +def _nonblank(str): + return str and not str.startswith('#') + + +@functools.singledispatch +def yield_lines(iterable): + """Yield valid lines of a string or iterable""" + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) MODULE = re.compile(r"\w+(\.\w+)*$").match @@ -3037,12 +3047,12 @@ def reqs_for_extra(extra): if not req.marker or req.marker.evaluate({'extra': extra}): yield req - common = frozenset(reqs_for_extra(None)) + common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None))) dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: s_extra = safe_extra(extra.strip()) - dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) + dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common] return dm diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__about__.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__about__.py index 4d998578d..c359122f9 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__about__.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__about__.py @@ -1,7 +1,6 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from __future__ import absolute_import, division, print_function __all__ = [ "__title__", @@ -18,10 +17,10 @@ __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "20.4" +__version__ = "21.2" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" __license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "Copyright 2014-2019 %s" % __author__ +__copyright__ = "2014-2019 %s" % __author__ diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__init__.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__init__.py index a0cf67df5..3c50c5dcf 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__init__.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/__init__.py @@ -1,7 +1,6 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
-from __future__ import absolute_import, division, print_function from .__about__ import ( __author__, diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_compat.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_compat.py deleted file mode 100644 index e54bd4ede..000000000 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_compat.py +++ /dev/null @@ -1,38 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import sys - -from ._typing import TYPE_CHECKING - -if TYPE_CHECKING: # pragma: no cover - from typing import Any, Dict, Tuple, Type - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = (str,) -else: - string_types = (basestring,) - - -def with_metaclass(meta, *bases): - # type: (Type[Any], Tuple[Type[Any], ...]) -> Any - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): # type: ignore - def __new__(cls, name, this_bases, d): - # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_manylinux.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_manylinux.py new file mode 100644 index 000000000..4c379aa6f --- /dev/null +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_manylinux.py @@ -0,0 +1,301 @@ +import collections +import functools +import os +import re +import struct +import sys +import warnings +from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader: + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. + """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file: IO[bytes]) -> None: + def unpack(fmt: str) -> int: + try: + data = file.read(struct.calcsize(fmt)) + result: Tuple[int, ...] 
= struct.unpack(fmt, data) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result[0] + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H" + format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I" + format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header() -> Optional[_ELFFileHeader]: + try: + with open(sys.executable, "rb") as f: + elf_header = _ELFFileHeader(f) + except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf() -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686() -> bool: + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_abi(arch: str) -> bool: + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> Optional[str]: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.split() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> Optional[str]: + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _glibc_version_string() -> Optional[str]: + """Returns glibc version string, or None if not using glibc.""" + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> Tuple[int, int]: + """Parse glibc version. + + We use a regexp instead of str.split because we want to discard any + random junk that might come after the minor version -- this might happen + in patched/forked versions of glibc (e.g. Linaro's version of glibc + uses version strings like "2.20-2014.11"). See gh-3588. + """ + m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return -1, -1 + return int(m.group("major")), int(m.group("minor")) + + +@functools.lru_cache() +def _get_glibc_version() -> Tuple[int, int]: + version_str = _glibc_version_string() + if version_str is None: + return (-1, -1) + return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: + sys_glibc = _get_glibc_version() + if sys_glibc < version: + return False + # Check for presence of _manylinux module.
+ try: + import _manylinux # noqa + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + + +def platform_tags(linux: str, arch: str) -> Iterator[str]: + if not _have_compatible_abi(arch): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if arch in {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(tag, arch, glibc_version): + yield linux.replace("linux", tag) + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(legacy_tag, arch, glibc_version): + yield linux.replace("linux", legacy_tag) diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_musllinux.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_musllinux.py new file mode 100644 index 000000000..85450fafa --- /dev/null +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_musllinux.py @@ -0,0 +1,136 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. +""" + +import contextlib +import functools +import operator +import os +import re +import struct +import subprocess +import sys +from typing import IO, Iterator, NamedTuple, Optional, Tuple + + +def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: + return struct.unpack(fmt, f.read(struct.calcsize(fmt))) + + +def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: + """Detect musl libc location by parsing the Python executable. 
+ + Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca + ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html + """ + f.seek(0) + try: + ident = _read_unpacked(f, "16B") + except struct.error: + return None + if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. + return None + f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. + e_fmt, p_fmt, p_idx = { + 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. + 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. + }[ident[4]] + except KeyError: + return None + else: + p_get = operator.itemgetter(*p_idx) + + # Find the interpreter section and return its content. + try: + _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) + except struct.error: + return None + for i in range(e_phnum + 1): + f.seek(e_phoff + e_phentsize * i) + try: + p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) + except struct.error: + return None + if p_type != 3: # Not PT_INTERP. + continue + f.seek(p_offset) + interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") + if "musl" not in interpreter: + return None + return interpreter + return None + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> Optional[_MuslVersion]: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache() +def _get_musl_version(executable: str) -> Optional[_MuslVersion]: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + with contextlib.ExitStack() as stack: + try: + f = stack.enter_context(open(executable, "rb")) + except IOError: + return None + ld = _parse_ld_musl_from_elf(f) + if not ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(arch: str) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param arch: Should be the part of platform tag after the ``linux_`` + prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a + prerequisite for the current platform to be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. 
+ return + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_structures.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_structures.py index 800d5c558..951549753 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_structures.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_structures.py @@ -1,85 +1,66 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from __future__ import absolute_import, division, print_function -class InfinityType(object): - def __repr__(self): - # type: () -> str +class InfinityType: + def __repr__(self) -> str: return "Infinity" - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(repr(self)) - def __lt__(self, other): - # type: (object) -> bool + def __lt__(self, other: object) -> bool: return False - def __le__(self, other): - # type: (object) -> bool + def __le__(self, other: object) -> bool: return False - def __eq__(self, other): - # type: (object) -> bool + def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) - def __ne__(self, other): - # type: (object) -> bool + def __ne__(self, other: object) -> bool: return not isinstance(other, self.__class__) - def __gt__(self, other): - # type: (object) -> bool + def __gt__(self, other: object) -> bool: return True - def __ge__(self, other): - # type: (object) -> bool + def __ge__(self, other: object) -> bool: return True - def __neg__(self): - # type: (object) -> NegativeInfinityType + def __neg__(self: object) -> "NegativeInfinityType": return NegativeInfinity Infinity = InfinityType() -class NegativeInfinityType(object): - def __repr__(self): - # type: () -> str +class NegativeInfinityType: + def __repr__(self) -> str: return "-Infinity" - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(repr(self)) - def __lt__(self, other): - # type: (object) -> bool + def __lt__(self, other: object) -> bool: return True - def __le__(self, other): - # type: (object) -> bool + def __le__(self, other: object) -> bool: return True - def __eq__(self, other): - # type: (object) -> bool + def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) - def __ne__(self, other): - # type: (object) -> bool + def __ne__(self, other: object) -> bool: return not isinstance(other, self.__class__) - def __gt__(self, other): - # type: (object) -> bool + def __gt__(self, other: object) -> bool: return False - def __ge__(self, other): - # type: (object) -> bool + def __ge__(self, other: object) -> bool: return False - def __neg__(self): - # type: (object) -> InfinityType + def __neg__(self: object) -> InfinityType: return Infinity diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_typing.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_typing.py deleted file mode 100644 index 77a8b9185..000000000 --- 
a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/_typing.py +++ /dev/null @@ -1,48 +0,0 @@ -"""For neatly implementing static typing in packaging. - -`mypy` - the static type analysis tool we use - uses the `typing` module, which -provides core functionality fundamental to mypy's functioning. - -Generally, `typing` would be imported at runtime and used in that fashion - -it acts as a no-op at runtime and does not have any run-time overhead by -design. - -As it turns out, `typing` is not vendorable - it uses separate sources for -Python 2/Python 3. Thus, this codebase can not expect it to be present. -To work around this, mypy allows the typing import to be behind a False-y -optional to prevent it from running at runtime and type-comments can be used -to remove the need for the types to be accessible directly during runtime. - -This module provides the False-y guard in a nicely named fashion so that a -curious maintainer can reach here to read this. - -In packaging, all static-typing related imports should be guarded as follows: - - from packaging._typing import TYPE_CHECKING - - if TYPE_CHECKING: - from typing import ... - -Ref: https://github.com/python/mypy/issues/3216 -""" - -__all__ = ["TYPE_CHECKING", "cast"] - -# The TYPE_CHECKING constant defined by the typing module is False at runtime -# but True while type checking. -if False: # pragma: no cover - from typing import TYPE_CHECKING -else: - TYPE_CHECKING = False - -# typing's cast syntax requires calling typing.cast at runtime, but we don't -# want to import typing at runtime. Here, we inform the type checkers that -# we're importing `typing.cast` as `cast` and re-implement typing.cast's -# runtime behavior in a block that is ignored by type checkers. -if TYPE_CHECKING: # pragma: no cover - # not executed at runtime - from typing import cast -else: - # executed at runtime - def cast(type_, value): # noqa - return value diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/markers.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/markers.py index 76f4fe26d..3234819a0 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/markers.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/markers.py @@ -1,26 +1,26 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
-from __future__ import absolute_import, division, print_function import operator import os import platform import sys +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from ...extern.pyparsing import ( # noqa: N817 + Forward, + Group, + Literal as L, + ParseException, + ParseResults, + QuotedString, + ZeroOrMore, + stringEnd, + stringStart, +) -from ...extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd -from ...extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString -from ...extern.pyparsing import Literal as L # noqa - -from ._compat import string_types -from ._typing import TYPE_CHECKING -from .specifiers import Specifier, InvalidSpecifier - -if TYPE_CHECKING: # pragma: no cover - from typing import Any, Callable, Dict, List, Optional, Tuple, Union - - Operator = Callable[[str, str], bool] - +from .specifiers import InvalidSpecifier, Specifier __all__ = [ "InvalidMarker", @@ -30,6 +30,8 @@ "default_environment", ] +Operator = Callable[[str, str], bool] + class InvalidMarker(ValueError): """ @@ -50,39 +52,32 @@ class UndefinedEnvironmentName(ValueError): """ -class Node(object): - def __init__(self, value): - # type: (Any) -> None +class Node: + def __init__(self, value: Any) -> None: self.value = value - def __str__(self): - # type: () -> str + def __str__(self) -> str: return str(self.value) - def __repr__(self): - # type: () -> str - return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" - def serialize(self): - # type: () -> str + def serialize(self) -> str: raise NotImplementedError class Variable(Node): - def serialize(self): - # type: () -> str + def serialize(self) -> str: return str(self) class Value(Node): - def serialize(self): - # type: () -> str - return '"{0}"'.format(self) + def serialize(self) -> str: + return f'"{self}"' class Op(Node): - def serialize(self): - # type: () -> str + def serialize(self) -> str: return str(self) @@ -143,18 +138,18 @@ def serialize(self): MARKER = stringStart + MARKER_EXPR + stringEnd -def _coerce_parse_result(results): - # type: (Union[ParseResults, List[Any]]) -> List[Any] +def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: if isinstance(results, ParseResults): return [_coerce_parse_result(i) for i in results] else: return results -def _format_marker(marker, first=True): - # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str +def _format_marker( + marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True +) -> str: - assert isinstance(marker, (list, tuple, string_types)) + assert isinstance(marker, (list, tuple, str)) # Sometimes we have a structure like [[...]] which is a single item list # where the single item is itself it's own list. 
In that case we want skip @@ -179,7 +174,7 @@ def _format_marker(marker, first=True): return marker -_operators = { +_operators: Dict[str, Operator] = { "in": lambda lhs, rhs: lhs in rhs, "not in": lambda lhs, rhs: lhs not in rhs, "<": operator.lt, @@ -188,11 +183,10 @@ def _format_marker(marker, first=True): "!=": operator.ne, ">=": operator.ge, ">": operator.gt, -} # type: Dict[str, Operator] +} -def _eval_op(lhs, op, rhs): - # type: (str, Op, str) -> bool +def _eval_op(lhs: str, op: Op, rhs: str) -> bool: try: spec = Specifier("".join([op.serialize(), rhs])) except InvalidSpecifier: @@ -200,40 +194,36 @@ def _eval_op(lhs, op, rhs): else: return spec.contains(lhs) - oper = _operators.get(op.serialize()) # type: Optional[Operator] + oper: Optional[Operator] = _operators.get(op.serialize()) if oper is None: - raise UndefinedComparison( - "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) - ) + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") return oper(lhs, rhs) -class Undefined(object): +class Undefined: pass _undefined = Undefined() -def _get_env(environment, name): - # type: (Dict[str, str], str) -> str - value = environment.get(name, _undefined) # type: Union[str, Undefined] +def _get_env(environment: Dict[str, str], name: str) -> str: + value: Union[str, Undefined] = environment.get(name, _undefined) if isinstance(value, Undefined): raise UndefinedEnvironmentName( - "{0!r} does not exist in evaluation environment.".format(name) + f"{name!r} does not exist in evaluation environment." ) return value -def _evaluate_markers(markers, environment): - # type: (List[Any], Dict[str, str]) -> bool - groups = [[]] # type: List[List[bool]] +def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: + groups: List[List[bool]] = [[]] for marker in markers: - assert isinstance(marker, (list, tuple, string_types)) + assert isinstance(marker, (list, tuple, str)) if isinstance(marker, list): groups[-1].append(_evaluate_markers(marker, environment)) @@ -256,8 +246,7 @@ def _evaluate_markers(markers, environment): return any(all(item) for item in groups) -def format_full_version(info): - # type: (sys._version_info) -> str +def format_full_version(info: "sys._version_info") -> str: version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": @@ -265,18 +254,9 @@ def format_full_version(info): return version -def default_environment(): - # type: () -> Dict[str, str] - if hasattr(sys, "implementation"): - # Ignoring the `sys.implementation` reference for type checking due to - # mypy not liking that the attribute doesn't exist in Python 2.7 when - # run with the `--py27` flag. 
- iver = format_full_version(sys.implementation.version) # type: ignore - implementation_name = sys.implementation.name # type: ignore - else: - iver = "0" - implementation_name = "" - +def default_environment() -> Dict[str, str]: + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name return { "implementation_name": implementation_name, "implementation_version": iver, @@ -292,27 +272,23 @@ } -class Marker(object): - def __init__(self, marker): - # type: (str) -> None +class Marker: + def __init__(self, marker: str) -> None: try: self._markers = _coerce_parse_result(MARKER.parseString(marker)) except ParseException as e: - err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( - marker, marker[e.loc : e.loc + 8] + raise InvalidMarker( + f"Invalid marker: {marker!r}, parse error at " + f"{marker[e.loc : e.loc + 8]!r}" ) - raise InvalidMarker(err_str) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return _format_marker(self._markers) - def __repr__(self): - # type: () -> str - return "<Marker({0!r})>".format(str(self)) + def __repr__(self) -> str: + return f"<Marker('{self}')>" - def evaluate(self, environment=None): - # type: (Optional[Dict[str, str]]) -> bool + def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: """Evaluate a marker. Return the boolean from evaluating the given marker against the diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/requirements.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/requirements.py index 47134ba2b..315e4be21 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/requirements.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/requirements.py @@ -1,23 +1,28 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from __future__ import absolute_import, division, print_function -import string import re +import string +import urllib.parse +from typing import List, Optional as TOptional, Set + +from ...extern.pyparsing import ( # noqa + Combine, + Literal as L, + Optional, + ParseException, + Regex, + Word, + ZeroOrMore, + originalTextFor, + stringEnd, + stringStart, +) -from ...extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException -from ...extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine -from ...extern.pyparsing import Literal as L # noqa -from urllib import parse as urlparse - -from ._typing import TYPE_CHECKING from .markers import MARKER_EXPR, Marker from .specifiers import LegacySpecifier, Specifier, SpecifierSet -if TYPE_CHECKING: # pragma: no cover - from typing import List - class InvalidRequirement(ValueError): """ @@ -55,7 +60,7 @@ class InvalidRequirement(ValueError): VERSION_MANY = Combine( VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False )("_raw_spec") -_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) _VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") @@ -79,7 +84,7 @@ class InvalidRequirement(ValueError): REQUIREMENT.parseString("x[]") -class Requirement(object): +class Requirement: """Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier, @@ -92,54 +97,50 @@ class Requirement(object): # the thing as well as the version? What about the markers? # TODO: Can we normalize the name and extra name? - def __init__(self, requirement_string): - # type: (str) -> None + def __init__(self, requirement_string: str) -> None: try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: raise InvalidRequirement( - 'Parse error at "{0!r}": {1}'.format( - requirement_string[e.loc : e.loc + 8], e.msg - ) + f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}' ) - self.name = req.name + self.name: str = req.name if req.url: - parsed_url = urlparse.urlparse(req.url) + parsed_url = urllib.parse.urlparse(req.url) if parsed_url.scheme == "file": - if urlparse.urlunparse(parsed_url) != req.url: + if urllib.parse.urlunparse(parsed_url) != req.url: raise InvalidRequirement("Invalid URL given") elif not (parsed_url.scheme and parsed_url.netloc) or ( not parsed_url.scheme and not parsed_url.netloc ): - raise InvalidRequirement("Invalid URL: {0}".format(req.url)) - self.url = req.url + raise InvalidRequirement(f"Invalid URL: {req.url}") + self.url: TOptional[str] = req.url else: self.url = None - self.extras = set(req.extras.asList() if req.extras else []) - self.specifier = SpecifierSet(req.specifier) - self.marker = req.marker if req.marker else None + self.extras: Set[str] = set(req.extras.asList() if req.extras else []) + self.specifier: SpecifierSet = SpecifierSet(req.specifier) + self.marker: TOptional[Marker] = req.marker if req.marker else None - def __str__(self): - # type: () -> str - parts = [self.name] # type: List[str] + def __str__(self) -> str: + parts: List[str] = [self.name] if self.extras: - parts.append("[{0}]".format(",".join(sorted(self.extras)))) + formatted_extras = ",".join(sorted(self.extras)) + parts.append(f"[{formatted_extras}]") if self.specifier: parts.append(str(self.specifier)) if self.url: - parts.append("@ {0}".format(self.url)) + parts.append(f"@ {self.url}") if self.marker: parts.append(" ") if self.marker: - parts.append("; {0}".format(self.marker)) + parts.append(f"; {self.marker}") return "".join(parts) - def __repr__(self): - # type: () -> str - return "<Requirement({0!r})>".format(str(self)) + def __repr__(self) -> str: + return f"<Requirement('{self}')>" diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/specifiers.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/specifiers.py index fe09bb1db..ce66bd4ad 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/specifiers.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/specifiers.py @@ -1,34 +1,33 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details.
-from __future__ import absolute_import, division, print_function import abc import functools import itertools import re +import warnings +from typing import ( + Callable, + Dict, + Iterable, + Iterator, + List, + Optional, + Pattern, + Set, + Tuple, + TypeVar, + Union, +) -from ._compat import string_types, with_metaclass -from ._typing import TYPE_CHECKING from .utils import canonicalize_version -from .version import Version, LegacyVersion, parse - -if TYPE_CHECKING: # pragma: no cover - from typing import ( - List, - Dict, - Union, - Iterable, - Iterator, - Optional, - Callable, - Tuple, - FrozenSet, - ) +from .version import LegacyVersion, Version, parse - ParsedVersion = Union[Version, LegacyVersion] - UnparsedVersion = Union[Version, LegacyVersion, str] - CallableOperator = Callable[[ParsedVersion, str], bool] +ParsedVersion = Union[Version, LegacyVersion] +UnparsedVersion = Union[Version, LegacyVersion, str] +VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) +CallableOperator = Callable[[ParsedVersion, str], bool] class InvalidSpecifier(ValueError): @@ -37,64 +36,58 @@ class InvalidSpecifier(ValueError): """ -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore +class BaseSpecifier(metaclass=abc.ABCMeta): @abc.abstractmethod - def __str__(self): - # type: () -> str + def __str__(self) -> str: """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. """ @abc.abstractmethod - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod - def __eq__(self, other): - # type: (object) -> bool + def __eq__(self, other: object) -> bool: """ Returns a boolean representing whether or not the two Specifier like objects are equal. """ @abc.abstractmethod - def __ne__(self, other): - # type: (object) -> bool + def __ne__(self, other: object) -> bool: """ Returns a boolean representing whether or not the two Specifier like objects are not equal. """ @abc.abstractproperty - def prereleases(self): - # type: () -> Optional[bool] + def prereleases(self) -> Optional[bool]: """ Returns whether or not pre-releases as a whole are allowed by this specifier. """ @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None + def prereleases(self, value: bool) -> None: """ Sets whether or not pre-releases as a whole are allowed by this specifier. """ @abc.abstractmethod - def contains(self, item, prereleases=None): - # type: (str, Optional[bool]) -> bool + def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod - def filter(self, iterable, prereleases=None): - # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + def filter( + self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None + ) -> Iterable[VersionTypeVar]: """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
@@ -103,48 +96,43 @@ def filter(self, iterable, prereleases=None): class _IndividualSpecifier(BaseSpecifier): - _operators = {} # type: Dict[str, str] + _operators: Dict[str, str] = {} + _regex: Pattern[str] - def __init__(self, spec="", prereleases=None): - # type: (str, Optional[bool]) -> None + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: match = self._regex.search(spec) if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - self._spec = ( + self._spec: Tuple[str, str] = ( match.group("operator").strip(), match.group("version").strip(), - ) # type: Tuple[str, str] + ) # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: pre = ( - ", prereleases={0!r}".format(self.prereleases) + f", prereleases={self.prereleases!r}" if self._prereleases is not None else "" ) - return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) + return "<{}({!r}{})>".format(self.__class__.__name__, str(self), pre) - def __str__(self): - # type: () -> str - return "{0}{1}".format(*self._spec) + def __str__(self) -> str: + return "{}{}".format(*self._spec) @property - def _canonical_spec(self): - # type: () -> Tuple[str, Union[Version, str]] + def _canonical_spec(self) -> Tuple[str, str]: return self._spec[0], canonicalize_version(self._spec[1]) - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(self._canonical_spec) - def __eq__(self, other): - # type: (object) -> bool - if isinstance(other, string_types): + def __eq__(self, other: object) -> bool: + if isinstance(other, str): try: other = self.__class__(str(other)) except InvalidSpecifier: @@ -154,9 +142,8 @@ def __eq__(self, other): return self._canonical_spec == other._canonical_spec - def __ne__(self, other): - # type: (object) -> bool - if isinstance(other, string_types): + def __ne__(self, other: object) -> bool: + if isinstance(other, str): try: other = self.__class__(str(other)) except InvalidSpecifier: @@ -166,45 +153,39 @@ def __ne__(self, other): return self._spec != other._spec - def _get_operator(self, op): - # type: (str) -> CallableOperator - operator_callable = getattr( - self, "_compare_{0}".format(self._operators[op]) - ) # type: CallableOperator + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) return operator_callable - def _coerce_version(self, version): - # type: (UnparsedVersion) -> ParsedVersion + def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property - def operator(self): - # type: () -> str + def operator(self) -> str: return self._spec[0] @property - def version(self): - # type: () -> str + def version(self) -> str: return self._spec[1] @property - def prereleases(self): - # type: () -> Optional[bool] + def prereleases(self) -> Optional[bool]: return self._prereleases @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None + def prereleases(self, value: bool) -> None: self._prereleases = value - def __contains__(self, item): - # type: (str) -> bool + def __contains__(self, item: str) -> bool: return self.contains(item) - def contains(self, item, prereleases=None): - # type: (UnparsedVersion, Optional[bool]) -> bool + def 
contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: # Determine if prereleases are to be allowed or not. if prereleases is None: @@ -222,11 +203,12 @@ def contains(self, item, prereleases=None): # Actually do the comparison to determine if this item is contained # within this Specifier or not. - operator_callable = self._get_operator(self.operator) # type: CallableOperator + operator_callable: CallableOperator = self._get_operator(self.operator) return operator_callable(normalized_item, self.version) - def filter(self, iterable, prereleases=None): - # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + def filter( + self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None + ) -> Iterable[VersionTypeVar]: yielded = False found_prereleases = [] @@ -240,7 +222,7 @@ def filter(self, iterable, prereleases=None): if self.contains(parsed_version, **kw): # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later incase nothing + # prereleases, then we'll store it for later in case nothing # else matches this specifier. if parsed_version.is_prerelease and not ( prereleases or self.prereleases @@ -285,44 +267,46 @@ class LegacySpecifier(_IndividualSpecifier): ">": "greater_than", } - def _coerce_version(self, version): - # type: (Union[ParsedVersion, str]) -> LegacyVersion + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + super().__init__(spec, prereleases) + + warnings.warn( + "Creating a LegacyVersion has been deprecated and will be " + "removed in the next major release", + DeprecationWarning, + ) + + def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: if not isinstance(version, LegacyVersion): version = LegacyVersion(str(version)) return version - def _compare_equal(self, prospective, spec): - # type: (LegacyVersion, str) -> bool + def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: return prospective == self._coerce_version(spec) - def _compare_not_equal(self, prospective, spec): - # type: (LegacyVersion, str) -> bool + def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: return prospective != self._coerce_version(spec) - def _compare_less_than_equal(self, prospective, spec): - # type: (LegacyVersion, str) -> bool + def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: return prospective <= self._coerce_version(spec) - def _compare_greater_than_equal(self, prospective, spec): - # type: (LegacyVersion, str) -> bool + def _compare_greater_than_equal( + self, prospective: LegacyVersion, spec: str + ) -> bool: return prospective >= self._coerce_version(spec) - def _compare_less_than(self, prospective, spec): - # type: (LegacyVersion, str) -> bool + def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: return prospective < self._coerce_version(spec) - def _compare_greater_than(self, prospective, spec): - # type: (LegacyVersion, str) -> bool + def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: return prospective > self._coerce_version(spec) def _require_version_compare( - fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) -): - # type: (...) 
-> Callable[[Specifier, ParsedVersion, str], bool] + fn: Callable[["Specifier", ParsedVersion, str], bool] +) -> Callable[["Specifier", ParsedVersion, str], bool]: @functools.wraps(fn) - def wrapped(self, prospective, spec): - # type: (Specifier, ParsedVersion, str) -> bool + def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: if not isinstance(prospective, Version): return False return fn(self, prospective, spec) @@ -439,8 +423,7 @@ class Specifier(_IndividualSpecifier): } @_require_version_compare - def _compare_compatible(self, prospective, spec): - # type: (ParsedVersion, str) -> bool + def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to @@ -449,15 +432,9 @@ def _compare_compatible(self, prospective, spec): # the other specifiers. # We want everything but the last item in the version, but we want to - # ignore post and dev releases and we want to treat the pre-release as - # it's own separate segment. + # ignore suffix segments. prefix = ".".join( - list( - itertools.takewhile( - lambda x: (not x.startswith("post") and not x.startswith("dev")), - _version_split(spec), - ) - )[:-1] + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] ) # Add the prefix notation to the end of our string @@ -468,8 +445,7 @@ def _compare_compatible(self, prospective, spec): ) @_require_version_compare - def _compare_equal(self, prospective, spec): - # type: (ParsedVersion, str) -> bool + def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: # We need special logic to handle prefix matching if spec.endswith(".*"): @@ -509,13 +485,11 @@ def _compare_equal(self, prospective, spec): return prospective == spec_version @_require_version_compare - def _compare_not_equal(self, prospective, spec): - # type: (ParsedVersion, str) -> bool + def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: return not self._compare_equal(prospective, spec) @_require_version_compare - def _compare_less_than_equal(self, prospective, spec): - # type: (ParsedVersion, str) -> bool + def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from @@ -523,8 +497,9 @@ def _compare_less_than_equal(self, prospective, spec): return Version(prospective.public) <= Version(spec) @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - # type: (ParsedVersion, str) -> bool + def _compare_greater_than_equal( + self, prospective: ParsedVersion, spec: str + ) -> bool: # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from @@ -532,8 +507,7 @@ def _compare_greater_than_equal(self, prospective, spec): return Version(prospective.public) >= Version(spec) @_require_version_compare - def _compare_less_than(self, prospective, spec_str): - # type: (ParsedVersion, str) -> bool + def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: # Convert our spec to a Version instance, since we'll want to work with # it as a version. 
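
The compatible-release rewrite above is easier to check with concrete values: ~=2.2 expands to >=2.2,==2.*, and pre/post/dev suffixes are ignored when the ==-prefix is computed (the job of the _is_not_suffix() helper introduced later in this patch). A short sketch against the public Specifier class, following the PEP 440 semantics the code implements:

    from packaging.specifiers import Specifier

    compat = Specifier("~=2.2")
    assert compat.contains("2.2")      # satisfies both >=2.2 and ==2.*
    assert compat.contains("2.9")      # still inside the 2.* prefix
    assert not compat.contains("3.0")  # prefix differs
    assert not compat.contains("2.1")  # fails the >=2.2 half

    # Suffix segments are dropped before the prefix is built; per PEP 440,
    # ~=2.2.post3 means >=2.2.post3, ==2.*
    assert Specifier("~=2.2.post3").contains("2.3")
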
@@ -559,8 +533,7 @@ def _compare_less_than(self, prospective, spec_str): return True @_require_version_compare - def _compare_greater_than(self, prospective, spec_str): - # type: (ParsedVersion, str) -> bool + def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool: # Convert our spec to a Version instance, since we'll want to work with # it as a version. @@ -591,13 +564,11 @@ def _compare_greater_than(self, prospective, spec_str): # same version in the spec. return True - def _compare_arbitrary(self, prospective, spec): - # type: (Version, str) -> bool + def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: return str(prospective).lower() == str(spec).lower() @property - def prereleases(self): - # type: () -> bool + def prereleases(self) -> bool: # If there is an explicit prereleases set for this, then we'll just # blindly use that. @@ -622,17 +593,15 @@ def prereleases(self): return False @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None + def prereleases(self, value: bool) -> None: self._prereleases = value _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") -def _version_split(version): - # type: (str) -> List[str] - result = [] # type: List[str] +def _version_split(version: str) -> List[str]: + result: List[str] = [] for item in version.split("."): match = _prefix_regex.search(item) if match: @@ -642,8 +611,13 @@ def _version_split(version): return result -def _pad_version(left, right): - # type: (List[str], List[str]) -> Tuple[List[str], List[str]] +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: left_split, right_split = [], [] # Get the release segment of our versions @@ -662,8 +636,9 @@ def _pad_version(left, right): class SpecifierSet(BaseSpecifier): - def __init__(self, specifiers="", prereleases=None): - # type: (str, Optional[bool]) -> None + def __init__( + self, specifiers: str = "", prereleases: Optional[bool] = None + ) -> None: # Split on , to break each individual specifier into it's own item, and # strip each item to remove leading/trailing whitespace. @@ -671,7 +646,7 @@ def __init__(self, specifiers="", prereleases=None): # Parsed each individual specifier, attempting first to make it a # Specifier and falling back to a LegacySpecifier. - parsed = set() + parsed: Set[_IndividualSpecifier] = set() for specifier in split_specifiers: try: parsed.add(Specifier(specifier)) @@ -685,27 +660,23 @@ def __init__(self, specifiers="", prereleases=None): # we accept prereleases or not. 
self._prereleases = prereleases - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: pre = ( - ", prereleases={0!r}".format(self.prereleases) + f", prereleases={self.prereleases!r}" if self._prereleases is not None else "" ) - return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + return "<SpecifierSet({!r}{})>".format(str(self), pre) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return ",".join(sorted(str(s) for s in self._specs)) - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(self._specs) - def __and__(self, other): - # type: (Union[SpecifierSet, str]) -> SpecifierSet - if isinstance(other, string_types): + def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": + if isinstance(other, str): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -727,35 +698,30 @@ def __and__(self, other): return specifier - def __eq__(self, other): - # type: (object) -> bool - if isinstance(other, (string_types, _IndividualSpecifier)): + def __eq__(self, other: object) -> bool: + if isinstance(other, (str, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented return self._specs == other._specs - def __ne__(self, other): - # type: (object) -> bool - if isinstance(other, (string_types, _IndividualSpecifier)): + def __ne__(self, other: object) -> bool: + if isinstance(other, (str, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented return self._specs != other._specs - def __len__(self): - # type: () -> int + def __len__(self) -> int: return len(self._specs) - def __iter__(self): - # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] + def __iter__(self) -> Iterator[_IndividualSpecifier]: return iter(self._specs) @property - def prereleases(self): - # type: () -> Optional[bool] + def prereleases(self) -> Optional[bool]: # If we have been given an explicit prerelease modifier, then we'll # pass that through here. @@ -773,16 +739,15 @@ def prereleases(self): return any(s.prereleases for s in self._specs) @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None + def prereleases(self, value: bool) -> None: self._prereleases = value - def __contains__(self, item): - # type: (Union[ParsedVersion, str]) -> bool + def __contains__(self, item: UnparsedVersion) -> bool: return self.contains(item) - def contains(self, item, prereleases=None): - # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + def contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: # Ensure that our item is a Version or LegacyVersion instance. if not isinstance(item, (LegacyVersion, Version)): @@ -810,11 +775,8 @@ def contains(self, item, prereleases=None): return all(s.contains(item, prereleases=prereleases) for s in self._specs) def filter( - self, - iterable, # type: Iterable[Union[ParsedVersion, str]] - prereleases=None, # type: Optional[bool] - ): - # type: (...) -> Iterable[Union[ParsedVersion, str]] + self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None + ) -> Iterable[VersionTypeVar]: # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the @@ -833,8 +795,11 @@ def filter( # which will filter out any pre-releases, unless there are no final # releases, and which will filter out LegacyVersion in general.
else: - filtered = [] # type: List[Union[ParsedVersion, str]] - found_prereleases = [] # type: List[Union[ParsedVersion, str]] + filtered: List[VersionTypeVar] = [] + found_prereleases: List[VersionTypeVar] = [] + + item: UnparsedVersion + parsed_version: Union[Version, LegacyVersion] for item in iterable: # Ensure that we some kind of Version class for this item. diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/tags.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/tags.py index 9064910b8..e65890a90 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/tags.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/tags.py @@ -2,62 +2,44 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from __future__ import absolute_import - -import distutils.util - -try: - from importlib.machinery import EXTENSION_SUFFIXES -except ImportError: # pragma: no cover - import imp - - EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] - del imp import logging -import os import platform -import re -import struct import sys import sysconfig -import warnings - -from ._typing import TYPE_CHECKING, cast - -if TYPE_CHECKING: # pragma: no cover - from typing import ( - Dict, - FrozenSet, - IO, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Union, - ) - - PythonVersion = Sequence[int] - MacVersion = Tuple[int, int] - GlibcVersion = Tuple[int, int] - +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +from . import _manylinux, _musllinux logger = logging.getLogger(__name__) -INTERPRETER_SHORT_NAMES = { +PythonVersion = Sequence[int] +MacVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: Dict[str, str] = { "python": "py", # Generic. "cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy", -} # type: Dict[str, str] +} _32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 -class Tag(object): +class Tag: """ A representation of the tag triple for a wheel. @@ -65,55 +47,53 @@ class Tag(object): is also supported. """ - __slots__ = ["_interpreter", "_abi", "_platform"] + __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - def __init__(self, interpreter, abi, platform): - # type: (str, str, str) -> None + def __init__(self, interpreter: str, abi: str, platform: str) -> None: self._interpreter = interpreter.lower() self._abi = abi.lower() self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. 
+ self._hash = hash((self._interpreter, self._abi, self._platform)) @property - def interpreter(self): - # type: () -> str + def interpreter(self) -> str: return self._interpreter @property - def abi(self): - # type: () -> str + def abi(self) -> str: return self._abi @property - def platform(self): - # type: () -> str + def platform(self) -> str: return self._platform - def __eq__(self, other): - # type: (object) -> bool + def __eq__(self, other: object) -> bool: if not isinstance(other, Tag): return NotImplemented return ( - (self.platform == other.platform) - and (self.abi == other.abi) - and (self.interpreter == other.interpreter) + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) ) - def __hash__(self): - # type: () -> int - return hash((self._interpreter, self._abi, self._platform)) + def __hash__(self) -> int: + return self._hash - def __str__(self): - # type: () -> str - return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) -def parse_tag(tag): - # type: (str) -> FrozenSet[Tag] +def parse_tag(tag: str) -> FrozenSet[Tag]: """ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. @@ -129,24 +109,7 @@ def parse_tag(tag): return frozenset(tags) -def _warn_keyword_parameter(func_name, kwargs): - # type: (str, Dict[str, bool]) -> bool - """ - Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. - """ - if not kwargs: - return False - elif len(kwargs) > 1 or "warn" not in kwargs: - kwargs.pop("warn", None) - arg = next(iter(kwargs.keys())) - raise TypeError( - "{}() got an unexpected keyword argument {!r}".format(func_name, arg) - ) - return kwargs["warn"] - - -def _get_config_var(name, warn=False): - # type: (str, bool) -> Union[int, str, None] +def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: value = sysconfig.get_config_var(name) if value is None and warn: logger.debug( @@ -155,13 +118,11 @@ def _get_config_var(name, warn=False): return value -def _normalize_string(string): - # type: (str) -> str +def _normalize_string(string: str) -> str: return string.replace(".", "_").replace("-", "_") -def _abi3_applies(python_version): - # type: (PythonVersion) -> bool +def _abi3_applies(python_version: PythonVersion) -> bool: """ Determine if the Python version supports abi3. @@ -170,8 +131,7 @@ def _abi3_applies(python_version): return len(python_version) > 1 and tuple(python_version) >= (3, 2) -def _cpython_abis(py_version, warn=False): - # type: (PythonVersion, bool) -> List[str] +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: py_version = tuple(py_version) # To allow for version comparison. abis = [] version = _version_nodot(py_version[:2]) @@ -197,7 +157,7 @@ def _cpython_abis(py_version, warn=False): elif debug: # Debug builds can also load "normal" extension modules. # We can also assume no UCS-4 or pymalloc requirement. 
- abis.append("cp{version}".format(version=version)) + abis.append(f"cp{version}") abis.insert( 0, "cp{version}{debug}{pymalloc}{ucs4}".format( @@ -208,12 +168,12 @@ def _cpython_abis(py_version, warn=False): def cpython_tags( - python_version=None, # type: Optional[PythonVersion] - abis=None, # type: Optional[Iterable[str]] - platforms=None, # type: Optional[Iterable[str]] - **kwargs # type: bool -): - # type: (...) -> Iterator[Tag] + python_version: Optional[PythonVersion] = None, + abis: Optional[Iterable[str]] = None, + platforms: Optional[Iterable[str]] = None, + *, + warn: bool = False, +) -> Iterator[Tag]: """ Yields the tags for a CPython interpreter. @@ -229,7 +189,6 @@ def cpython_tags( If 'abi3' or 'none' are specified in 'abis' then they will be yielded at their normal position and not at the beginning. """ - warn = _warn_keyword_parameter("cpython_tags", kwargs) if not python_version: python_version = sys.version_info[:2] @@ -248,15 +207,13 @@ def cpython_tags( except ValueError: pass - platforms = list(platforms or _platform_tags()) + platforms = list(platforms or platform_tags()) for abi in abis: for platform_ in platforms: yield Tag(interpreter, abi, platform_) if _abi3_applies(python_version): - for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): - yield tag - for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): - yield tag + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) if _abi3_applies(python_version): for minor_version in range(python_version[1] - 1, 1, -1): @@ -267,20 +224,19 @@ def cpython_tags( yield Tag(interpreter, "abi3", platform_) -def _generic_abi(): - # type: () -> Iterator[str] +def _generic_abi() -> Iterator[str]: abi = sysconfig.get_config_var("SOABI") if abi: yield _normalize_string(abi) def generic_tags( - interpreter=None, # type: Optional[str] - abis=None, # type: Optional[Iterable[str]] - platforms=None, # type: Optional[Iterable[str]] - **kwargs # type: bool -): - # type: (...) -> Iterator[Tag] + interpreter: Optional[str] = None, + abis: Optional[Iterable[str]] = None, + platforms: Optional[Iterable[str]] = None, + *, + warn: bool = False, +) -> Iterator[Tag]: """ Yields the tags for a generic interpreter. @@ -289,14 +245,13 @@ def generic_tags( The "none" ABI will be added if it was not explicitly provided. """ - warn = _warn_keyword_parameter("generic_tags", kwargs) if not interpreter: interp_name = interpreter_name() interp_version = interpreter_version(warn=warn) interpreter = "".join([interp_name, interp_version]) if abis is None: abis = _generic_abi() - platforms = list(platforms or _platform_tags()) + platforms = list(platforms or platform_tags()) abis = list(abis) if "none" not in abis: abis.append("none") @@ -305,8 +260,7 @@ def generic_tags( yield Tag(interpreter, abi, platform_) -def _py_interpreter_range(py_version): - # type: (PythonVersion) -> Iterator[str] +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: """ Yields Python versions in descending order. @@ -322,11 +276,10 @@ def _py_interpreter_range(py_version): def compatible_tags( - python_version=None, # type: Optional[PythonVersion] - interpreter=None, # type: Optional[str] - platforms=None, # type: Optional[Iterable[str]] -): - # type: (...) 
-> Iterator[Tag] + python_version: Optional[PythonVersion] = None, + interpreter: Optional[str] = None, + platforms: Optional[Iterable[str]] = None, +) -> Iterator[Tag]: """ Yields the sequence of tags that are compatible with a specific version of Python. @@ -337,7 +290,7 @@ def compatible_tags( """ if not python_version: python_version = sys.version_info[:2] - platforms = list(platforms or _platform_tags()) + platforms = list(platforms or platform_tags()) for version in _py_interpreter_range(python_version): for platform_ in platforms: yield Tag(version, "none", platform_) @@ -347,8 +300,7 @@ def compatible_tags( yield Tag(version, "none", "any") -def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): - # type: (str, bool) -> str +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: if not is_32bit: return arch @@ -358,8 +310,7 @@ def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): return "i386" -def _mac_binary_formats(version, cpu_arch): - # type: (MacVersion, str) -> List[str] +def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: formats = [cpu_arch] if cpu_arch == "x86_64": if version < (10, 4): @@ -382,12 +333,18 @@ def _mac_binary_formats(version, cpu_arch): return [] formats.extend(["fat32", "fat"]) - formats.append("universal") + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + return formats -def mac_platforms(version=None, arch=None): - # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] +def mac_platforms( + version: Optional[MacVersion] = None, arch: Optional[str] = None +) -> Iterator[str]: """ Yields the platform tags for a macOS system. @@ -396,7 +353,7 @@ def mac_platforms(version=None, arch=None): generate platform tags for. Both parameters default to the appropriate value for the current system. """ - version_str, _, cpu_arch = platform.mac_ver() # type: ignore + version_str, _, cpu_arch = platform.mac_ver() if version is None: version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) else: @@ -405,283 +362,76 @@ def mac_platforms(version=None, arch=None): arch = _mac_arch(cpu_arch) else: arch = arch - for minor_version in range(version[1], -1, -1): - compat_version = version[0], minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -# From PEP 513. -def _is_manylinux_compatible(name, glibc_version): - # type: (str, GlibcVersion) -> bool - # Check for presence of _manylinux module. - try: - import _manylinux # noqa - - return bool(getattr(_manylinux, name + "_compatible")) - except (ImportError, AttributeError): - # Fall through to heuristic check below. - pass - return _have_compatible_glibc(*glibc_version) - - -def _glibc_version_string(): - # type: () -> Optional[str] - # Returns glibc version string, or None if not using glibc. - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _glibc_version_string_confstr(): - # type: () -> Optional[str] - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. 
- # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 - try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". - version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 - "CS_GNU_LIBC_VERSION" - ) - assert version_string is not None - _, version = version_string.split() # type: Tuple[str, str] - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version + if (10, 0) <= version and version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. + for minor_version in range(version[1], -1, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=10, minor=minor_version, binary_format=binary_format + ) + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + for major_version in range(version[0], 10, -1): + compat_version = major_version, 0 + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=major_version, minor=0, binary_format=binary_format + ) -def _glibc_version_string_ctypes(): - # type: () -> Optional[str] - """ - Fallback implementation of glibc_version_string using ctypes. - """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # Note: typeshed is wrong here so we are ignoring this line. - process_namespace = ctypes.CDLL(None) # type: ignore - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() # type: str - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -# Separated out from have_compatible_glibc for easier unit testing. -def _check_glibc_version(version_str, required_major, minimum_minor): - # type: (str, int, int) -> bool - # Parse string and check against requested version. - # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. - m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) - if not m: - warnings.warn( - "Expected glibc version with 2 components major.minor," - " got: %s" % version_str, - RuntimeWarning, - ) - return False - return ( - int(m.group("major")) == required_major - and int(m.group("minor")) >= minimum_minor - )
+ # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + else: + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_format = "universal2" + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) -def _have_compatible_glibc(required_major, minimum_minor): - # type: (int, int) -> bool - version_str = _glibc_version_string() - if version_str is None: - return False - return _check_glibc_version(version_str, required_major, minimum_minor) - - -# Python does not provide platform information at sufficient granularity to -# identify the architecture of the running executable in some cases, so we -# determine it dynamically by reading the information from the running -# process. This only applies on Linux, which uses the ELF format. -class _ELFFileHeader(object): - # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header - class _InvalidELFFileHeader(ValueError): - """ - An invalid ELF file header was found. - """ - - ELF_MAGIC_NUMBER = 0x7F454C46 - ELFCLASS32 = 1 - ELFCLASS64 = 2 - ELFDATA2LSB = 1 - ELFDATA2MSB = 2 - EM_386 = 3 - EM_S390 = 22 - EM_ARM = 40 - EM_X86_64 = 62 - EF_ARM_ABIMASK = 0xFF000000 - EF_ARM_ABI_VER5 = 0x05000000 - EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - def __init__(self, file): - # type: (IO[bytes]) -> None - def unpack(fmt): - # type: (str) -> int - try: - (result,) = struct.unpack( - fmt, file.read(struct.calcsize(fmt)) - ) # type: (int, ) - except struct.error: - raise _ELFFileHeader._InvalidELFFileHeader() - return result - - self.e_ident_magic = unpack(">I") - if self.e_ident_magic != self.ELF_MAGIC_NUMBER: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_class = unpack("B") - if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_data = unpack("B") - if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_version = unpack("B") - self.e_ident_osabi = unpack("B") - self.e_ident_abiversion = unpack("B") - self.e_ident_pad = file.read(7) - format_h = "H" - format_i = "I" - format_q = "Q" - format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q - self.e_type = unpack(format_h) - self.e_machine = unpack(format_h) - self.e_version = unpack(format_i) - self.e_entry = unpack(format_p) - self.e_phoff = unpack(format_p) - self.e_shoff = unpack(format_p) - self.e_flags = unpack(format_i) - self.e_ehsize = unpack(format_h) - self.e_phentsize = unpack(format_h) - self.e_phnum = unpack(format_h) - self.e_shentsize = unpack(format_h) - self.e_shnum = unpack(format_h) - self.e_shstrndx = unpack(format_h) - - -def _get_elf_header(): - # type: () -> Optional[_ELFFileHeader] - try: - with open(sys.executable, "rb") as f: - elf_header = _ELFFileHeader(f) - except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): - 
return None - return elf_header - - -def _is_linux_armhf(): - # type: () -> bool - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_ARM - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABIMASK - ) == elf_header.EF_ARM_ABI_VER5 - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD - ) == elf_header.EF_ARM_ABI_FLOAT_HARD - return result - - -def _is_linux_i686(): - # type: () -> bool - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_386 - return result - - -def _have_compatible_manylinux_abi(arch): - # type: (str) -> bool - if arch == "armv7l": - return _is_linux_armhf() - if arch == "i686": - return _is_linux_i686() - return True - - -def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): - # type: (bool) -> Iterator[str] - linux = _normalize_string(distutils.util.get_platform()) +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) if is_32bit: if linux == "linux_x86_64": linux = "linux_i686" elif linux == "linux_aarch64": linux = "linux_armv7l" - manylinux_support = [] _, arch = linux.split("_", 1) - if _have_compatible_manylinux_abi(arch): - if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}: - manylinux_support.append( - ("manylinux2014", (2, 17)) - ) # CentOS 7 w/ glibc 2.17 (PEP 599) - if arch in {"x86_64", "i686"}: - manylinux_support.append( - ("manylinux2010", (2, 12)) - ) # CentOS 6 w/ glibc 2.12 (PEP 571) - manylinux_support.append( - ("manylinux1", (2, 5)) - ) # CentOS 5 w/ glibc 2.5 (PEP 513) - manylinux_support_iter = iter(manylinux_support) - for name, glibc_version in manylinux_support_iter: - if _is_manylinux_compatible(name, glibc_version): - yield linux.replace("linux", name) - break - # Support for a later manylinux implies support for an earlier version. - for name, _ in manylinux_support_iter: - yield linux.replace("linux", name) + yield from _manylinux.platform_tags(linux, arch) + yield from _musllinux.platform_tags(arch) yield linux -def _generic_platforms(): - # type: () -> Iterator[str] - yield _normalize_string(distutils.util.get_platform()) +def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) -def _platform_tags(): - # type: () -> Iterator[str] +def platform_tags() -> Iterator[str]: """ Provides the platform tags for this installation. """ @@ -693,25 +443,18 @@ def _platform_tags(): return _generic_platforms() -def interpreter_name(): - # type: () -> str +def interpreter_name() -> str: """ Returns the name of the running interpreter. """ - try: - name = sys.implementation.name # type: ignore - except AttributeError: # pragma: no cover - # Python 2.7 compatibility. - name = platform.python_implementation().lower() + name = sys.implementation.name return INTERPRETER_SHORT_NAMES.get(name) or name -def interpreter_version(**kwargs): - # type: (bool) -> str +def interpreter_version(*, warn: bool = False) -> str: """ Returns the version of the running interpreter. 
""" - warn = _warn_keyword_parameter("interpreter_version", kwargs) version = _get_config_var("py_version_nodot", warn=warn) if version: version = str(version) @@ -720,32 +463,22 @@ def interpreter_version(**kwargs): return version -def _version_nodot(version): - # type: (PythonVersion) -> str - if any(v >= 10 for v in version): - sep = "_" - else: - sep = "" - return sep.join(map(str, version)) +def _version_nodot(version: PythonVersion) -> str: + return "".join(map(str, version)) -def sys_tags(**kwargs): - # type: (bool) -> Iterator[Tag] +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: """ Returns the sequence of tag triples for the running interpreter. The order of the sequence corresponds to priority order for the interpreter, from most to least important. """ - warn = _warn_keyword_parameter("sys_tags", kwargs) interp_name = interpreter_name() if interp_name == "cp": - for tag in cpython_tags(warn=warn): - yield tag + yield from cpython_tags(warn=warn) else: - for tag in generic_tags(): - yield tag + yield from generic_tags() - for tag in compatible_tags(): - yield tag + yield from compatible_tags() diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/utils.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/utils.py index 19579c1a0..bab11b80c 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/utils.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/utils.py @@ -1,65 +1,136 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from __future__ import absolute_import, division, print_function import re +from typing import FrozenSet, NewType, Tuple, Union, cast -from ._typing import TYPE_CHECKING, cast +from .tags import Tag, parse_tag from .version import InvalidVersion, Version -if TYPE_CHECKING: # pragma: no cover - from typing import NewType, Union +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. + """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ - NormalizedName = NewType("NormalizedName", str) _canonicalize_regex = re.compile(r"[-_.]+") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") -def canonicalize_name(name): - # type: (str) -> NormalizedName +def canonicalize_name(name: str) -> NormalizedName: # This is taken from PEP 503. value = _canonicalize_regex.sub("-", name).lower() - return cast("NormalizedName", value) + return cast(NormalizedName, value) -def canonicalize_version(_version): - # type: (str) -> Union[Version, str] +def canonicalize_version(version: Union[Version, str]) -> str: """ This is very similar to Version.__str__, but has one subtle difference with the way it handles the release segment. 
""" - - try: - version = Version(_version) - except InvalidVersion: - # Legacy versions cannot be normalized - return _version + if isinstance(version, str): + try: + parsed = Version(version) + except InvalidVersion: + # Legacy versions cannot be normalized + return version + else: + parsed = version parts = [] # Epoch - if version.epoch != 0: - parts.append("{0}!".format(version.epoch)) + if parsed.epoch != 0: + parts.append(f"{parsed.epoch}!") # Release segment # NB: This strips trailing '.0's to normalize - parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) + parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) # Pre-release - if version.pre is not None: - parts.append("".join(str(x) for x in version.pre)) + if parsed.pre is not None: + parts.append("".join(str(x) for x in parsed.pre)) # Post-release - if version.post is not None: - parts.append(".post{0}".format(version.post)) + if parsed.post is not None: + parts.append(f".post{parsed.post}") # Development release - if version.dev is not None: - parts.append(".dev{0}".format(version.dev)) + if parsed.dev is not None: + parts.append(f".dev{parsed.dev}") # Local version segment - if version.local is not None: - parts.append("+{0}".format(version.local)) + if parsed.local is not None: + parts.append(f"+{parsed.local}") return "".join(parts) + + +def parse_wheel_filename( + filename: str, +) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + f"Invalid wheel filename (extension must be '.whl'): {filename}" + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + f"Invalid wheel filename (wrong number of parts): {filename}" + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name + if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename(f"Invalid project name: {filename}") + name = canonicalize_name(name_part) + version = Version(parts[1]) + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + f"Invalid build number: {build_part} in '{filename}'" + ) + build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: + if filename.endswith(".tar.gz"): + file_stem = filename[: -len(".tar.gz")] + elif filename.endswith(".zip"): + file_stem = filename[: -len(".zip")] + else: + raise InvalidSdistFilename( + f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" + f" {filename}" + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we split on the last dash. 
+ name_part, sep, version_part = file_stem.rpartition("-") + if not sep: + raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") + + name = canonicalize_name(name_part) + version = Version(version_part) + return (name, version) diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/version.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/version.py index 00371e86a..de9a09a4e 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/version.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/packaging/version.py @@ -1,52 +1,45 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. -from __future__ import absolute_import, division, print_function import collections import itertools import re +import warnings +from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union -from ._structures import Infinity, NegativeInfinity -from ._typing import TYPE_CHECKING - -if TYPE_CHECKING: # pragma: no cover - from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union - - from ._structures import InfinityType, NegativeInfinityType - - InfiniteTypes = Union[InfinityType, NegativeInfinityType] - PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] - SubLocalType = Union[InfiniteTypes, int, str] - LocalType = Union[ - NegativeInfinityType, - Tuple[ - Union[ - SubLocalType, - Tuple[SubLocalType, str], - Tuple[NegativeInfinityType, SubLocalType], - ], - ..., - ], - ] - CmpKey = Tuple[ - int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType - ] - LegacyCmpKey = Tuple[int, Tuple[str, ...]] - VersionComparisonMethod = Callable[ - [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool - ] +from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType __all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] +InfiniteTypes = Union[InfinityType, NegativeInfinityType] +PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] +SubLocalType = Union[InfiniteTypes, int, str] +LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], +] +CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType +] +LegacyCmpKey = Tuple[int, Tuple[str, ...]] +VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool +] _Version = collections.namedtuple( "_Version", ["epoch", "release", "dev", "pre", "post", "local"] ) -def parse(version): - # type: (str) -> Union[LegacyVersion, Version] +def parse(version: str) -> Union["LegacyVersion", "Version"]: """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is @@ -64,112 +57,111 @@ class InvalidVersion(ValueError): """ -class _BaseVersion(object): - _key = None # type: Union[CmpKey, LegacyCmpKey] +class _BaseVersion: + _key: Union[CmpKey, LegacyCmpKey] - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(self._key) - def __lt__(self, other): - # type: (_BaseVersion) -> bool - return self._compare(other, lambda s, o: s < o) + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to 
avoid adding overhead function calls. + def __lt__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key < other._key + + def __le__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key <= other._key - def __le__(self, other): - # type: (_BaseVersion) -> bool - return self._compare(other, lambda s, o: s <= o) + def __eq__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented - def __eq__(self, other): - # type: (object) -> bool - return self._compare(other, lambda s, o: s == o) + return self._key == other._key - def __ge__(self, other): - # type: (_BaseVersion) -> bool - return self._compare(other, lambda s, o: s >= o) + def __ge__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented - def __gt__(self, other): - # type: (_BaseVersion) -> bool - return self._compare(other, lambda s, o: s > o) + return self._key >= other._key - def __ne__(self, other): - # type: (object) -> bool - return self._compare(other, lambda s, o: s != o) + def __gt__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key > other._key - def _compare(self, other, method): - # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] + def __ne__(self, other: object) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented - return method(self._key, other._key) + return self._key != other._key class LegacyVersion(_BaseVersion): - def __init__(self, version): - # type: (str) -> None + def __init__(self, version: str) -> None: self._version = str(version) self._key = _legacy_cmpkey(self._version) - def __str__(self): - # type: () -> str + warnings.warn( + "Creating a LegacyVersion has been deprecated and will be " + "removed in the next major release", + DeprecationWarning, + ) + + def __str__(self) -> str: return self._version - def __repr__(self): - # type: () -> str - return "<LegacyVersion({0})>".format(repr(str(self))) + def __repr__(self) -> str: + return f"<LegacyVersion('{self}')>" @property - def public(self): - # type: () -> str + def public(self) -> str: return self._version @property - def base_version(self): - # type: () -> str + def base_version(self) -> str: return self._version @property - def epoch(self): - # type: () -> int + def epoch(self) -> int: return -1 @property - def release(self): - # type: () -> None + def release(self) -> None: return None @property - def pre(self): - # type: () -> None + def pre(self) -> None: return None @property - def post(self): - # type: () -> None + def post(self) -> None: return None @property - def dev(self): - # type: () -> None + def dev(self) -> None: return None @property - def local(self): - # type: () -> None + def local(self) -> None: return None @property - def is_prerelease(self): - # type: () -> bool + def is_prerelease(self) -> bool: return False @property - def is_postrelease(self): - # type: () -> bool + def is_postrelease(self) -> bool: return False @property - def is_devrelease(self): - # type: () -> bool + def is_devrelease(self) -> bool: return False @@ -184,8 +176,7 @@ def is_devrelease(self): } -def _parse_version_parts(s): - # type: (str) -> Iterator[str] +def _parse_version_parts(s: str) -> Iterator[str]: for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) @@ -202,8 +193,7 @@ def _parse_version_parts(s): yield
"*final" -def _legacy_cmpkey(version): - # type: (str) -> LegacyCmpKey +def _legacy_cmpkey(version: str) -> LegacyCmpKey: # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, @@ -213,7 +203,7 @@ def _legacy_cmpkey(version): # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. - parts = [] # type: List[str] + parts: List[str] = [] for part in _parse_version_parts(version.lower()): if part.startswith("*"): # remove "-" before a prerelease tag @@ -268,13 +258,12 @@ class Version(_BaseVersion): _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) - def __init__(self, version): - # type: (str) -> None + def __init__(self, version: str) -> None: # Validate the version and parse it into pieces match = self._regex.search(version) if not match: - raise InvalidVersion("Invalid version: '{0}'".format(version)) + raise InvalidVersion(f"Invalid version: '{version}'") # Store the parsed out pieces of the version self._version = _Version( @@ -298,17 +287,15 @@ def __init__(self, version): self._version.local, ) - def __repr__(self): - # type: () -> str - return "".format(repr(str(self))) + def __repr__(self) -> str: + return f"" - def __str__(self): - # type: () -> str + def __str__(self) -> str: parts = [] # Epoch if self.epoch != 0: - parts.append("{0}!".format(self.epoch)) + parts.append(f"{self.epoch}!") # Release segment parts.append(".".join(str(x) for x in self.release)) @@ -319,67 +306,59 @@ def __str__(self): # Post-release if self.post is not None: - parts.append(".post{0}".format(self.post)) + parts.append(f".post{self.post}") # Development release if self.dev is not None: - parts.append(".dev{0}".format(self.dev)) + parts.append(f".dev{self.dev}") # Local version segment if self.local is not None: - parts.append("+{0}".format(self.local)) + parts.append(f"+{self.local}") return "".join(parts) @property - def epoch(self): - # type: () -> int - _epoch = self._version.epoch # type: int + def epoch(self) -> int: + _epoch: int = self._version.epoch return _epoch @property - def release(self): - # type: () -> Tuple[int, ...] - _release = self._version.release # type: Tuple[int, ...] + def release(self) -> Tuple[int, ...]: + _release: Tuple[int, ...] 
= self._version.release return _release @property - def pre(self): - # type: () -> Optional[Tuple[str, int]] - _pre = self._version.pre # type: Optional[Tuple[str, int]] + def pre(self) -> Optional[Tuple[str, int]]: + _pre: Optional[Tuple[str, int]] = self._version.pre return _pre @property - def post(self): - # type: () -> Optional[Tuple[str, int]] + def post(self) -> Optional[int]: return self._version.post[1] if self._version.post else None @property - def dev(self): - # type: () -> Optional[Tuple[str, int]] + def dev(self) -> Optional[int]: return self._version.dev[1] if self._version.dev else None @property - def local(self): - # type: () -> Optional[str] + def local(self) -> Optional[str]: if self._version.local: return ".".join(str(x) for x in self._version.local) else: return None @property - def public(self): - # type: () -> str + def public(self) -> str: return str(self).split("+", 1)[0] @property - def base_version(self): - # type: () -> str + def base_version(self) -> str: parts = [] # Epoch if self.epoch != 0: - parts.append("{0}!".format(self.epoch)) + parts.append(f"{self.epoch}!") # Release segment parts.append(".".join(str(x) for x in self.release)) @@ -387,41 +366,33 @@ def base_version(self): return "".join(parts) @property - def is_prerelease(self): - # type: () -> bool + def is_prerelease(self) -> bool: return self.dev is not None or self.pre is not None @property - def is_postrelease(self): - # type: () -> bool + def is_postrelease(self) -> bool: return self.post is not None @property - def is_devrelease(self): - # type: () -> bool + def is_devrelease(self) -> bool: return self.dev is not None @property - def major(self): - # type: () -> int + def major(self) -> int: return self.release[0] if len(self.release) >= 1 else 0 @property - def minor(self): - # type: () -> int + def minor(self) -> int: return self.release[1] if len(self.release) >= 2 else 0 @property - def micro(self): - # type: () -> int + def micro(self) -> int: return self.release[2] if len(self.release) >= 3 else 0 def _parse_letter_version( - letter, # type: str - number, # type: Union[str, bytes, SupportsInt] -): - # type: (...) -> Optional[Tuple[str, int]] + letter: str, number: Union[str, bytes, SupportsInt] +) -> Optional[Tuple[str, int]]: if letter: # We consider there to be an implicit 0 in a pre-release if there is @@ -458,8 +429,7 @@ def _parse_letter_version( _local_version_separators = re.compile(r"[\._-]") -def _parse_local_version(local): - # type: (str) -> Optional[LocalType] +def _parse_local_version(local: str) -> Optional[LocalType]: """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). """ @@ -472,14 +442,13 @@ def _parse_local_version(local): def _cmpkey( - epoch, # type: int - release, # type: Tuple[int, ...] - pre, # type: Optional[Tuple[str, int]] - post, # type: Optional[Tuple[str, int]] - dev, # type: Optional[Tuple[str, int]] - local, # type: Optional[Tuple[SubLocalType]] -): - # type: (...) -> CmpKey + epoch: int, + release: Tuple[int, ...], + pre: Optional[Tuple[str, int]], + post: Optional[Tuple[str, int]], + dev: Optional[Tuple[str, int]], + local: Optional[Tuple[SubLocalType]], +) -> CmpKey: # When we compare a release version, we want to compare it with all of the # trailing zeros removed. So we'll use a reverse the list, drop all the now @@ -495,7 +464,7 @@ def _cmpkey( # if there is not a pre or a post segment. If we have one of those then # the normal sorting rules will handle this case correctly. 
if pre is None and post is None and dev is not None: - _pre = NegativeInfinity # type: PrePostDevType + _pre: PrePostDevType = NegativeInfinity # Versions without a pre-release (except as noted above) should sort after # those with one. elif pre is None: @@ -505,21 +474,21 @@ def _cmpkey( # Versions without a post segment should sort before those with one. if post is None: - _post = NegativeInfinity # type: PrePostDevType + _post: PrePostDevType = NegativeInfinity else: _post = post # Versions without a development segment should sort after those with one. if dev is None: - _dev = Infinity # type: PrePostDevType + _dev: PrePostDevType = Infinity else: _dev = dev if local is None: # Versions without a local segment should sort before those with one. - _local = NegativeInfinity # type: LocalType + _local: LocalType = NegativeInfinity else: # Versions with a local segment need that segment parsed to implement # the sorting rules in PEP440. diff --git a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/pyparsing.py b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/pyparsing.py index 4cae78838..cf75e1e5f 100644 --- a/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/pyparsing.py +++ b/src/main/python/pybuilder/_vendor/pkg_resources/_vendor/pyparsing.py @@ -1625,7 +1625,7 @@ def parseString( self, instring, parseAll=False ): (see L{I{parseWithTabs}}) - define your parse action using the full C{(s,loc,toks)} signature, and reference the input string using the parse action's C{s} argument - - explicitly expand the tabs in your input string before calling + - explictly expand the tabs in your input string before calling C{parseString} Example:: diff --git a/src/main/python/pybuilder/_vendor/platformdirs/__init__.py b/src/main/python/pybuilder/_vendor/platformdirs/__init__.py index c4f30e128..26032a3b3 100644 --- a/src/main/python/pybuilder/_vendor/platformdirs/__init__.py +++ b/src/main/python/pybuilder/_vendor/platformdirs/__init__.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Optional, Type, Union if TYPE_CHECKING: - from ..typing_extensions import Literal # pragma: no cover + from typing_extensions import Literal # pragma: no cover from .api import PlatformDirsABC from .version import __version__, __version_info__ diff --git a/src/main/python/pybuilder/_vendor/typing_extensions.py b/src/main/python/pybuilder/_vendor/typing_extensions.py index 95bb87357..9f1c7aa31 100644 --- a/src/main/python/pybuilder/_vendor/typing_extensions.py +++ b/src/main/python/pybuilder/_vendor/typing_extensions.py @@ -1,50 +1,20 @@ import abc import collections -import contextlib +import collections.abc +import operator import sys import typing -import collections.abc as collections_abc -import operator - -# These are used by Protocol implementation -# We use internal typing helpers here, but this significantly reduces -# code duplication. (Also this is only until Protocol is in typing.) -from typing import Generic, Callable, TypeVar, Tuple # After PEP 560, internal typing API was substantially reworked. # This is especially important for Protocol class which uses internal APIs -# quite extensivelly. +# quite extensively. 
PEP_560 = sys.version_info[:3] >= (3, 7, 0) if PEP_560: - GenericMeta = TypingMeta = type - from typing import _GenericAlias + GenericMeta = type else: - from typing import GenericMeta, TypingMeta -OLD_GENERICS = False -try: - from typing import _type_vars, _next_in_mro, _type_check -except ImportError: - OLD_GENERICS = True -try: - from typing import _subs_tree # noqa - SUBS_TREE = True -except ImportError: - SUBS_TREE = False -try: - from typing import _tp_cache -except ImportError: - def _tp_cache(x): - return x -try: - from typing import _TypingEllipsis, _TypingEmpty -except ImportError: - class _TypingEllipsis: - pass - - class _TypingEmpty: - pass - + # 3.6 + from typing import GenericMeta, _type_vars # noqa # The two functions below are copies of typing internal helpers. # They are needed by _ProtocolMeta @@ -60,56 +30,12 @@ def _no_slots_copy(dct): def _check_generic(cls, parameters): if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) + raise TypeError(f"{cls} is not a generic class") alen = len(parameters) elen = len(cls.__parameters__) if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -if hasattr(typing, '_generic_new'): - _generic_new = typing._generic_new -else: - # Note: The '_generic_new(...)' function is used as a part of the - # process of creating a generic type and was added to the typing module - # as of Python 3.5.3. - # - # We've defined '_generic_new(...)' below to exactly match the behavior - # implemented in older versions of 'typing' bundled with Python 3.5.0 to - # 3.5.2. This helps eliminate redundancy when defining collection types - # like 'Deque' later. - # - # See https://github.com/python/typing/pull/308 for more details -- in - # particular, compare and contrast the definition of types like - # 'typing.List' before and after the merge. - - def _generic_new(base_cls, cls, *args, **kwargs): - return base_cls.__new__(cls, *args, **kwargs) - -# See https://github.com/python/typing/pull/439 -if hasattr(typing, '_geqv'): - from typing import _geqv - _geqv_defined = True -else: - _geqv = None - _geqv_defined = False - -if sys.version_info[:2] >= (3, 6): - import _collections_abc - _check_methods_in_mro = _collections_abc._check_methods -else: - def _check_methods_in_mro(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True + raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};" + f" actual {alen}, expected {elen}") # Please keep __all__ alphabetized within each category. @@ -119,18 +45,17 @@ def _check_methods_in_mro(C, *methods): 'Concatenate', 'Final', 'ParamSpec', + 'Self', 'Type', # ABCs (from collections.abc). - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # 'Awaitable', - # 'AsyncIterator', - # 'AsyncIterable', - # 'Coroutine', - # 'AsyncGenerator', - # 'AsyncContextManager', - # 'ChainMap', + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'ChainMap', # Concrete collection types. 'ContextManager', @@ -144,38 +69,29 @@ def _check_methods_in_mro(C, *methods): 'SupportsIndex', # One-off things. 
+ 'Annotated', 'final', 'IntVar', 'Literal', 'NewType', 'overload', + 'Protocol', + 'runtime', + 'runtime_checkable', 'Text', 'TypeAlias', 'TypeGuard', 'TYPE_CHECKING', ] -# Annotated relies on substitution trees of pep 560. It will not work for -# versions of typing older than 3.5.3 -HAVE_ANNOTATED = PEP_560 or SUBS_TREE - if PEP_560: __all__.extend(["get_args", "get_origin", "get_type_hints"]) -if HAVE_ANNOTATED: - __all__.append("Annotated") - -# Protocols are hard to backport to the original version of typing 3.5.0 -HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0) - -if HAVE_PROTOCOLS: - __all__.extend(['Protocol', 'runtime', 'runtime_checkable']) - - -# TODO +# 3.6.2+ if hasattr(typing, 'NoReturn'): NoReturn = typing.NoReturn -elif hasattr(typing, '_FinalTypingBase'): +# 3.6.0-3.6.1 +else: class _NoReturn(typing._FinalTypingBase, _root=True): """Special type indicating functions that never return. Example:: @@ -197,32 +113,6 @@ def __subclasscheck__(self, cls): raise TypeError("NoReturn cannot be used with issubclass().") NoReturn = _NoReturn(_root=True) -else: - class _NoReturnMeta(typing.TypingMeta): - """Metaclass for NoReturn""" - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - __slots__ = () - # Some unconstrained type variables. These are used by the container types. # (These are not for export.) @@ -230,142 +120,15 @@ def stop() -> NoReturn: KT = typing.TypeVar('KT') # Key type. VT = typing.TypeVar('VT') # Value type. T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers. T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -if hasattr(typing, 'ClassVar'): - ClassVar = typing.ClassVar -elif hasattr(typing, '_FinalTypingBase'): - class _ClassVar(typing._FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - ClassVar = _ClassVar(_root=True) -else: - class _ClassVarMeta(typing.TypingMeta): - """Metaclass for ClassVar""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("ClassVar cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("ClassVar cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __type__ = None +ClassVar = typing.ClassVar # On older versions of typing there is an internal class named "Final". 
+# 3.8+ if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): Final = typing.Final +# 3.7 elif sys.version_info[:2] >= (3, 7): class _FinalForm(typing._SpecialForm, _root=True): @@ -374,8 +137,8 @@ def __repr__(self): def __getitem__(self, parameters): item = typing._type_check(parameters, - '{} accepts only single type'.format(self._name)) - return _GenericAlias(self, (item,)) + f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) Final = _FinalForm('Final', doc="""A special typing construct to indicate that a name @@ -391,7 +154,8 @@ class FastConnector(Connection): TIMEOUT = 1 # Error reported by type checker There is no runtime checking of these properties.""") -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _Final(typing._FinalTypingBase, _root=True): """A special typing construct to indicate that a name cannot be re-assigned or overridden in a subclass. @@ -417,10 +181,9 @@ def __getitem__(self, item): cls = type(self) if self.__type__ is None: return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), + f'{cls.__name__[1:]} accepts only single type.'), _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) @@ -431,7 +194,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): r = super().__repr__() if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) + r += f'[{typing._type_repr(self.__type__)}]' return r def __hash__(self): @@ -445,79 +208,12 @@ def __eq__(self, other): return self is other Final = _Final(_root=True) -else: - class _FinalMeta(typing.TypingMeta): - """Metaclass for Final""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("Final cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Final cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class Final(typing.Final, metaclass=_FinalMeta, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. 
- For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __type__ = None +# 3.8+ if hasattr(typing, 'final'): final = typing.final +# 3.6-3.7 else: def final(f): """This decorator can be used to indicate to type checkers that @@ -543,11 +239,13 @@ class Other(Leaf): # Error reported by type checker def IntVar(name): - return TypeVar(name) + return typing.TypeVar(name) +# 3.8+: if hasattr(typing, 'Literal'): Literal = typing.Literal +# 3.7: elif sys.version_info[:2] >= (3, 7): class _LiteralForm(typing._SpecialForm, _root=True): @@ -555,7 +253,7 @@ def __repr__(self): return 'typing_extensions.' + self._name def __getitem__(self, parameters): - return _GenericAlias(self, parameters) + return typing._GenericAlias(self, parameters) Literal = _LiteralForm('Literal', doc="""A type that can be used to indicate to type checkers @@ -570,7 +268,8 @@ def __getitem__(self, parameters): Literal[...] cannot be subclassed. There is no runtime checking verifying that the parameter is actually a value instead of a type.""") -elif hasattr(typing, '_FinalTypingBase'): +# 3.6: +else: class _Literal(typing._FinalTypingBase, _root=True): """A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the @@ -596,8 +295,7 @@ def __getitem__(self, values): if not isinstance(values, tuple): values = (values,) return cls(values, _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') def _eval_type(self, globalns, localns): return self @@ -605,7 +303,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): r = super().__repr__() if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) + r += f'[{", ".join(map(typing._type_repr, self.__values__))}]' return r def __hash__(self): @@ -619,161 +317,18 @@ def __eq__(self, other): return self is other Literal = _Literal(_root=True) -else: - class _LiteralMeta(typing.TypingMeta): - """Metaclass for Literal""" - - def __new__(cls, name, bases, namespace, values=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if values is not None: - self.__values__ = values - return self - - def __instancecheck__(self, obj): - raise TypeError("Literal cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Literal cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__values__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - if not isinstance(item, tuple): - item = (item,) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), values=item, _root=True) - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - - class 
Literal(typing.Final, metaclass=_LiteralMeta, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - - __values__ = None - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - -def overload(func): - """Decorator for overloaded functions/methods. - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy +_overload_dummy = typing._overload_dummy # noqa +overload = typing.overload # This is not a real generic class. Don't use outside annotations. -if hasattr(typing, 'Type'): - Type = typing.Type -else: - # Internal type variable used for Type[]. - CT_co = typing.TypeVar('CT_co', covariant=True, bound=type) - - class Type(typing.Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - +Type = typing.Type # Various ABCs mimicking those in collections.abc. # A few are simply re-exported for completeness. -def _define_guard(type_name): - """ - Returns True if the given type isn't defined in typing but - is defined in collections_abc. - - Adds the type to __all__ if the collection is found in either - typing or collection_abc. - """ - if hasattr(typing, type_name): - __all__.append(type_name) - globals()[type_name] = getattr(typing, type_name) - return False - elif hasattr(collections_abc, type_name): - __all__.append(type_name) - return True - else: - return False - class _ExtensionsGenericMeta(GenericMeta): def __subclasscheck__(self, subclass): @@ -782,12 +337,11 @@ def __subclasscheck__(self, subclass): between collections, typing, and typing_extensions on older versions of Python, see https://github.com/python/typing/issues/501. 
""" - if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False if not self.__extra__: return super().__subclasscheck__(subclass) res = self.__extra__.__subclasshook__(subclass) @@ -803,162 +357,59 @@ def __subclasscheck__(self, subclass): return False -if _define_guard('Awaitable'): - class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Awaitable): - __slots__ = () - +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator -if _define_guard('Coroutine'): - class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Coroutine): - __slots__ = () - - -if _define_guard('AsyncIterable'): - class AsyncIterable(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncIterable): - __slots__ = () - - -if _define_guard('AsyncIterator'): - class AsyncIterator(AsyncIterable[T_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncIterator): - __slots__ = () - - -if hasattr(typing, 'Deque'): - Deque = typing.Deque -elif _geqv_defined: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) -else: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): +# 3.6.1+ +if hasattr(typing, 'Deque'): + Deque = typing.Deque +# 3.6.0 +else: + class Deque(collections.deque, typing.MutableSequence[T], + metaclass=_ExtensionsGenericMeta, + extra=collections.deque): __slots__ = () def __new__(cls, *args, **kwds): if cls._gorg is Deque: return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - + return typing._generic_new(collections.deque, cls, *args, **kwds) -if hasattr(typing, 'ContextManager'): - ContextManager = typing.ContextManager -elif hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractContextManager): - __slots__ = () +ContextManager = typing.ContextManager +# 3.6.2+ +if hasattr(typing, 'AsyncContextManager'): + AsyncContextManager = typing.AsyncContextManager +# 3.6.0-3.6.1 else: - class ContextManager(typing.Generic[T_co]): + from _collections_abc import _check_methods as _check_methods_in_mro # noqa + + class AsyncContextManager(typing.Generic[T_co]): __slots__ = () - def __enter__(self): + async def __aenter__(self): return self @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): + async def __aexit__(self, exc_type, exc_value, traceback): return None @classmethod def __subclasshook__(cls, C): - if cls is ContextManager: - # In Python 3.6+, it is possible to set a method to None to - # explicitly indicate that the class does not implement an ABC - # 
(https://bugs.python.org/issue25958), but we do not support - # that pattern here because this fallback class is only used - # in Python 3.5 and earlier. - if (any("__enter__" in B.__dict__ for B in C.__mro__) and - any("__exit__" in B.__dict__ for B in C.__mro__)): - return True + if cls is AsyncContextManager: + return _check_methods_in_mro(C, "__aenter__", "__aexit__") return NotImplemented +DefaultDict = typing.DefaultDict -if hasattr(typing, 'AsyncContextManager'): - AsyncContextManager = typing.AsyncContextManager - __all__.append('AsyncContextManager') -elif hasattr(contextlib, 'AbstractAsyncContextManager'): - class AsyncContextManager(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractAsyncContextManager): - __slots__ = () - - __all__.append('AsyncContextManager') -elif sys.version_info[:2] >= (3, 5): - exec(""" -class AsyncContextManager(typing.Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - return _check_methods_in_mro(C, "__aenter__", "__aexit__") - return NotImplemented - -__all__.append('AsyncContextManager') -""") - - -if hasattr(typing, 'DefaultDict'): - DefaultDict = typing.DefaultDict -elif _geqv_defined: - class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) -else: - class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is DefaultDict: - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - +# 3.7.2+ if hasattr(typing, 'OrderedDict'): OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) -elif _geqv_defined: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, OrderedDict): - return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) +# 3.6 else: class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], metaclass=_ExtensionsGenericMeta, @@ -969,44 +420,12 @@ class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], def __new__(cls, *args, **kwds): if cls._gorg is OrderedDict: return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) - + return typing._generic_new(collections.OrderedDict, cls, *args, **kwds) +# 3.6.2+ if hasattr(typing, 'Counter'): Counter = typing.Counter -elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1): - assert _geqv_defined - _TInt = typing.TypeVar('_TInt') - - class _CounterMeta(typing.GenericMeta): - """Metaclass for Counter""" - def __getitem__(self, item): - return super().__getitem__((item, int)) - - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_CounterMeta, - 
extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - -elif _geqv_defined: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - +# 3.6.0-3.6.1 else: class Counter(collections.Counter, typing.Dict[T, int], @@ -1017,88 +436,36 @@ class Counter(collections.Counter, def __new__(cls, *args, **kwds): if cls._gorg is Counter: return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - + return typing._generic_new(collections.Counter, cls, *args, **kwds) +# 3.6.1+ if hasattr(typing, 'ChainMap'): ChainMap = typing.ChainMap - __all__.append('ChainMap') elif hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - if _geqv_defined: - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, ChainMap): - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - else: - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - __all__.append('ChainMap') + class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], + metaclass=_ExtensionsGenericMeta, + extra=collections.ChainMap): + __slots__ = () -if _define_guard('AsyncGenerator'): + def __new__(cls, *args, **kwds): + if cls._gorg is ChainMap: + return collections.ChainMap(*args, **kwds) + return typing._generic_new(collections.ChainMap, cls, *args, **kwds) + +# 3.6.1+ +if hasattr(typing, 'AsyncGenerator'): + AsyncGenerator = typing.AsyncGenerator +# 3.6.0 +else: class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncGenerator): + extra=collections.abc.AsyncGenerator): __slots__ = () - -if hasattr(typing, 'NewType'): - NewType = typing.NewType -else: - def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -if hasattr(typing, 'Text'): - Text = typing.Text -else: - Text = str - - -if hasattr(typing, 'TYPE_CHECKING'): - TYPE_CHECKING = typing.TYPE_CHECKING -else: - # Constant that's True when type checking, but False here. 
- TYPE_CHECKING = False +NewType = typing.NewType +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING def _gorg(cls): @@ -1111,16 +478,6 @@ def _gorg(cls): return cls -if OLD_GENERICS: - def _next_in_mro(cls): # noqa - """This function exists for compatibility with old typing versions.""" - next_in_mro = object - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - _PROTO_WHITELIST = ['Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', @@ -1150,257 +507,12 @@ def _is_callable_members_only(cls): return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) +# 3.8+ if hasattr(typing, 'Protocol'): Protocol = typing.Protocol -elif HAVE_PROTOCOLS and not PEP_560: - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(GenericMeta): - """Internal metaclass for Protocol. - - This exists so Protocol classes can be generic without deriving - from Generic. - """ - if not OLD_GENERICS: - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - # This is just a version copied from GenericMeta.__new__ that - # includes "Protocol" special treatment. (Comments removed for brevity.) - assert extra is None # Protocols should not have extra - if tvars is not None: - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - tvars = _type_vars(bases) - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ in (Generic, Protocol)): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] or" - " Protocol[...] multiple times.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in %s[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - "Generic" if any(b.__origin__ is Generic - for b in bases) else "Protocol", - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if (extra is not None and type(extra) is abc.ABCMeta and - extra not in bases): - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b - for b in bases) - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, - _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else - _gorg(origin)) - self.__parameters__ = tvars - self.__args__ = tuple(... 
if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - self.__next_in_mro__ = _next_in_mro(self) - if orig_bases is None: - self.__orig_bases__ = initial_bases - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - def __init__(cls, *args, **kwargs): - super().__init__(*args, **kwargs) - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol or - isinstance(b, _ProtocolMeta) and - b.__origin__ is Protocol - for b in cls.__bases__) - if cls._is_protocol: - for base in cls.__mro__[1:]: - if not (base in (object, Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, TypingMeta) and base._is_protocol or - isinstance(base, GenericMeta) and - base.__origin__ is Generic): - raise TypeError('Protocols can only inherit from other' - ' protocols, got %r' % base) - - cls.__init__ = _no_init - - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - def __instancecheck__(self, instance): - # We need this method for situations where attributes are - # assigned in __init__. - if ((not getattr(self, '_is_protocol', False) or - _is_callable_members_only(self)) and - issubclass(instance.__class__, self)): - return True - if self._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(self, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(self)): - return True - return super(GenericMeta, self).__instancecheck__(instance) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if (self.__dict__.get('_is_protocol', None) and - not self.__dict__.get('_is_runtime_protocol', None)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return False - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if (self.__dict__.get('_is_runtime_protocol', None) and - not _is_callable_members_only(self)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return super(GenericMeta, self).__subclasscheck__(cls) - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - return super(GenericMeta, self).__subclasscheck__(cls) - - if not OLD_GENERICS: - @_tp_cache - def __getitem__(self, params): - # We also need to copy this from GenericMeta.__getitem__ to get - # special treatment of "Protocol". (Comments removed for brevity.) 
- if not isinstance(params, tuple): - params = (params,) - if not params and _gorg(self) is not Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % self.__qualname__) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self in (Generic, Protocol): - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to %r[...] must all be type variables" % self) - if len(set(params)) != len(params): - raise TypeError( - "Parameters to %r[...] must all be unique" % self) - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self.__origin__ in (Generic, Protocol): - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - class Protocol(metaclass=_ProtocolMeta): - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto({bases}): - def meth(self) -> T: - ... - """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if _gorg(cls) is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can be used only as a base class") - if OLD_GENERICS: - return _generic_new(_next_in_mro(cls), cls, *args, **kwds) - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - if Protocol.__doc__ is not None: - Protocol.__doc__ = Protocol.__doc__.format(bases="Protocol, Generic[T]" if - OLD_GENERICS else "Protocol[T]") - - +# 3.7 elif PEP_560: - from typing import _type_check, _collect_type_vars # noqa + from typing import _collect_type_vars # noqa def _no_init(self, *args, **kwargs): if type(self)._is_protocol: @@ -1413,14 +525,14 @@ def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. 
if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and + _is_callable_members_only(cls)) and issubclass(instance.__class__, cls)): return True if cls._is_protocol: if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): return True return super().__instancecheck__(instance) @@ -1465,38 +577,38 @@ def __new__(cls, *args, **kwds): "it can only be used as a base class") return super().__new__(cls) - @_tp_cache + @typing._tp_cache def __class_getitem__(cls, params): if not isinstance(params, tuple): params = (params,) - if not params and cls is not Tuple: + if not params and cls is not typing.Tuple: raise TypeError( - "Parameter list to {}[...] cannot be empty".format(cls.__qualname__)) + f"Parameter list to {cls.__qualname__}[...] cannot be empty") msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) + params = tuple(typing._type_check(p, msg) for p in params) # noqa if cls is Protocol: # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): + if not all(isinstance(p, typing.TypeVar) for p in params): i = 0 - while isinstance(params[i], TypeVar): + while isinstance(params[i], typing.TypeVar): i += 1 raise TypeError( "Parameters to Protocol[...] must all be type variables." - " Parameter {} is {}".format(i + 1, params[i])) + f" Parameter {i + 1} is {params[i]}") if len(set(params)) != len(params): raise TypeError( "Parameters to Protocol[...] must all be unique") else: # Subscripting a regular Generic subclass. _check_generic(cls, params) - return _GenericAlias(cls, params) + return typing._GenericAlias(cls, params) def __init_subclass__(cls, *args, **kwargs): tvars = [] if '__orig_bases__' in cls.__dict__: - error = Generic in cls.__orig_bases__ + error = typing.Generic in cls.__orig_bases__ else: - error = Generic in cls.__bases__ + error = typing.Generic in cls.__bases__ if error: raise TypeError("Cannot inherit from plain Generic") if '__orig_bases__' in cls.__dict__: @@ -1508,10 +620,10 @@ def __init_subclass__(cls, *args, **kwargs): # and reject multiple Generic[...] and/or Protocol[...]. gvars = None for base in cls.__orig_bases__: - if (isinstance(base, _GenericAlias) and - base.__origin__ in (Generic, Protocol)): + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): # for error messages - the_base = 'Generic' if base.__origin__ is Generic else 'Protocol' + the_base = base.__origin__.__name__ if gvars is not None: raise TypeError( "Cannot inherit from Generic[...]" @@ -1525,9 +637,8 @@ def __init_subclass__(cls, *args, **kwargs): if not tvarset <= gvarset: s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) s_args = ', '.join(str(g) for g in gvars) - raise TypeError("Some type variables ({}) are" - " not listed in {}[{}]".format(s_vars, - the_base, s_args)) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") tvars = gvars cls.__parameters__ = tuple(tvars) @@ -1535,59 +646,299 @@ def __init_subclass__(cls, *args, **kwargs): if not cls.__dict__.get('_is_protocol', None): cls._is_protocol = any(b is Protocol for b in cls.__bases__) - # Set (or override) the protocol subclass hook. 
- def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook + # Set (or override) the protocol subclass hook. + def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. + for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init +# 3.6 +else: + from typing import _next_in_mro, _type_check # noqa + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + class _ProtocolMeta(GenericMeta): + """Internal metaclass for Protocol. + + This exists so Protocol classes can be generic without deriving + from Generic. + """ + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + # This is just a version copied from GenericMeta.__new__ that + # includes "Protocol" special treatment. (Comments removed for brevity.) 
+ assert extra is None # Protocols should not have extra + if tvars is not None: + assert origin is not None + assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars + else: + tvars = _type_vars(bases) + gvars = None + for base in bases: + if base is typing.Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ in (typing.Generic, Protocol)): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] or" + " Protocol[...] multiple times.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) + s_args = ", ".join(str(g) for g in gvars) + cls_name = "Generic" if any(b.__origin__ is typing.Generic + for b in bases) else "Protocol" + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {cls_name}[{s_args}]") + tvars = gvars + + initial_bases = bases + if (extra is not None and type(extra) is abc.ABCMeta and + extra not in bases): + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b + for b in bases) + if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases): + bases = tuple(b for b in bases if b is not typing.Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, + _root=True) + super(GenericMeta, self).__setattr__('_gorg', + self if not origin else + _gorg(origin)) + self.__parameters__ = tvars + self.__args__ = tuple(... if a is typing._TypingEllipsis else + () if a is typing._TypingEmpty else + a for a in args) if args else None + self.__next_in_mro__ = _next_in_mro(self) + if orig_bases is None: + self.__orig_bases__ = initial_bases + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + if hasattr(self, '_subs_tree'): + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol or + isinstance(b, _ProtocolMeta) and + b.__origin__ is Protocol + for b in cls.__bases__) + if cls._is_protocol: + for base in cls.__mro__[1:]: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, typing.TypingMeta) and base._is_protocol or + isinstance(base, GenericMeta) and + base.__origin__ is typing.Generic): + raise TypeError(f'Protocols can only inherit from other' + f' protocols, got {repr(base)}') + + cls.__init__ = _no_init + + def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook 
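The __subclasshook__ installed above, together with the __instancecheck__ and __subclasscheck__ methods that follow, is what makes structural runtime checks work on 3.6 just as the PEP 560 branch does on 3.7+. A sketch of the contract, assuming the module is importable as typing_extensions::

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class Closable(Protocol):
        def close(self) -> None: ...

    class Conn:
        def close(self) -> None:
            pass

    assert isinstance(Conn(), Closable)  # structural; no inheritance involved
    assert issubclass(Conn, Closable)    # allowed because all members are methods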
+ + def __instancecheck__(self, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if ((not getattr(self, '_is_protocol', False) or + _is_callable_members_only(self)) and + issubclass(instance.__class__, self)): + return True + if self._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(self, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(self)): + return True + return super(GenericMeta, self).__instancecheck__(instance) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if (self.__dict__.get('_is_protocol', None) and + not self.__dict__.get('_is_runtime_protocol', None)): + if sys._getframe(1).f_globals['__name__'] in ['abc', + 'functools', + 'typing']: + return False + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if (self.__dict__.get('_is_runtime_protocol', None) and + not _is_callable_members_only(self)): + if sys._getframe(1).f_globals['__name__'] in ['abc', + 'functools', + 'typing']: + return super(GenericMeta, self).__subclasscheck__(cls) + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + return super(GenericMeta, self).__subclasscheck__(cls) + + @typing._tp_cache + def __getitem__(self, params): + # We also need to copy this from GenericMeta.__getitem__ to get + # special treatment of "Protocol". (Comments removed for brevity.) + if not isinstance(params, tuple): + params = (params,) + if not params and _gorg(self) is not typing.Tuple: + raise TypeError( + f"Parameter list to {self.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self in (typing.Generic, Protocol): + if not all(isinstance(p, typing.TypeVar) for p in params): + raise TypeError( + f"Parameters to {repr(self)}[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + f"Parameters to {repr(self)}[...] must all be unique") + tvars = params + args = params + elif self in (typing.Tuple, typing.Callable): + tvars = _type_vars(params) + args = params + elif self.__origin__ in (typing.Generic, Protocol): + raise TypeError(f"Cannot subscript already-subscripted {repr(self)}") + else: + _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + class Protocol(metaclass=_ProtocolMeta): + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. 
+ + Protocol classes can be generic, they are defined as:: - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return + class GenProto(Protocol[T]): + def meth(self) -> T: + ... + """ + __slots__ = () + _is_protocol = True - # Check consistency of bases. - for base in cls.__bases__: - if not (base in (object, Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - ' protocols, got %r' % base) - cls.__init__ = _no_init + def __new__(cls, *args, **kwds): + if _gorg(cls) is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can be used only as a base class") + return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds) +# 3.8+ if hasattr(typing, 'runtime_checkable'): runtime_checkable = typing.runtime_checkable -elif HAVE_PROTOCOLS: +# 3.6-3.7 +else: def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it can be used with isinstance() and issubclass(). Raise TypeError @@ -1598,19 +949,20 @@ def runtime_checkable(cls): """ if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: raise TypeError('@runtime_checkable can be only applied to protocol classes,' - ' got %r' % cls) + f' got {cls!r}') cls._is_runtime_protocol = True return cls -if HAVE_PROTOCOLS: - # Exists for backwards compatibility. - runtime = runtime_checkable +# Exists for backwards compatibility. +runtime = runtime_checkable +# 3.8+ if hasattr(typing, 'SupportsIndex'): SupportsIndex = typing.SupportsIndex -elif HAVE_PROTOCOLS: +# 3.6-3.7 +else: @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -1665,8 +1017,8 @@ def _typeddict_new(*args, total=True, **kwargs): fields, = args # allow the "_fields" keyword be passed except ValueError: raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - 'positional arguments but {} ' - 'were given'.format(len(args) + 2)) + f'positional arguments but {len(args) + 2} ' + 'were given') elif '_fields' in kwargs and len(kwargs) == 1: fields = kwargs.pop('_fields') import warnings @@ -1695,10 +1047,7 @@ def _typeddict_new(*args, total=True, **kwargs): class _TypedDictMeta(type): def __init__(cls, name, bases, ns, total=True): - # In Python 3.4 and 3.5 the __init__ method also needs to support the - # keyword arguments. - # See https://www.python.org/dev/peps/pep-0487/#implementation-details - super(_TypedDictMeta, cls).__init__(name, bases, ns) + super().__init__(name, bases, ns) def __new__(cls, name, bases, ns, total=True): # Create new typed dict class object. @@ -1708,7 +1057,7 @@ def __new__(cls, name, bases, ns, total=True): # Subclasses and instances of TypedDict return actual dictionaries # via _dict_new. ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns) + tp_dict = super().__new__(cls, name, (dict,), ns) annotations = {} own_annotations = ns.get('__annotations__', {}) @@ -1780,6 +1129,7 @@ class Point2D(TypedDict): # Not exported and not a public API, but needed for get_origin() and get_args() # to work. _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 elif PEP_560: class _AnnotatedAlias(typing._GenericAlias, _root=True): """Runtime representation of an annotated type. 
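As the truncated Point2D docstring above suggests, the TypedDict backport keeps its runtime shape: subclasses type-check as structured records but construct plain dicts. A minimal sketch::

    from typing_extensions import TypedDict

    class Point2D(TypedDict):
        x: int
        y: int

    p = Point2D(x=1, y=2)
    assert p == {"x": 1, "y": 2}
    assert type(p) is dict  # no dedicated runtime type is created for instances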
@@ -1802,10 +1152,8 @@ def copy_with(self, params): return _AnnotatedAlias(new_type, self.__metadata__) def __repr__(self): - return "typing_extensions.Annotated[{}, {}]".format( - typing._type_repr(self.__origin__), - ", ".join(repr(a) for a in self.__metadata__) - ) + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") def __reduce__(self): return operator.getitem, ( @@ -1860,7 +1208,7 @@ class Annotated: def __new__(cls, *args, **kwargs): raise TypeError("Type Annotated cannot be instantiated.") - @_tp_cache + @typing._tp_cache def __class_getitem__(cls, params): if not isinstance(params, tuple) or len(params) < 2: raise TypeError("Annotated[...] should be used " @@ -1873,7 +1221,7 @@ def __class_getitem__(cls, params): def __init_subclass__(cls, *args, **kwargs): raise TypeError( - "Cannot subclass {}.Annotated".format(cls.__module__) + f"Cannot subclass {cls.__module__}.Annotated" ) def _strip_annotations(t): @@ -1925,8 +1273,8 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): if include_extras: return hint return {k: _strip_annotations(t) for k, t in hint.items()} - -elif HAVE_ANNOTATED: +# 3.6 +else: def _is_dunder(name): """Returns True if name is a __dunder_variable_name__.""" @@ -1955,7 +1303,7 @@ def _tree_repr(self, tree): else: tp_repr = origin[0]._tree_repr(origin) metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs) + return f'{cls}[{tp_repr}, {metadata_reprs}]' def _subs_tree(self, tvars=None, args=None): # noqa if self is Annotated: @@ -1981,7 +1329,7 @@ def _get_cons(self): else: return tree - @_tp_cache + @typing._tp_cache def __getitem__(self, params): if not isinstance(params, tuple): params = (params,) @@ -2067,23 +1415,23 @@ class Annotated(metaclass=AnnotatedMeta): """ # Python 3.8 has get_origin() and get_args() but those implementations aren't -# Annotated-aware, so we can't use those, only Python 3.9 versions will do. -# Similarly, Python 3.9's implementation doesn't support ParamSpecArgs and -# ParamSpecKwargs. +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. if sys.version_info[:2] >= (3, 10): get_origin = typing.get_origin get_args = typing.get_args +# 3.7-3.9 elif PEP_560: try: # 3.9+ from typing import _BaseGenericAlias except ImportError: - _BaseGenericAlias = _GenericAlias + _BaseGenericAlias = typing._GenericAlias try: # 3.9+ from typing import GenericAlias except ImportError: - GenericAlias = _GenericAlias + GenericAlias = typing._GenericAlias def get_origin(tp): """Get the unsubscripted version of a type. 
@@ -2102,11 +1450,11 @@ def get_origin(tp): """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (_GenericAlias, GenericAlias, _BaseGenericAlias, + if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias, ParamSpecArgs, ParamSpecKwargs)): return tp.__origin__ - if tp is Generic: - return Generic + if tp is typing.Generic: + return typing.Generic return None def get_args(tp): @@ -2122,7 +1470,7 @@ def get_args(tp): """ if isinstance(tp, _AnnotatedAlias): return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (_GenericAlias, GenericAlias)): + if isinstance(tp, (typing._GenericAlias, GenericAlias)): if getattr(tp, "_special", False): return () res = tp.__args__ @@ -2132,8 +1480,10 @@ def get_args(tp): return () +# 3.10+ if hasattr(typing, 'TypeAlias'): TypeAlias = typing.TypeAlias +# 3.9 elif sys.version_info[:2] >= (3, 9): class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2151,8 +1501,8 @@ def TypeAlias(self, parameters): It's invalid when used anywhere except as in the example above. """ - raise TypeError("{} is not subscriptable".format(self)) - + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 elif sys.version_info[:2] >= (3, 7): class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2169,8 +1519,8 @@ def __repr__(self): It's invalid when used anywhere except as in the example above.""") - -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _TypeAliasMeta(typing.TypingMeta): """Metaclass for TypeAlias""" @@ -2200,37 +1550,13 @@ def __repr__(self): return 'typing_extensions.TypeAlias' TypeAlias = _TypeAliasBase(_root=True) -else: - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - class TypeAlias(metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - __slots__ = () # Python 3.10+ has PEP 612 if hasattr(typing, 'ParamSpecArgs'): ParamSpecArgs = typing.ParamSpecArgs ParamSpecKwargs = typing.ParamSpecKwargs +# 3.6-3.9 else: class _Immutable: """Mixin to indicate that object should not be copied.""" @@ -2258,7 +1584,7 @@ def __init__(self, origin): self.__origin__ = origin def __repr__(self): - return "{}.args".format(self.__origin__.__name__) + return f"{self.__origin__.__name__}.args" class ParamSpecKwargs(_Immutable): """The kwargs for a ParamSpec object. @@ -2276,10 +1602,12 @@ def __init__(self, origin): self.__origin__ = origin def __repr__(self): - return "{}.kwargs".format(self.__origin__.__name__) + return f"{self.__origin__.__name__}.kwargs" +# 3.10+ if hasattr(typing, 'ParamSpec'): ParamSpec = typing.ParamSpec +# 3.6-3.9 else: # Inherits from list as a workaround for Callable checks in Python < 3.9.2. @@ -2331,7 +1659,7 @@ def add_two(x: float, y: float) -> float: """ # Trick Generic __parameters__. - __class__ = TypeVar + __class__ = typing.TypeVar @property def args(self): @@ -2382,61 +1710,60 @@ def __call__(self, *args, **kwargs): pass if not PEP_560: - # Only needed in 3.6 and lower. 
+ # Only needed in 3.6. def _get_type_vars(self, tvars): if self not in tvars: tvars.append(self) -# Inherits from list as a workaround for Callable checks in Python < 3.9.2. -class _ConcatenateGenericAlias(list): +# 3.6-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): - # Trick Generic into looking into this for __parameters__. - if PEP_560: - __class__ = typing._GenericAlias - elif sys.version_info[:3] == (3, 5, 2): - __class__ = typing.TypingMeta - else: - __class__ = typing._TypingBase + # Trick Generic into looking into this for __parameters__. + if PEP_560: + __class__ = typing._GenericAlias + else: + __class__ = typing._TypingBase - # Flag in 3.8. - _special = False - # Attribute in 3.6 and earlier. - if sys.version_info[:3] == (3, 5, 2): - _gorg = typing.GenericMeta - else: + # Flag in 3.8. + _special = False + # Attribute in 3.6 and earlier. _gorg = typing.Generic - def __init__(self, origin, args): - super().__init__(args) - self.__origin__ = origin - self.__args__ = args + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args - def __repr__(self): - _type_repr = typing._type_repr - return '{origin}[{args}]' \ - .format(origin=_type_repr(self.__origin__), - args=', '.join(_type_repr(arg) for arg in self.__args__)) + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') - def __hash__(self): - return hash((self.__origin__, self.__args__)) + def __hash__(self): + return hash((self.__origin__, self.__args__)) - # Hack to get typing._type_check to pass in Generic. - def __call__(self, *args, **kwargs): - pass + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass - @property - def __parameters__(self): - return tuple(tp for tp in self.__args__ if isinstance(tp, (TypeVar, ParamSpec))) + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) - if not PEP_560: - # Only required in 3.6 and lower. - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - typing._get_type_vars(self.__parameters__, tvars) + if not PEP_560: + # Only required in 3.6. + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + typing._get_type_vars(self.__parameters__, tvars) -@_tp_cache +# 3.6-3.9 +@typing._tp_cache def _concatenate_getitem(self, parameters): if parameters == (): raise TypeError("Cannot take a Concatenate of no types.") @@ -2450,9 +1777,11 @@ def _concatenate_getitem(self, parameters): return _ConcatenateGenericAlias(self, parameters) +# 3.10+ if hasattr(typing, 'Concatenate'): Concatenate = typing.Concatenate _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa +# 3.9 elif sys.version_info[:2] >= (3, 9): @_TypeAliasForm def Concatenate(self, parameters): @@ -2467,7 +1796,7 @@ def Concatenate(self, parameters): See PEP 612 for detailed information. """ return _concatenate_getitem(self, parameters) - +# 3.7-8 elif sys.version_info[:2] >= (3, 7): class _ConcatenateForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2488,8 +1817,8 @@ def __getitem__(self, parameters): See PEP 612 for detailed information. 
""") - -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _ConcatenateAliasMeta(typing.TypingMeta): """Metaclass for Concatenate.""" @@ -2524,38 +1853,11 @@ def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) Concatenate = _ConcatenateAliasBase(_root=True) -# For 3.5.0 - 3.5.2 -else: - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - class Concatenate(metaclass=_ConcatenateAliasMeta, _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - __slots__ = () +# 3.10+ if hasattr(typing, 'TypeGuard'): TypeGuard = typing.TypeGuard +# 3.9 elif sys.version_info[:2] >= (3, 9): class _TypeGuardForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2605,9 +1907,9 @@ def is_str(val: Union[str, float]): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """ - item = typing._type_check(parameters, '{} accepts only single type.'.format(self)) - return _GenericAlias(self, (item,)) - + item = typing._type_check(parameters, f'{self} accepts only single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 elif sys.version_info[:2] >= (3, 7): class _TypeGuardForm(typing._SpecialForm, _root=True): @@ -2616,8 +1918,8 @@ def __repr__(self): def __getitem__(self, parameters): item = typing._type_check(parameters, - '{} accepts only a single type'.format(self._name)) - return _GenericAlias(self, (item,)) + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) TypeGuard = _TypeGuardForm( 'TypeGuard', @@ -2663,7 +1965,8 @@ def is_str(val: Union[str, float]): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """) -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _TypeGuard(typing._FinalTypingBase, _root=True): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. 
@@ -2717,10 +2020,9 @@ def __getitem__(self, item):
             cls = type(self)
             if self.__type__ is None:
                 return cls(typing._type_check(item,
-                           '{} accepts only a single type.'.format(cls.__name__[1:])),
+                           f'{cls.__name__[1:]} accepts only a single type.'),
                            _root=True)
-            raise TypeError('{} cannot be further subscripted'
-                            .format(cls.__name__[1:]))
+            raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
 
         def _eval_type(self, globalns, localns):
             new_tp = typing._eval_type(self.__type__, globalns, localns)
@@ -2731,7 +2033,7 @@ def _eval_type(self, globalns, localns):
         def __repr__(self):
             r = super().__repr__()
             if self.__type__ is not None:
-                r += '[{}]'.format(typing._type_repr(self.__type__))
+                r += f'[{typing._type_repr(self.__type__)}]'
             return r
 
         def __hash__(self):
@@ -2745,41 +2047,203 @@ def __eq__(self, other):
             return self is other
 
     TypeGuard = _TypeGuard(_root=True)
+
+if hasattr(typing, "Self"):
+    Self = typing.Self
+elif sys.version_info[:2] >= (3, 7):
+    # Vendored from cpython typing._SpecialForm
+    class _SpecialForm(typing._Final, _root=True):
+        __slots__ = ('_name', '__doc__', '_getitem')
+
+        def __init__(self, getitem):
+            self._getitem = getitem
+            self._name = getitem.__name__
+            self.__doc__ = getitem.__doc__
+
+        def __getattr__(self, item):
+            if item in {'__name__', '__qualname__'}:
+                return self._name
+
+            raise AttributeError(item)
+
+        def __mro_entries__(self, bases):
+            raise TypeError(f"Cannot subclass {self!r}")
+
+        def __repr__(self):
+            return f'typing_extensions.{self._name}'
+
+        def __reduce__(self):
+            return self._name
+
+        def __call__(self, *args, **kwds):
+            raise TypeError(f"Cannot instantiate {self!r}")
+
+        def __or__(self, other):
+            return typing.Union[self, other]
+
+        def __ror__(self, other):
+            return typing.Union[other, self]
+
+        def __instancecheck__(self, obj):
+            raise TypeError(f"{self} cannot be used with isinstance()")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError(f"{self} cannot be used with issubclass()")
+
+        @typing._tp_cache
+        def __getitem__(self, parameters):
+            return self._getitem(self, parameters)
+
+    @_SpecialForm
+    def Self(self, params):
+        """Used to spell the type of "self" in classes.
+
+        Example::
+
+            from typing import Self
+
+            class ReturnsSelf:
+                def parse(self, data: bytes) -> Self:
+                    ...
+                    return self
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
 else:
-    class _TypeGuardMeta(typing.TypingMeta):
-        """Metaclass for TypeGuard"""
+    class _Self(typing._FinalTypingBase, _root=True):
+        """Used to spell the type of "self" in classes.
 
-        def __new__(cls, name, bases, namespace, tp=None, _root=False):
-            self = super().__new__(cls, name, bases, namespace, _root=_root)
-            if tp is not None:
-                self.__type__ = tp
-            return self
+        Example::
+
+            from typing import Self
+
+            class ReturnsSelf:
+                def parse(self, data: bytes) -> Self:
+                    ...
+                    return self
+
+        """
+
+        __slots__ = ()
 
         def __instancecheck__(self, obj):
-            raise TypeError("TypeGuard cannot be used with isinstance().")
+            raise TypeError(f"{self} cannot be used with isinstance().")
 
         def __subclasscheck__(self, cls):
-            raise TypeError("TypeGuard cannot be used with issubclass().")
+            raise TypeError(f"{self} cannot be used with issubclass().")
+
+    Self = _Self(_root=True)
+
+
+if hasattr(typing, 'Required'):
+    Required = typing.Required
+    NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9):
+    class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+        def __repr__(self):
+            return 'typing_extensions.'
+ self._name + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + +elif sys.version_info[:2] >= (3, 7): + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + '{} accepts only single type'.format(self._name)) + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. 
For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) +else: + # NOTE: Modeled after _Final's implementation when _FinalTypingBase available + class _MaybeRequired(typing._FinalTypingBase, _root=True): + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp def __getitem__(self, item): cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) + if self.__type__ is None: + return cls(typing._type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) if new_tp == self.__type__: return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) + return type(self)(new_tp, _root=True) def __repr__(self): r = super().__repr__() @@ -2791,53 +2255,42 @@ def __hash__(self): return hash((type(self).__name__, self.__type__)) def __eq__(self, other): - if not hasattr(other, "__type__"): + if not isinstance(other, type(self)): return NotImplemented if self.__type__ is not None: return self.__type__ == other.__type__ return self is other - class TypeGuard(typing.Final, metaclass=_TypeGuardMeta, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. + class _Required(_MaybeRequired, _root=True): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: - Using ``-> TypeGuard`` tells the static type checker that for a given - function: + class Movie(TypedDict, total=False): + title: Required[str] + year: int - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) - For example:: + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... + class _NotRequired(_MaybeRequired, _root=True): + """A special typing construct to mark a key of a TypedDict as + potentially missing. 
For example: - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. + class Movie(TypedDict): + title: str + year: NotRequired[int] - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) """ - __type__ = None + + Required = _Required(_root=True) + NotRequired = _NotRequired(_root=True) diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/INSTALLER b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/LICENSE b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/LICENSE similarity index 100% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/LICENSE rename to src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/LICENSE diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/METADATA b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/METADATA similarity index 95% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/METADATA rename to src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/METADATA index ae167fdd5..f1bbeedc4 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/METADATA +++ b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: virtualenv -Version: 20.10.0 +Version: 20.13.0 Summary: Virtual Python Environment builder Home-page: https://virtualenv.pypa.io/ Author: Bernat Gabor @@ -35,7 +35,6 @@ Classifier: Topic :: Utilities Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7 Description-Content-Type: text/markdown License-File: LICENSE -Requires-Dist: backports.entry-points-selectable (>=1.0.4) Requires-Dist: distlib (<1,>=0.3.1) Requires-Dist: filelock (<4,>=3.2) Requires-Dist: platformdirs (<3,>=2) @@ -71,7 +70,6 @@ Requires-Dist: packaging (>=20.0) ; (python_version > "3.4") and extra == 'testi [![PyPI - Downloads](https://img.shields.io/pypi/dm/virtualenv?style=flat-square)](https://pypistats.org/packages/virtualenv) [![PyPI - License](https://img.shields.io/pypi/l/virtualenv?style=flat-square)](https://opensource.org/licenses/MIT) [![Build Status](https://github.com/pypa/virtualenv/workflows/check/badge.svg?branch=main&event=push)](https://github.com/pypa/virtualenv/actions?query=workflow%3Acheck) -[![codecov](https://codecov.io/gh/pypa/virtualenv/branch/main/graph/badge.svg)](https://codecov.io/gh/pypa/virtualenv) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square)](https://github.com/psf/black) diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/RECORD b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/RECORD similarity index 86% rename from 
src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/RECORD rename to src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/RECORD index 9247664ba..4a9395dc2 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/RECORD +++ b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/RECORD @@ -1,13 +1,13 @@ ../../bin/virtualenv,sha256=9uwG4VGVYupz1JIRWbbouIYGI2a-wzwFlRnMuJibupk,323 -virtualenv-20.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -virtualenv-20.10.0.dist-info/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074 -virtualenv-20.10.0.dist-info/METADATA,sha256=Pq3BZpF-bcin97V0MRQ3WbVc1RuII457FhmZZBlLa4s,4834 -virtualenv-20.10.0.dist-info/RECORD,, -virtualenv-20.10.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -virtualenv-20.10.0.dist-info/WHEEL,sha256=WzZ8cwjh8l0jtULNjYq1Hpr-WCqCRgPr--TX4P5I1Wo,110 -virtualenv-20.10.0.dist-info/entry_points.txt,sha256=6zmFm_CDLesgofz69n3kNlv41oMrNAObxIXIpH8BcLA,1621 -virtualenv-20.10.0.dist-info/top_level.txt,sha256=JV-LVlC8YeIw1DgiYI0hEot7tgFy5IWdKVcSG7NyzaI,11 -virtualenv-20.10.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +virtualenv-20.13.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +virtualenv-20.13.0.dist-info/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074 +virtualenv-20.13.0.dist-info/METADATA,sha256=EypJ26IqjGoF6joNjcvbSJeAJUkqT7O9Y85EQAHryeA,4656 +virtualenv-20.13.0.dist-info/RECORD,, +virtualenv-20.13.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv-20.13.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +virtualenv-20.13.0.dist-info/entry_points.txt,sha256=fZvQAmsETUCLoxi8dBcQPIYaNW_xd-zWhenomSPxa7E,1732 +virtualenv-20.13.0.dist-info/top_level.txt,sha256=JV-LVlC8YeIw1DgiYI0hEot7tgFy5IWdKVcSG7NyzaI,11 +virtualenv-20.13.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 virtualenv/__init__.py,sha256=SMvpjz4VJ3vJ_yfDDPzJAdi2GJOYd_UBXXuvImO07gk,205 virtualenv/__main__.py,sha256=ypkUDuc5Q8XVyFpW-wEWgzx0LuAdK1yF2FsCUYlgPEk,2900 virtualenv/__pycache__/__init__.cpython-39.pyc,, @@ -33,7 +33,7 @@ virtualenv/activation/cshell/__pycache__/__init__.cpython-39.pyc,, virtualenv/activation/cshell/activate.csh,sha256=wm_0n7o83RXSTUJsWc2PPV33Yh9Gad8AJQIuRvL6EYg,1471 virtualenv/activation/fish/__init__.py,sha256=hDkJq1P1wK2qm6BXydXWA9GMkBpj-TaejbKSceFnGZU,251 virtualenv/activation/fish/__pycache__/__init__.cpython-39.pyc,, -virtualenv/activation/fish/activate.fish,sha256=V7XWj24MKkc3dZHiPBsJ20WjjR7Ybqgdp0xGFuxzFBU,3060 +virtualenv/activation/fish/activate.fish,sha256=FkJpM8png8v9Sk3yGNJCcGzUKAAMF1Hz0dYrskut0z0,3056 virtualenv/activation/nushell/__init__.py,sha256=glvGnNDjhHEzUhtcQNBtD_1lRWZvqJNQkWtuXvOiAJw,1078 virtualenv/activation/nushell/__pycache__/__init__.cpython-39.pyc,, virtualenv/activation/nushell/activate.nu,sha256=6ZwkehNka4J_fJP-vQQoco9VJgSZVV4_-yEYR5aHb9o,1309 @@ -56,7 +56,7 @@ virtualenv/app_data/__pycache__/via_tempdir.cpython-39.pyc,, virtualenv/app_data/base.py,sha256=dbS5Maob1-Cqs6EVqTmmbjAGeNYA1iw3pmdgYPWCJak,2129 virtualenv/app_data/na.py,sha256=N2lR5VV4coM4Lym_E-nW_8Mh3x5-U6Jsq9x4wJn3wBM,1310 virtualenv/app_data/read_only.py,sha256=MD-4Bl2SZZiGw0g8qZy0YLBGZGCuFYXnAEvWboF1PSc,1006 -virtualenv/app_data/via_disk_folder.py,sha256=6sICLoJn7UvDYl-DhEGJ1lj-pqSQy_h1GxJTxeGPUcg,5598 
+virtualenv/app_data/via_disk_folder.py,sha256=UP1gx7WzG3USf_rQdKR9F5w_zuSPQQaqvbd58cqJHLw,5624 virtualenv/app_data/via_tempdir.py,sha256=Z_-PoU7qeZe-idzi3nqys4FX0rfsRgOQ9_7XwX3hxSA,770 virtualenv/config/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 virtualenv/config/__pycache__/__init__.cpython-39.pyc,, @@ -103,15 +103,15 @@ virtualenv/create/via_global_ref/builtin/cpython/__pycache__/mac_os.cpython-39.p virtualenv/create/via_global_ref/builtin/cpython/common.py,sha256=U7EvB9-2DlOQTGrTyPrEcItEbJ1sFBzo1EAOcAIjQ5Q,2392 virtualenv/create/via_global_ref/builtin/cpython/cpython2.py,sha256=NixgnZITjDP8qBWlnN40lUTeaPNoDUKPSS2ByUBs7Fk,3752 virtualenv/create/via_global_ref/builtin/cpython/cpython3.py,sha256=jz4mbuhu9BcbcZSwmKBT8eSIvKi6kgkx_V-fuAmtmjc,3312 -virtualenv/create/via_global_ref/builtin/cpython/mac_os.py,sha256=ZsBHbjuuXn8chJWIqF_hlervx1ZcpDDuPCaKVpOx4iQ,12620 +virtualenv/create/via_global_ref/builtin/cpython/mac_os.py,sha256=gAtr3u0G3akU2eczFK4Q11MWSlLiNIrBS25OAR3jyQ8,14324 virtualenv/create/via_global_ref/builtin/pypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/create/via_global_ref/builtin/pypy/__pycache__/__init__.cpython-39.pyc,, virtualenv/create/via_global_ref/builtin/pypy/__pycache__/common.cpython-39.pyc,, virtualenv/create/via_global_ref/builtin/pypy/__pycache__/pypy2.cpython-39.pyc,, virtualenv/create/via_global_ref/builtin/pypy/__pycache__/pypy3.cpython-39.pyc,, -virtualenv/create/via_global_ref/builtin/pypy/common.py,sha256=-t-TZxCTJwpIh_oRsDyv5IilH19jKqJrZa27zWN_8Ws,1816 -virtualenv/create/via_global_ref/builtin/pypy/pypy2.py,sha256=kixO53Iy1ab0-KKPWEuQEJHoWEkKXjA1LpVIBhnz8C8,3563 -virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=Dfb7qIw2MYADaudZnhHwvu7XEhxs-_FKutpuiSmMvu0,2551 +virtualenv/create/via_global_ref/builtin/pypy/common.py,sha256=t7XRfNtWFuVpZsnBAc-9Mgc-UXN8gbFs514jQ9viqsU,1762 +virtualenv/create/via_global_ref/builtin/pypy/pypy2.py,sha256=YbakUalkwuVVJp0leN1Kuuk3hzx74sm8kzOzviTNSBc,3725 +virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=MD-gP0A4v-Qeg0hxcvRxMYzjHD6e25vNR9wKi5_nDvo,2777 virtualenv/create/via_global_ref/builtin/python2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/create/via_global_ref/builtin/python2/__pycache__/__init__.cpython-39.pyc,, virtualenv/create/via_global_ref/builtin/python2/__pycache__/python2.cpython-39.pyc,, @@ -132,7 +132,7 @@ virtualenv/discovery/__pycache__/py_spec.cpython-39.pyc,, virtualenv/discovery/builtin.py,sha256=LYvYqlyfrzULHeAwm4J4B9y4hLJHczNsivMtvsCoX78,6384 virtualenv/discovery/cached_py_info.py,sha256=Lo74BQutsLR8z2JFKXdX6HizKrP5-sxsPCCVQZRzB8Q,5312 virtualenv/discovery/discover.py,sha256=dq-yReN-vZHs9ZCBEOHN97KZErE2K26bPZRfVnNESIU,1241 -virtualenv/discovery/py_info.py,sha256=I49_I0U1YgF9tbAqx2sloZIyw4kMK7A3Yd1ZyUz1yyc,23261 +virtualenv/discovery/py_info.py,sha256=sa4Eg6ijopVJ3WTCoprvpLfjePFX3NdLy5kjZJZCB9s,23382 virtualenv/discovery/py_spec.py,sha256=wQhLzCfXoSPsAAO9nm5-I2lNolVDux4W2vPSUfJGjlc,4790 virtualenv/discovery/windows/__init__.py,sha256=9LjYTjiPygcERmWUugyHcv5jHmZSfHO3H2RvwzIvQvU,1200 virtualenv/discovery/windows/__pycache__/__init__.cpython-39.pyc,, @@ -151,7 +151,7 @@ virtualenv/run/plugin/__pycache__/creators.cpython-39.pyc,, virtualenv/run/plugin/__pycache__/discovery.cpython-39.pyc,, virtualenv/run/plugin/__pycache__/seeders.cpython-39.pyc,, virtualenv/run/plugin/activators.py,sha256=f6ZdG-Na6_jquTuG-E7kT40CAknGeB7YLgZzIPnDbsM,2225 
-virtualenv/run/plugin/base.py,sha256=_lpRytNmPnkzJzzQhCy2LXWp0tJbDVGdZJkfabx8Zwc,1854 +virtualenv/run/plugin/base.py,sha256=nm5_Yal-XMFYmIxZ7HPpB6tv7cXih1aYHen-PHMS9oU,2277 virtualenv/run/plugin/creators.py,sha256=PIxJ85KmrQU7lUO-r8Znxbb4lTEzwHggc9lcDqmt2tc,3494 virtualenv/run/plugin/discovery.py,sha256=M1S8CZPqUsIpJ88FT8CyGB32lRuflEY5pgK_XcCnK60,1156 virtualenv/run/plugin/seeders.py,sha256=c1mhzu0HNzKdif6YUV35fuAOS0XHFJz3TtccLW5fWG0,1074 @@ -184,17 +184,18 @@ virtualenv/seed/wheels/__pycache__/acquire.cpython-39.pyc,, virtualenv/seed/wheels/__pycache__/bundle.cpython-39.pyc,, virtualenv/seed/wheels/__pycache__/periodic_update.cpython-39.pyc,, virtualenv/seed/wheels/__pycache__/util.cpython-39.pyc,, -virtualenv/seed/wheels/acquire.py,sha256=SJuKhqJPFC_Sn4lQttI_pAGZUXmO3LGgVaQzvGy6WGo,4472 -virtualenv/seed/wheels/bundle.py,sha256=3tw1CNANa7gvvPR1H-_P-ofsMhluKh81yvNUK4-hphg,1845 -virtualenv/seed/wheels/embed/__init__.py,sha256=mNgjpdtAkl-A_K3MOmuGd_7lPtiwE5tuuPiMnJQRB3Y,1787 +virtualenv/seed/wheels/acquire.py,sha256=NsQ-uf-h4vHRIvpEBw9h5foWEpcLPKnqcx4iDkIqXkY,4737 +virtualenv/seed/wheels/bundle.py,sha256=ovNzr-J4zn1EEvhIse4zeElYqrzUB_lyl-noZq_mk_I,1887 +virtualenv/seed/wheels/embed/__init__.py,sha256=5rTDfBiHJP9VaSxjrG4jv2nQHXF-fIWOG5HFSxxEGF8,1968 virtualenv/seed/wheels/embed/__pycache__/__init__.cpython-39.pyc,, virtualenv/seed/wheels/embed/pip-20.3.4-py2.py3-none-any.whl,sha256=IXrlFhoOCMD7hzhYgG40eMl3XK_85RaLUOyIXjWMGZ0,1522101 virtualenv/seed/wheels/embed/pip-21.3.1-py3-none-any.whl,sha256=3q8y3Nmrgh41nNgzB4a80HdgS1xXMMCwlu2kb5XCSi0,1723581 virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl,sha256=J6cUwJJTE05gpvpoEw94xwN-VWLE8h-PMY8q6QDRUtU,583493 virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whl,sha256=LCQqCFb7rX775WDfSnrdkyTzQM9I30NlHpYEkkRmeUo,785194 -virtualenv/seed/wheels/embed/setuptools-58.3.0-py3-none-any.whl,sha256=GiTw5cFLka1oEHRackJyH9UBHtFkVTE2-Ub3aLBlWfc,946220 -virtualenv/seed/wheels/embed/wheel-0.37.0-py2.py3-none-any.whl,sha256=IQFLK9k8bQA0trpdNeTrKENA4J1jxZrvb8FLDzRhRv0,35161 -virtualenv/seed/wheels/periodic_update.py,sha256=D9WwkJsB4W-THEyFKHWxa2QYmIROepcYi-VjUwAbKpg,12874 +virtualenv/seed/wheels/embed/setuptools-59.6.0-py3-none-any.whl,sha256=TOkvHh-PASM-6ZUsBPa4HR4Ck51uG0iEKBVJdKTQeD4,952597 +virtualenv/seed/wheels/embed/setuptools-60.2.0-py3-none-any.whl,sha256=XImxoUpnrF8JVvHLCut9HT9Mi6Tk4at78a9JM_mi8P4,953071 +virtualenv/seed/wheels/embed/wheel-0.37.1-py2.py3-none-any.whl,sha256=S9zX2EATgIYSbNCSVNxhlftPxvAcBQodcjbyYw2x0io,35301 +virtualenv/seed/wheels/periodic_update.py,sha256=JHR82w7TSEJaqa73hgoRDloZepa8gRH9k7nWMzOCAVk,15361 virtualenv/seed/wheels/util.py,sha256=Zdo76KEDqbNmM5u9JTuyu5uzEN_fQ4oj6qHOt0h0o1M,3960 virtualenv/util/__init__.py,sha256=om6Hs2lH5igf5lkcSmQFiU7iMZ0Wx4dmSlMc6XW_Llg,199 virtualenv/util/__pycache__/__init__.cpython-39.pyc,, @@ -203,7 +204,7 @@ virtualenv/util/__pycache__/lock.cpython-39.pyc,, virtualenv/util/__pycache__/six.cpython-39.pyc,, virtualenv/util/__pycache__/zipapp.cpython-39.pyc,, virtualenv/util/error.py,sha256=SRSZlXvMYQuJwxoUfNhlAyo3VwrAnIsZemSwPOxpjns,352 -virtualenv/util/lock.py,sha256=ZVE7NZdZG26zWqJL3ihvTz0vrsFQMFUbf8-B0FV_iP0,4812 +virtualenv/util/lock.py,sha256=qjbBwtJFbNrqapalxtzEnAEhqOapATmEw27jKsuvSIQ,4787 virtualenv/util/path/__init__.py,sha256=2aA04ZAJ53nk3ZKPxvL_DvOwismyH8r_WCujrObxT5o,401 virtualenv/util/path/__pycache__/__init__.cpython-39.pyc,, virtualenv/util/path/__pycache__/_permission.cpython-39.pyc,, @@ -214,7 +215,7 @@ 
virtualenv/util/path/_pathlib/__pycache__/__init__.cpython-39.pyc,, virtualenv/util/path/_pathlib/__pycache__/via_os_path.cpython-39.pyc,, virtualenv/util/path/_pathlib/via_os_path.py,sha256=fYDFAX483zVvC9hAOAC9FYtrGdZethS0vtYtKsL5r-s,3772 virtualenv/util/path/_permission.py,sha256=XpO2vGAk_92_biD4MEQcAQq2Zc8_rpm3M3n_hMUA1rw,745 -virtualenv/util/path/_sync.py,sha256=rheUrGsCqmhMwNs-uc5rDthNSUlsOrBJPoK8KZj3O1o,2393 +virtualenv/util/path/_sync.py,sha256=G-LJ8gZBL8cbYD6PMZuUw7gudKPmvKW6vsq6CkvUUH4,2392 virtualenv/util/path/_win.py,sha256=cxXH1z5pyqwOlcq8PmRC38ASfB1QZjEyPEF3aQmXRb8,709 virtualenv/util/six.py,sha256=_8KWXUWi3-AaFmz4LkdyNra-uNuf70vlxwjN7oeRo8g,1463 virtualenv/util/subprocess/__init__.py,sha256=1UmFrdBv2sVeUfZbDcO2yZpe28AE0ULOu9dRKlpJaa0,801 @@ -222,4 +223,4 @@ virtualenv/util/subprocess/__pycache__/__init__.cpython-39.pyc,, virtualenv/util/subprocess/__pycache__/_win_subprocess.cpython-39.pyc,, virtualenv/util/subprocess/_win_subprocess.py,sha256=nnYCcATKY_5ektDgGlk6OTdDQNyF_onbpfzCf13J5Qs,5697 virtualenv/util/zipapp.py,sha256=jtf4Vn7XBnjPs_B_ObIQv_x4pFlIlPKAWHYLFV59h6U,1054 -virtualenv/version.py,sha256=wJn2lbbirq93Uqq10IEgHgpcuEsxa_KGKLWyk5Afl5c,66 +virtualenv/version.py,sha256=3Hkpk9HOWydvrtK_FGtFb5SppHBqfi0WPQOP6CqOH-c,66 diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/REQUESTED b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/REQUESTED new file mode 100644 index 000000000..e69de29bb diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/WHEEL b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/WHEEL new file mode 100644 index 000000000..0b18a2811 --- /dev/null +++ b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/entry_points.txt b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/entry_points.txt similarity index 93% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/entry_points.txt rename to src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/entry_points.txt index c1ffbb19a..2bc2aed0f 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/entry_points.txt +++ b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/entry_points.txt @@ -11,6 +11,7 @@ powershell = virtualenv.activation.powershell:PowerShellActivator python = virtualenv.activation.python:PythonActivator [virtualenv.create] +cpython2-mac-arm-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython2macOsArmFramework cpython2-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython2macOsFramework cpython2-posix = virtualenv.create.via_global_ref.builtin.cpython.cpython2:CPython2Posix cpython2-win = virtualenv.create.via_global_ref.builtin.cpython.cpython2:CPython2Windows diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/top_level.txt b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/top_level.txt similarity index 100% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/top_level.txt rename to src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/top_level.txt diff --git a/src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/zip-safe b/src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/zip-safe 
similarity index 100% rename from src/main/python/pybuilder/_vendor/virtualenv-20.10.0.dist-info/zip-safe rename to src/main/python/pybuilder/_vendor/virtualenv-20.13.0.dist-info/zip-safe diff --git a/src/main/python/pybuilder/_vendor/virtualenv/activation/fish/activate.fish b/src/main/python/pybuilder/_vendor/virtualenv/activation/fish/activate.fish index 3cc3c93cb..62f631ead 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/activation/fish/activate.fish +++ b/src/main/python/pybuilder/_vendor/virtualenv/activation/fish/activate.fish @@ -21,7 +21,7 @@ function deactivate -d 'Exit virtualenv mode and return to the normal environmen if test (echo $FISH_VERSION | head -c 1) -lt 3 set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH") else - set -gx PATH "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH end set -e _OLD_VIRTUAL_PATH end @@ -63,7 +63,7 @@ set -gx VIRTUAL_ENV '__VIRTUAL_ENV__' if test (echo $FISH_VERSION | head -c 1) -lt 3 set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) else - set -gx _OLD_VIRTUAL_PATH "$PATH" + set -gx _OLD_VIRTUAL_PATH $PATH end set -gx PATH "$VIRTUAL_ENV"'/__BIN_NAME__' $PATH diff --git a/src/main/python/pybuilder/_vendor/virtualenv/app_data/via_disk_folder.py b/src/main/python/pybuilder/_vendor/virtualenv/app_data/via_disk_folder.py index 2691b7ba2..3f44fe4f6 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/app_data/via_disk_folder.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/app_data/via_disk_folder.py @@ -14,7 +14,7 @@ │ │ └── -> CopyPipInstall / SymlinkPipInstall │ │ └── -> pip-20.1.1-py2.py3-none-any │ └── embed -│ └── 1 +│ └── 3 -> json format versioning │ └── *.json -> for every distribution contains data about newer embed versions and releases └─── unzip └── @@ -101,7 +101,7 @@ def py_info_clear(self): filename.unlink() def embed_update_log(self, distribution, for_py_version): - return EmbedDistributionUpdateStoreDisk(self.lock / "wheel" / for_py_version / "embed" / "1", distribution) + return EmbedDistributionUpdateStoreDisk(self.lock / "wheel" / for_py_version / "embed" / "3", distribution) @property def house(self): diff --git a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py index e215b7511..6541d24ed 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py @@ -68,6 +68,12 @@ def image_ref(cls, interpreter): class CPython2macOsFramework(CPythonmacOsFramework, CPython2PosixBase): + @classmethod + def can_create(cls, interpreter): + if not IS_MAC_ARM64 and super(CPython2macOsFramework, cls).can_describe(interpreter): + return super(CPython2macOsFramework, cls).can_create(interpreter) + return False + @classmethod def image_ref(cls, interpreter): return Path(interpreter.prefix) / "Python" @@ -103,12 +109,40 @@ def reload_code(self): ) return result + +class CPython2macOsArmFramework(CPython2macOsFramework, CPythonmacOsFramework, CPython2PosixBase): @classmethod def can_create(cls, interpreter): - if IS_MAC_ARM64: - return False - else: + if IS_MAC_ARM64 and super(CPythonmacOsFramework, cls).can_describe(interpreter): return super(CPythonmacOsFramework, cls).can_create(interpreter) + return False + + def create(self): + super(CPython2macOsFramework, self).create() + self.fix_signature() + + def fix_signature(self): + """ + On Apple M1 machines 
(arm64 chips), rewriting the python executable invalidates its signature.
+        In python2 this results in an unusable python exe which just dies.
+        As a temporary workaround we can codesign the python exe during the creation process.
+        """
+        exe = self.exe
+        try:
+            logging.debug("Changing signature of copied python exe %s", exe)
+            bak_dir = exe.parent / "bk"
+            # Reset the signing on Darwin since the exe has been modified.
+            # Note codesign fails on the original exe; it needs to be copied and moved back.
+            bak_dir.mkdir(parents=True, exist_ok=True)
+            subprocess.check_call(["cp", exe, bak_dir])
+            subprocess.check_call(["mv", bak_dir / exe.name, exe])
+            bak_dir.rmdir()
+            cmd = ["codesign", "-s", "-", "--preserve-metadata=identifier,entitlements,flags,runtime", "-f", exe]
+            logging.debug("Changing Signature: %s", cmd)
+            subprocess.check_call(cmd)
+        except Exception:
+            logging.fatal("Could not change MacOS code signing on copied python exe at %s", exe)
+            raise
 
 
 class CPython3macOsFramework(CPythonmacOsFramework, CPython3, CPythonPosix):
diff --git a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/common.py b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/common.py
index 6627217dc..db8612ee8 100644
--- a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/common.py
+++ b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/common.py
@@ -43,11 +43,9 @@ def sources(cls, interpreter):
     def _add_shared_libs(cls, interpreter):
         # https://bitbucket.org/pypy/pypy/issue/1922/future-proofing-virtualenv
         python_dir = Path(interpreter.system_executable).resolve().parent
-        for libname in cls._shared_libs():
-            src = python_dir / libname
-            if src.exists():
-                yield src
+        for src in cls._shared_libs(python_dir):
+            yield src
 
     @classmethod
-    def _shared_libs(cls):
+    def _shared_libs(cls, python_dir):
         raise NotImplementedError
diff --git a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py
index d09e78ca8..f940a0fd6 100644
--- a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py
+++ b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py
@@ -87,8 +87,8 @@ def modules(cls):
         return super(PyPy2Posix, cls).modules() + ["posixpath"]
 
     @classmethod
-    def _shared_libs(cls):
-        return ["libpypy-c.so", "libpypy-c.dylib"]
+    def _shared_libs(cls, python_dir):
+        return python_dir.glob("libpypy*.*")
 
     @property
     def lib(self):
@@ -111,8 +111,12 @@ def modules(cls):
         return super(Pypy2Windows, cls).modules() + ["ntpath"]
 
     @classmethod
-    def _shared_libs(cls):
-        return ["libpypy-c.dll", "libffi-7.dll", "libffi-8.dll"]
+    def _shared_libs(cls, python_dir):
+        # No glob in python2 PathLib
+        for candidate in ["libpypy-c.dll", "libffi-7.dll", "libffi-8.dll"]:
+            dll = python_dir / candidate
+            if dll.exists():
+                yield dll
 
     @classmethod
     def sources(cls, interpreter):
diff --git a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
index b8bf16684..541c29e9c 100644
--- a/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
+++ b/src/main/python/pybuilder/_vendor/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
@@ -23,7 +23,7 @@ def exe_names(cls, interpreter):
 
 class
PyPy3Posix(PyPy3, PosixSupports): - """PyPy 2 on POSIX""" + """PyPy 3 on POSIX""" @property def stdlib(self): @@ -31,8 +31,9 @@ def stdlib(self): return self.dest / "lib" / "pypy{}".format(self.interpreter.version_release_str) / "site-packages" @classmethod - def _shared_libs(cls): - return ["libpypy3-c.so", "libpypy3-c.dylib"] + def _shared_libs(cls, python_dir): + # glob for libpypy3-c.so, libpypy3-c.dylib, libpypy3.9-c.so ... + return python_dir.glob("libpypy3*.*") def to_lib(self, src): return self.dest / "lib" / src.name @@ -71,5 +72,9 @@ def bin_dir(self): return self.dest / "Scripts" @classmethod - def _shared_libs(cls): - return ["libpypy3-c.dll", "libffi-7.dll", "libffi-8.dll"] + def _shared_libs(cls, python_dir): + # glob for libpypy*.dll and libffi*.dll + for pattern in ["libpypy*.dll", "libffi*.dll"]: + srcs = python_dir.glob(pattern) + for src in srcs: + yield src diff --git a/src/main/python/pybuilder/_vendor/virtualenv/discovery/py_info.py b/src/main/python/pybuilder/_vendor/virtualenv/discovery/py_info.py index 0238d91d0..f9feca525 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/discovery/py_info.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/discovery/py_info.py @@ -152,7 +152,11 @@ def _distutils_install(): d = dist.Distribution({"script_args": "--no-user-cfg"}) # conf files not parsed so they do not hijack paths if hasattr(sys, "_framework"): sys._framework = None # disable macOS static paths for framework - i = d.get_command_obj("install", create=True) + + with warnings.catch_warnings(): # disable warning for PEP-632 + warnings.simplefilter("ignore") + i = d.get_command_obj("install", create=True) + i.prefix = os.sep # paths generated are relative to prefix that contains the path sep, this makes it relative i.finalize_options() result = {key: (getattr(i, "install_{}".format(key))[1:]).lstrip(os.sep) for key in SCHEME_KEYS} diff --git a/src/main/python/pybuilder/_vendor/virtualenv/run/plugin/base.py b/src/main/python/pybuilder/_vendor/virtualenv/run/plugin/base.py index a30ab871f..227efe1df 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/run/plugin/base.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/run/plugin/base.py @@ -1,8 +1,16 @@ from __future__ import absolute_import, unicode_literals +import sys from collections import OrderedDict -from ....backports.entry_points_selectable import entry_points +if sys.version_info >= (3, 8): + from importlib.metadata import entry_points + + importlib_metadata_version = () +else: + from ....importlib_metadata import entry_points, version + + importlib_metadata_version = tuple(int(i) for i in version("importlib_metadata").split(".")[:2]) class PluginLoader(object): @@ -11,7 +19,10 @@ class PluginLoader(object): @classmethod def entry_points_for(cls, key): - return OrderedDict((e.name, e.load()) for e in cls.entry_points().select(group=key)) + if sys.version_info >= (3, 10) or importlib_metadata_version >= (3, 6): + return OrderedDict((e.name, e.load()) for e in cls.entry_points().select(group=key)) + else: + return OrderedDict((e.name, e.load()) for e in cls.entry_points().get(key, {})) @staticmethod def entry_points(): diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/acquire.py b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/acquire.py index af3cf4585..3e6f79c0e 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/acquire.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/acquire.py @@ -10,6 +10,7 @@ from ...util.subprocess import 
Popen, subprocess from .bundle import from_bundle +from .periodic_update import add_wheel_to_update_log from .util import Version, Wheel, discover_wheels @@ -18,11 +19,14 @@ def get_wheel(distribution, version, for_py_version, search_dirs, download, app_ Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download """ # not all wheels are compatible with all python versions, so we need to py version qualify it - # 1. acquire from bundle - wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env) + wheel = None - # 2. download from the internet - if version not in Version.non_version and download: + if not download or version != Version.bundle: + # 1. acquire from bundle + wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env) + + if download and wheel is None and version != Version.embed: + # 2. download from the internet wheel = download_wheel( distribution=distribution, version_spec=Version.as_version_spec(version), @@ -32,6 +36,9 @@ def get_wheel(distribution, version, for_py_version, search_dirs, download, app_ to_folder=app_data.house, env=env, ) + if wheel is not None and app_data.can_update: + add_wheel_to_update_log(wheel, for_py_version, app_data) + return wheel diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/bundle.py b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/bundle.py index ab2fe5fa5..39cd3d336 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/bundle.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/bundle.py @@ -15,7 +15,9 @@ def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do if version != Version.embed: # 2. check if we have upgraded embed if app_data.can_update: - wheel = periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env) + wheel = periodic_update( + distribution, of_version, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env + ) # 3. 
acquire from extra search dir found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs) diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py index f738dc15a..23a19fbe8 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py @@ -5,43 +5,48 @@ BUNDLE_FOLDER = Path(__file__).absolute().parent BUNDLE_SUPPORT = { + "3.11": { + "pip": "pip-21.3.1-py3-none-any.whl", + "setuptools": "setuptools-60.2.0-py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", + }, "3.10": { "pip": "pip-21.3.1-py3-none-any.whl", - "setuptools": "setuptools-58.3.0-py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "setuptools": "setuptools-60.2.0-py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.9": { "pip": "pip-21.3.1-py3-none-any.whl", - "setuptools": "setuptools-58.3.0-py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "setuptools": "setuptools-60.2.0-py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.8": { "pip": "pip-21.3.1-py3-none-any.whl", - "setuptools": "setuptools-58.3.0-py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "setuptools": "setuptools-60.2.0-py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.7": { "pip": "pip-21.3.1-py3-none-any.whl", - "setuptools": "setuptools-58.3.0-py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "setuptools": "setuptools-60.2.0-py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.6": { "pip": "pip-21.3.1-py3-none-any.whl", - "setuptools": "setuptools-58.3.0-py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "setuptools": "setuptools-59.6.0-py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.5": { "pip": "pip-20.3.4-py2.py3-none-any.whl", "setuptools": "setuptools-50.3.2-py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "2.7": { "pip": "pip-20.3.4-py2.py3-none-any.whl", "setuptools": "setuptools-44.1.1-py2.py3-none-any.whl", - "wheel": "wheel-0.37.0-py2.py3-none-any.whl", + "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, } -MAX = "3.10" +MAX = "3.11" def get_embed_wheel(distribution, for_py_version): diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-58.3.0-py3-none-any.whl b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-59.6.0-py3-none-any.whl similarity index 67% rename from src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-58.3.0-py3-none-any.whl rename to src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-59.6.0-py3-none-any.whl index a5924e7a9..08d7e943c 100644 Binary files a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-58.3.0-py3-none-any.whl and b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-59.6.0-py3-none-any.whl differ diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-60.2.0-py3-none-any.whl b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-60.2.0-py3-none-any.whl new file mode 100644 index 000000000..bb533dca4 Binary files /dev/null and 
b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/setuptools-60.2.0-py3-none-any.whl differ diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/wheel-0.37.0-py2.py3-none-any.whl b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/wheel-0.37.1-py2.py3-none-any.whl similarity index 74% rename from src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/wheel-0.37.0-py2.py3-none-any.whl rename to src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/wheel-0.37.1-py2.py3-none-any.whl index 8c3201ced..e6c4d95f5 100644 Binary files a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/wheel-0.37.0-py2.py3-none-any.whl and b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/wheel-0.37.1-py2.py3-none-any.whl differ diff --git a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/periodic_update.py b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/periodic_update.py index e08e4e931..040eadc60 100644 --- a/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/periodic_update.py +++ b/src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/periodic_update.py @@ -36,23 +36,38 @@ pass # pragma: no cov -def periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env): +GRACE_PERIOD_CI = timedelta(hours=1) # prevent version switch in the middle of a CI run +GRACE_PERIOD_MINOR = timedelta(days=28) +UPDATE_PERIOD = timedelta(days=14) +UPDATE_ABORTED_DELAY = timedelta(hours=1) + + +def periodic_update(distribution, of_version, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env): if do_periodic_update: handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env) now = datetime.now() + def _update_wheel(ver): + updated_wheel = Wheel(app_data.house / ver.filename) + logging.debug("using %supdated wheel %s", "periodically " if updated_wheel else "", updated_wheel) + return updated_wheel + u_log = UpdateLog.from_app_data(app_data, distribution, for_py_version) - u_log_older_than_hour = now - u_log.completed > timedelta(hours=1) if u_log.completed is not None else False - for _, group in groupby(u_log.versions, key=lambda v: v.wheel.version_tuple[0:2]): - version = next(group) # use only latest patch version per minor, earlier assumed to be buggy - if wheel is not None and Path(version.filename).name == wheel.name: - break - if u_log.periodic is False or (u_log_older_than_hour and version.use(now)): - updated_wheel = Wheel(app_data.house / version.filename) - logging.debug("using %supdated wheel %s", "periodically " if updated_wheel else "", updated_wheel) - wheel = updated_wheel - break + if of_version is None: + for _, group in groupby(u_log.versions, key=lambda v: v.wheel.version_tuple[0:2]): + # use only latest patch version per minor, earlier assumed to be buggy + all_patches = list(group) + ignore_grace_period_minor = any(version for version in all_patches if version.use(now)) + for version in all_patches: + if wheel is not None and Path(version.filename).name == wheel.name: + return wheel + if version.use(now, ignore_grace_period_minor): + return _update_wheel(version) + else: + for version in u_log.versions: + if version.wheel.version == of_version: + return _update_wheel(version) return wheel @@ -67,6 +82,19 @@ def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_dat trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True, env=env) +def 
@@ -67,6 +82,19 @@ def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_dat
     trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True, env=env)


+def add_wheel_to_update_log(wheel, for_py_version, app_data):
+    embed_update_log = app_data.embed_update_log(wheel.distribution, for_py_version)
+    logging.debug("adding %s information to %s", wheel.name, embed_update_log.file)
+    u_log = UpdateLog.from_dict(embed_update_log.read())
+    if any(version.filename == wheel.name for version in u_log.versions):
+        logging.warning("%s already present in %s", wheel.name, embed_update_log.file)
+        return
+    # we don't need a release date for sources other than "periodic"
+    version = NewVersion(wheel.name, datetime.now(), None, "download")
+    u_log.versions.append(version)  # always write at the end for proper updates
+    embed_update_log.write(u_log.to_dict())
+
+
 DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ"

@@ -79,10 +107,11 @@ def load_datetime(value):


 class NewVersion(object):
-    def __init__(self, filename, found_date, release_date):
+    def __init__(self, filename, found_date, release_date, source):
         self.filename = filename
         self.found_date = found_date
         self.release_date = release_date
+        self.source = source

     @classmethod
     def from_dict(cls, dictionary):
@@ -90,6 +119,7 @@ def from_dict(cls, dictionary):
             filename=dictionary["filename"],
             found_date=load_datetime(dictionary["found_date"]),
             release_date=load_datetime(dictionary["release_date"]),
+            source=dictionary["source"],
         )

     def to_dict(self):
@@ -97,23 +127,32 @@ def to_dict(self):
             "filename": self.filename,
             "release_date": dump_datetime(self.release_date),
             "found_date": dump_datetime(self.found_date),
+            "source": self.source,
         }

-    def use(self, now):
-        compare_from = self.release_date or self.found_date
-        return now - compare_from >= timedelta(days=28)
+    def use(self, now, ignore_grace_period_minor=False, ignore_grace_period_ci=False):
+        if self.source == "manual":
+            return True
+        elif self.source == "periodic":
+            if self.found_date < now - GRACE_PERIOD_CI or ignore_grace_period_ci:
+                if not ignore_grace_period_minor:
+                    compare_from = self.release_date or self.found_date
+                    return now - compare_from >= GRACE_PERIOD_MINOR
+                return True
+        return False

     def __repr__(self):
-        return "{}(filename={}), found_date={}, release_date={})".format(
+        return "{}(filename={}), found_date={}, release_date={}, source={})".format(
             self.__class__.__name__,
             self.filename,
             self.found_date,
             self.release_date,
+            self.source,
         )

     def __eq__(self, other):
         return type(self) == type(other) and all(
-            getattr(self, k) == getattr(other, k) for k in ["filename", "release_date", "found_date"]
+            getattr(self, k) == getattr(other, k) for k in ["filename", "release_date", "found_date", "source"]
         )

     def __ne__(self, other):
@@ -161,12 +200,12 @@ def needs_update(self):
         if self.completed is None:  # never completed
             return self._check_start(now)
         else:
-            if now - self.completed <= timedelta(days=14):
+            if now - self.completed <= UPDATE_PERIOD:
                 return False
             return self._check_start(now)

     def _check_start(self, now):
-        return self.started is None or now - self.started > timedelta(hours=1)
+        return self.started is None or now - self.started > UPDATE_ABORTED_DELAY
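The behaviour of the new use() can be read straight off the constants above: "manual" always wins, "periodic" must clear the one-hour CI grace period and (unless told otherwise) the 28-day minor grace period, and any other source — such as the new "download" — never auto-applies. Illustrative cases, with invented timestamps and filename:

from datetime import datetime, timedelta

now = datetime.now()
name = "pip-21.3.1-py3-none-any.whl"  # any wheel filename works here
assert NewVersion(name, now, None, "manual").use(now)
# found 30 minutes ago: still inside GRACE_PERIOD_CI, so hidden
assert not NewVersion(name, now - timedelta(minutes=30), None, "periodic").use(now)
# released 29 days ago: past GRACE_PERIOD_MINOR, so eligible
old = now - timedelta(days=29)
assert NewVersion(name, old, old, "periodic").use(now)
# "download" entries are recorded but never picked up automatically
assert not NewVersion(name, old, old, "download").use(now)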
 def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, env, periodic):
@@ -222,12 +261,28 @@ def _run_do_update(app_data, distribution, embed_filename, for_py_version, perio
     embed_update_log = app_data.embed_update_log(distribution, for_py_version)
     u_log = UpdateLog.from_dict(embed_update_log.read())
     now = datetime.now()
+
+    update_versions, other_versions = [], []
+    for version in u_log.versions:
+        if version.source in {"periodic", "manual"}:
+            update_versions.append(version)
+        else:
+            other_versions.append(version)
+
+    if periodic:
+        source = "periodic"
+    else:
+        source = "manual"
+        # mark the most recent one as source "manual"
+        if update_versions:
+            update_versions[0].source = source
+
     if wheel_filename is not None:
         dest = wheelhouse / wheel_filename.name
         if not dest.exists():
             copy2(str(wheel_filename), str(wheelhouse))
-    last, last_version, versions = None, None, []
-    while last is None or not last.use(now):
+    last, last_version, versions, filenames = None, None, [], set()
+    while last is None or not last.use(now, ignore_grace_period_ci=True):
         download_time = datetime.now()
         dest = acquire.download_wheel(
             distribution=distribution,
@@ -238,13 +293,14 @@ def _run_do_update(app_data, distribution, embed_filename, for_py_version, perio
             to_folder=wheelhouse,
             env=os.environ,
         )
-        if dest is None or (u_log.versions and u_log.versions[0].filename == dest.name):
+        if dest is None or (update_versions and update_versions[0].filename == dest.name):
             break
         release_date = release_date_for_wheel_path(dest.path)
-        last = NewVersion(filename=dest.path.name, release_date=release_date, found_date=download_time)
+        last = NewVersion(filename=dest.path.name, release_date=release_date, found_date=download_time, source=source)
         logging.info("detected %s in %s", last, datetime.now() - download_time)
         versions.append(last)
-        last_wheel = Wheel(Path(last.filename))
+        filenames.add(last.filename)
+        last_wheel = last.wheel
         last_version = last_wheel.version
         if embed_version is not None:
             if embed_version >= last_wheel.version_tuple:  # stop download if we reach the embed version
@@ -252,7 +308,9 @@
     u_log.periodic = periodic
     if not u_log.periodic:
         u_log.started = now
-    u_log.versions = versions + u_log.versions
+    # update other_versions by removing version we just found
+    other_versions = [version for version in other_versions if version.filename not in filenames]
+    u_log.versions = versions + update_versions + other_versions
     u_log.completed = datetime.now()
     embed_update_log.write(u_log.to_dict())
     return versions
@@ -357,6 +415,7 @@ def _run_manual_upgrade(app_data, distribution, for_py_version, env):


 __all__ = (
+    "add_wheel_to_update_log",
     "periodic_update",
     "do_update",
     "manual_upgrade",
diff --git a/src/main/python/pybuilder/_vendor/virtualenv/util/lock.py b/src/main/python/pybuilder/_vendor/virtualenv/util/lock.py
index 5275ac149..c8e7afbf9 100644
--- a/src/main/python/pybuilder/_vendor/virtualenv/util/lock.py
+++ b/src/main/python/pybuilder/_vendor/virtualenv/util/lock.py
@@ -25,10 +25,10 @@ def __init__(self, lock_file):
         self.count = 0
         self.thread_safe = RLock()

-    def acquire(self, timeout=None, poll_intervall=0.05):
+    def acquire(self, timeout=None, poll_interval=0.05):
         with self.thread_safe:
             if self.count == 0:
-                super(_CountedFileLock, self).acquire(timeout=timeout, poll_intervall=poll_intervall)
+                super(_CountedFileLock, self).acquire(timeout, poll_interval)
             self.count += 1

     def release(self, force=False):
diff --git a/src/main/python/pybuilder/_vendor/virtualenv/util/path/_sync.py b/src/main/python/pybuilder/_vendor/virtualenv/util/path/_sync.py
index a4f960970..a048a6f88 100644
--- a/src/main/python/pybuilder/_vendor/virtualenv/util/path/_sync.py
+++ b/src/main/python/pybuilder/_vendor/virtualenv/util/path/_sync.py
@@ -15,7 +15,6 @@
     def norm(src):
         return ensure_text(str(src))

-
 else:
     norm = str
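The lock.py hunk tracks the vendored filelock's rename of the misspelled poll_intervall keyword to poll_interval; passing the arguments positionally to the superclass, as the change does, sidesteps the spelling difference between filelock releases (my reading of the change, not a documented guarantee). Usage of the wrapper is unchanged — a sketch with a hypothetical lock path:

lock = _CountedFileLock("/tmp/example.lock")  # hypothetical lock file
lock.acquire(timeout=5, poll_interval=0.05)   # wait up to 5s, poll every 50ms
try:
    pass  # critical section, reentrant across threads via the counted lock
finally:
    lock.release()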
diff --git a/src/main/python/pybuilder/_vendor/virtualenv/version.py b/src/main/python/pybuilder/_vendor/virtualenv/version.py
index 57afbab56..e427ca8a5 100644
--- a/src/main/python/pybuilder/_vendor/virtualenv/version.py
+++ b/src/main/python/pybuilder/_vendor/virtualenv/version.py
@@ -1,3 +1,3 @@
 from __future__ import unicode_literals

-__version__ = "20.10.0"
+__version__ = "20.13.0"
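A quick way to confirm the vendored bump took effect after a build — the import path assumes PyBuilder's vendor namespace shown in the file paths above:

from pybuilder._vendor.virtualenv.version import __version__
assert __version__ == "20.13.0"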