diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 54721fc2dd..624132ab1f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -71,12 +71,12 @@ jobs: tests: name: ${{matrix.os}} / ${{ matrix.python-version }} needs: lint - runs-on: ${{ matrix.os }}-latest + runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: [3.8, 3.9, "3.10", 3.11, 3.12] - os: [MacOS, Ubuntu, Windows] + os: [MacOS-12, Ubuntu-latest, Windows-latest] steps: - uses: actions/checkout@v4 diff --git a/Pipfile b/Pipfile index 7b4a129f11..51adc8c0f2 100644 --- a/Pipfile +++ b/Pipfile @@ -9,7 +9,7 @@ sphinx = "*" sphinx-click = "==4.*" sphinxcontrib-spelling = "==7.*" click = "==8.0.3" -pypiserver = "==1.*" +pypiserver = "*" stdeb = {version="*", sys_platform = "== 'linux'"} zipp = {version = "==3.6.0", markers = "python_version < '3.10'"} pre-commit = "==2.*" diff --git a/Pipfile.lock b/Pipfile.lock index ff9e323ae1..b6d61effbd 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b8d2126bc8bb139755c193b41d494c886fe5560760a5cddee992db697707a88d" + "sha256": "5838e1d97601cb8455abaa6ecf7208d7551ca334d9ab356349b6206d017f7a13" }, "pipfile-spec": 6, "requires": {}, @@ -392,6 +392,14 @@ "markers": "python_version >= '3.8'", "version": "==7.1.0" }, + "importlib-resources": { + "hashes": [ + "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c", + "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145" + ], + "markers": "python_version < '3.12' and python_version >= '3.9'", + "version": "==6.4.0" + }, "incremental": { "hashes": [ "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0", @@ -609,11 +617,11 @@ }, "packaging": { "hashes": [ - "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", - "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" + 
"sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", + "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" ], "markers": "python_version >= '3.7'", - "version": "==23.2" + "version": "==24.0" }, "parse": { "hashes": [ @@ -640,11 +648,11 @@ }, "pip": { "hashes": [ - "sha256:5052d7889c1f9d05224cd41741acb7c5d6fa735ab34e339624a614eaaa7e7d76", - "sha256:7fd9972f96db22c8077a1ee2691b172c8089b17a5652a44494a9ecb0d78f9149" + "sha256:ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc", + "sha256:ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2" ], "markers": "python_version >= '3.7'", - "version": "==23.3.2" + "version": "==24.0" }, "pipenv": { "editable": true, @@ -723,12 +731,12 @@ }, "pypiserver": { "hashes": [ - "sha256:09f2f797f92b30e92287821e2dc3ca72c8011aec6a2570019254adf98318ee5c", - "sha256:70760efadc3d89b3e1b3f54f078a6520f6c6a0c3dd718b46cd0cf466c9fd01b2" + "sha256:3f15a94969265add2ef44a035117b8dd18d7525af871ed58ac2571276293579b", + "sha256:8c7ed96b2f76f2843e4a27002846bd7ebb7217e143cf60456ee6fa2a415c2d73" ], "index": "pypi", "markers": "python_version >= '3.6'", - "version": "==1.5.2" + "version": "==2.1.1" }, "pyproject-hooks": { "hashes": [ diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index 701cf9ed83..e82e02c61f 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -467,7 +467,7 @@ def validate_python_path(ctx, param, value): # we'll report absolute paths which do not exist: if isinstance(value, (str, bytes)): if os.path.isabs(value) and not os.path.isfile(value): - raise BadParameter("Expected Python at path %s does not exist" % value) + raise BadParameter(f"Expected Python at path {value} does not exist") return value @@ -479,7 +479,7 @@ def validate_bool_or_none(ctx, param, value): def validate_pypi_mirror(ctx, param, value): if value and not is_valid_url(value): - raise BadParameter("Invalid PyPI mirror URL: %s" % value) + raise BadParameter(f"Invalid PyPI 
mirror URL: {value}") return value diff --git a/pipenv/environment.py b/pipenv/environment.py index d6533ac0c0..40e97af778 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -310,7 +310,7 @@ def build_command( lines = pylib_lines + pyinc_lines if scripts: lines.append( - "u'scripts': u'{0}'.format(%s)" % sysconfig_line.format("scripts") + "u'scripts': u'{{0}}'.format({})".format(sysconfig_line.format("scripts")) ) if py_version: lines.append( diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 6495903483..5c4bc10596 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -157,7 +157,7 @@ def show(self, file=None): if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None: hint = f'Try "{self.ctx.command_path} {self.ctx.help_option_names[0]}" for help.\n' if self.ctx is not None: - click.echo(self.ctx.get_usage() + "\n%s" % hint, file=file, color=color) + click.echo(self.ctx.get_usage() + f"\n{hint}", file=file, color=color) click.echo(self.message, file=file) diff --git a/pipenv/utils/exceptions.py b/pipenv/utils/exceptions.py index 121399da17..a2fcbf4079 100644 --- a/pipenv/utils/exceptions.py +++ b/pipenv/utils/exceptions.py @@ -14,7 +14,7 @@ def __init__(self, param): @classmethod def get_message(cls, param): - return "Missing Parameter: %s" % param + return f"Missing Parameter: {param}" def show(self, param): print(self.message, file=sys.stderr, flush=True) @@ -37,9 +37,9 @@ def __init__(self, path, *args, **kwargs): super().__init__(self.message) def get_message(self, path, backup_path=None): - message = "ERROR: Failed to load file at %s" % path + message = f"ERROR: Failed to load file at {path}" if backup_path: - msg = "it will be backed up to %s and removed" % backup_path + msg = f"it will be backed up to {backup_path} and removed" else: msg = "it will be removed and replaced on the next lock." 
message = f"{message}\nYour lockfile is corrupt, {msg}" @@ -55,9 +55,9 @@ def __init__(self, path, backup_path=None): super().__init__(self.message) def get_message(self, path, backup_path=None): - message = "ERROR: Failed to load lockfile at %s" % path + message = f"ERROR: Failed to load lockfile at {path}" if backup_path: - msg = "it will be backed up to %s and removed" % backup_path + msg = f"it will be backed up to {backup_path} and removed" else: msg = "it will be removed and replaced on the next lock." message = f"{message}\nYour lockfile is corrupt, {msg}" @@ -73,9 +73,9 @@ def __init__(self, path, backup_path=None): super().__init__(self.message) def get_message(self, path, backup_path=None): - message = "ERROR: Failed to load Pipfile at %s" % path + message = f"ERROR: Failed to load Pipfile at {path}" if backup_path: - msg = "it will be backed up to %s and removed" % backup_path + msg = f"it will be backed up to {backup_path} and removed" else: msg = "it will be removed and replaced on the next lock." 
message = f"{message}\nYour Pipfile is corrupt, {msg}" diff --git a/pipenv/utils/requirementslib.py b/pipenv/utils/requirementslib.py index c4d508ba8d..298483ed90 100644 --- a/pipenv/utils/requirementslib.py +++ b/pipenv/utils/requirementslib.py @@ -361,7 +361,7 @@ def get_path(root, path, default=_UNSET): cur = cur[seg] except (ValueError, KeyError, IndexError, TypeError): if not getattr(cur, "__iter__", None): - exc = TypeError("%r object is not indexable" % type(cur).__name__) + exc = TypeError(f"{type(cur).__name__!r} object is not indexable") raise PathAccessError(exc, seg, path) except PathAccessError: if default is _UNSET: @@ -429,7 +429,7 @@ def dict_path_exit(path, key, old_parent, new_parent, new_items): except AttributeError: ret = new_parent.__class__(vals) # frozensets else: - raise RuntimeError("unexpected iterable type: %r" % type(new_parent)) + raise RuntimeError(f"unexpected iterable type: {type(new_parent)!r}") return ret @@ -519,14 +519,14 @@ def remap( # TODO: improve argument formatting in sphinx doc # TODO: enter() return (False, items) to continue traverse but cancel copy? if not callable(visit): - raise TypeError("visit expected callable, not: %r" % visit) + raise TypeError(f"visit expected callable, not: {visit!r}") if not callable(enter): - raise TypeError("enter expected callable, not: %r" % enter) + raise TypeError(f"enter expected callable, not: {enter!r}") if not callable(exit): - raise TypeError("exit expected callable, not: %r" % exit) + raise TypeError(f"exit expected callable, not: {exit!r}") reraise_visit = kwargs.pop("reraise_visit", True) if kwargs: - raise TypeError("unexpected keyword arguments: %r" % kwargs.keys()) + raise TypeError(f"unexpected keyword arguments: {kwargs.keys()!r}") path, registry, stack = (), {}, [(None, root)] new_items_stack = [] @@ -551,7 +551,7 @@ def remap( # TODO: handle False? 
raise TypeError( "enter should return a tuple of (new_parent," - " items_iterator), not: %r" % res + f" items_iterator), not: {res!r}" ) if new_items is not False: # traverse unless False is explicitly passed @@ -583,7 +583,7 @@ remap try: new_items_stack[-1][1].append(visited_item) except IndexError: - raise TypeError("expected remappable root, not: %r" % root) + raise TypeError(f"expected remappable root, not: {root!r}") return value diff --git a/pyproject.toml b/pyproject.toml index 9430c61755..a04d2c6b20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,6 +113,9 @@ exclude = ''' exclude = [ "pipenv/patched/*", "pipenv/vendor/*", + "tests/pypi/*", + "tests/fixtures/*", + "tests/test_artifacts/*", ] select = [ "ASYNC", @@ -140,8 +143,7 @@ ignore = [ "PLW2901", ] line-length = 137 -target-version = "py37" - +target-version = "py38" [tool.ruff.mccabe] max-complexity = 44 diff --git a/tasks/release.py b/tasks/release.py index 4540f4a8f5..b47007d9eb 100644 --- a/tasks/release.py +++ b/tasks/release.py @@ -19,7 +19,7 @@ def log(msg): - print("[release] %s" % msg) + print(f"[release] {msg}") def get_version_file(ctx): @@ -126,11 +126,11 @@ def build_dists(ctx): executable = ctx.run( "python -c 'import sys; print(sys.executable)'", hide=True ).stdout.strip() - log("Building sdist using %s ...." % executable) + log(f"Building sdist using {executable} ....") os.environ["PIPENV_PYTHON"] = py_version ctx.run("pipenv install --dev", env=env) ctx.run("pipenv run pip install -e . --upgrade --upgrade-strategy=eager", env=env) - log("Building wheel using python %s ....
% py_version) + log(f"Building wheel using python {py_version} ....") ctx.run("pipenv run python -m build", env=env) @@ -224,8 +224,8 @@ def clean_mdchangelog(ctx, filename=None, content=None): def tag_version(ctx, push=False): version = find_version(ctx) version = semver.VersionInfo.parse(version) - log("Tagging revision: v%s" % version) - ctx.run("git tag v%s" % version) + log(f"Tagging revision: v{version}") + ctx.run(f"git tag v{version}") if push: log("Pushing tags...") ctx.run("git push origin master") @@ -283,17 +283,17 @@ def bump_version(ctx, dry_run=False, pre=False, dev=False): new_version = new_version.bump_prerelease(current_version, "dev") # Update the version file - log("Updating version to %s" % new_version) + log(f"Updating version to {new_version}") version = find_version(ctx) - log("Found current version: %s" % version) + log(f"Found current version: {version}") if dry_run: - log("Would update to: %s" % new_version) + log(f"Would update to: {new_version}") else: - log("Updating to: %s" % new_version) + log(f"Updating to: {new_version}") version_file = get_version_file(ctx) file_contents = version_file.read_text() version_file.write_text(file_contents.replace(version, str(new_version))) ctx.run(f"git add {version_file.as_posix()}") log("Committing...") - ctx.run('git commit -s -m "Bumped version to %s."' % new_version) + ctx.run(f'git commit -s -m "Bumped version to {new_version}."') return str(new_version) diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index cd3201e109..3292fdc552 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -147,14 +147,14 @@ def _get_patched_dir(ctx): def clean_vendor(ctx, vendor_dir): # Old _vendor cleanup remove_all(vendor_dir.glob("*.pyc")) - log("Cleaning %s" % vendor_dir) + log(f"Cleaning {vendor_dir}") for item in vendor_dir.iterdir(): if item.is_dir(): shutil.rmtree(str(item)) elif item.name not in FILE_WHITE_LIST: item.unlink() else: - log("Skipping %s" % item) + 
log(f"Skipping {item}") def detect_all_vendored_libs(ctx): @@ -216,13 +216,13 @@ def rewrite_file_imports(item, vendored_libs): for lib, to_lib in vendored_libs.items(): text = re.sub( - r"(?m)^(\s*)import %s((?:\.\S*)?\s+as)" % lib, - r"\1import %s\2" % to_lib, + rf"(?m)^(\s*)import {lib}((?:\.\S*)?\s+as)", + rf"\1import {to_lib}\2", text, ) - text = re.sub(r"(?m)^(\s*)from %s([\s\.]+)" % lib, r"\1from %s\2" % to_lib, text) + text = re.sub(rf"(?m)^(\s*)from {lib}([\s\.]+)", rf"\1from {to_lib}\2", text) text = re.sub( - r"(?m)^(\s*)import %s(\s*[,\n#])" % lib, + rf"(?m)^(\s*)import {lib}(\s*[,\n#])", rf"\1import {to_lib} as {lib}\2", text, ) @@ -232,8 +232,8 @@ def rewrite_file_imports(item, vendored_libs): def apply_patch(ctx, patch_file_path): - log("Applying patch %s" % patch_file_path.name) - ctx.run("git apply --ignore-whitespace --verbose %s" % patch_file_path) + log(f"Applying patch {patch_file_path.name}") + ctx.run(f"git apply --ignore-whitespace --verbose {patch_file_path}") def _recursive_write_to_zip(zf, path, root=None): @@ -267,7 +267,7 @@ def rename_if_needed(ctx, vendor_dir, item): def _ensure_package_in_requirements(ctx, requirements_file, package): requirement = None - log("using requirements file: %s" % requirements_file) + log(f"using requirements file: {requirements_file}") req_file_lines = list(requirements_file.read_text().splitlines()) if package: match = [r for r in req_file_lines if r.strip().lower().startswith(package)] @@ -280,10 +280,10 @@ def _ensure_package_in_requirements(ctx, requirements_file, package): ): matched_req = f"{m}" requirement = matched_req - log("Matched req: %r" % matched_req) + log(f"Matched req: {matched_req!r}") if not matched_req: req_file_lines.append(f"{package}") - log("Writing requirements file: %s" % requirements_file) + log(f"Writing requirements file: {requirements_file}") requirements_file.write_text("\n".join(req_file_lines)) requirement = f"{package}" return requirement @@ -292,7 +292,7 @@ def 
_ensure_package_in_requirements(ctx, requirements_file, package): def install(ctx, vendor_dir, package=None): requirements_file = vendor_dir / f"{vendor_dir.name}.txt" requirement = f"-r {requirements_file.as_posix()}" - log("Using requirements file: %s" % requirement) + log(f"Using requirements file: {requirement}") if package: requirement = _ensure_package_in_requirements(ctx, requirements_file, package) # We use --no-deps because we want to ensure that all of our dependencies @@ -378,7 +378,7 @@ def vendor(ctx, vendor_dir, package=None, rewrite=True): post_install_cleanup(ctx, vendor_dir) # Detect the vendored packages/modules vendored_libs = detect_all_vendored_libs(ctx) - log("Detected vendored libraries: %s" % ", ".join(vendored_libs)) + log("Detected vendored libraries: {}".format(", ".join(vendored_libs))) # Apply pre-patches log("Applying pre-patches...") @@ -395,7 +395,7 @@ def vendor(ctx, vendor_dir, package=None, rewrite=True): for item in vendor_dir.iterdir(): if item.is_dir(): if rewrite and not package or (package and item.name.lower() in package): - log("Rewriting imports for %s..." % item) + log(f"Rewriting imports for {item}...") rewrite_imports(item, vendored_libs) rename_if_needed(ctx, vendor_dir, item) elif item.name not in FILE_WHITE_LIST and ( @@ -419,12 +419,12 @@ def redo_imports(ctx, library, vendor_dir=None): vendor_dir = _get_vendor_dir(ctx) else: vendor_dir = Path(vendor_dir).absolute() - log("Using vendor dir: %s" % vendor_dir) + log(f"Using vendor dir: {vendor_dir}") vendored_libs = detect_all_vendored_libs(ctx) item = vendor_dir / library library_name = vendor_dir / f"{library}.py" - log("Detected vendored libraries: %s" % ", ".join(vendored_libs)) - log("Rewriting imports for %s..." 
% item) + log("Detected vendored libraries: {}".format(", ".join(vendored_libs))) + log(f"Rewriting imports for {item}...") if item.is_dir(): rewrite_imports(item, vendored_libs) else: @@ -435,9 +435,9 @@ def redo_imports(ctx, library, vendor_dir=None): def rewrite_all_imports(ctx): vendor_dir = _get_vendor_dir(ctx) patched_dir = _get_patched_dir(ctx) - log("Using vendor dir: %s" % vendor_dir) + log(f"Using vendor dir: {vendor_dir}") vendored_libs = detect_all_vendored_libs(ctx) - log("Detected vendored libraries: %s" % ", ".join(vendored_libs)) + log("Detected vendored libraries: {}".format(", ".join(vendored_libs))) log("Rewriting all imports related to vendored libs") for item in itertools.chain(patched_dir.iterdir(), vendor_dir.iterdir()): if item.is_dir(): @@ -761,14 +761,14 @@ def main(ctx, package=None, type=None): target_dirs = [vendor_dir, patched_dir] if package: if type is None or type == "vendor": - log("Using vendor dir: %s" % vendor_dir) + log(f"Using vendor dir: {vendor_dir}") vendor(ctx, vendor_dir, package=package) download_licenses(ctx, vendor_dir, package=package) elif type == "patched": - log("Using patched dir: %s" % patched_dir) + log(f"Using patched dir: {patched_dir}") vendor(ctx, patched_dir, package=package) download_licenses(ctx, patched_dir, package=package) - log("Vendored %s" % package) + log(f"Vendored {package}") return for package_dir in target_dirs: clean_vendor(ctx, package_dir)