diff --git a/client/devpi/main.py b/client/devpi/main.py
index 952ca29ac..1a4108c0f 100644
--- a/client/devpi/main.py
+++ b/client/devpi/main.py
@@ -3,7 +3,6 @@
 import sys
 import time
 import traceback
-import py
 import argparse
 import shlex
 import shutil
@@ -11,6 +10,7 @@
 import textwrap
 from base64 import b64encode
 from contextlib import closing, contextmanager
+from contextlib import suppress
 from devpi import hookspecs
 from devpi_common.terminal import TerminalWriter
 from devpi_common.types import lazydecorator, cached_property
@@ -18,6 +18,7 @@
 from devpi.use import PersistentCurrent
 from devpi_common.request import new_requests_session
 from devpi import __version__ as client_version
+from pathlib import Path
 from pluggy import HookimplMarker
 from pluggy import PluginManager
 from shutil import rmtree
@@ -70,7 +71,7 @@ class Hub:
     def __init__(self, args, file=None, pm=None):
         self._tw = TerminalWriter(file)
         self.args = args
-        self.cwd = py.path.local()
+        self.cwd = Path.cwd()
         self.quiet = False
         self._last_http_stati = []
         self.http = new_requests_session(agent=("client", client_version))
@@ -95,24 +96,24 @@ def set_quiet(self):

     @property
     def clientdir(self):
-        return py.path.local(self.args.clientdir)
+        return Path(self.args.clientdir)

     @property
     def auth_path(self):
-        return self.clientdir.join("auth.json")
+        return self.clientdir / "auth.json"

     @property
     def local_current_path(self):
         venv = self.active_venv()
         if venv is not None:
-            return venv.join('devpi.json')
+            return venv / 'devpi.json'

     @property
     def current_path(self):
         local_path = self.local_current_path
         if local_path is not None and local_path.exists():
             return local_path
-        return self.clientdir.join("current.json")
+        return self.clientdir / "current.json"

     def require_valid_current_with_index(self):
         current = self.current
@@ -238,17 +239,16 @@ def remove_readonly(func, path, excinfo):
             else:
                 return

-        workdir = py.path.local(
-            mkdtemp(prefix=prefix))
+        workdir = Path(mkdtemp(prefix=prefix))
         self.info("using workdir", workdir)
         try:
             yield workdir
         finally:
-            rmtree(workdir.strpath, onerror=remove_readonly)
+            rmtree(workdir, onerror=remove_readonly)

     def get_current(self, args_url=None):
-        self.clientdir.ensure(dir=1)
+        self.clientdir.mkdir(parents=True, exist_ok=True)
         current = PersistentCurrent(self.auth_path, self.current_path)
         index_url = getattr(self.args, "index", None)
         if "DEVPI_INDEX" in os.environ:
@@ -307,10 +307,10 @@ def current(self):
         return self.get_current()

     def get_existing_file(self, arg):
-        p = py.path.local(arg, expanduser=True)
+        p = Path(arg).expanduser()
         if not p.exists():
             self.fatal("file does not exist: %s" % p)
-        elif not p.isfile():
+        elif not p.is_file():
             self.fatal("is not a file: %s" % p)
         return p

@@ -349,11 +349,11 @@ def venv(self):
         else:
             venvdir = self.current.venvdir or self.active_venv()
         if venvdir:
-            cand = self.cwd.join(venvdir, vbin, abs=True)
-            if not cand.check() and self.venvwrapper_home:
-                cand = self.venvwrapper_home.join(venvdir, vbin, abs=True)
-            venvdir = cand.dirpath().strpath
-            if not cand.check():
+            cand = self.cwd / venvdir / vbin
+            if not cand.exists() and self.venvwrapper_home:
+                cand = self.venvwrapper_home / venvdir / vbin
+            venvdir = str(cand.parent)
+            if not cand.exists():
                 if self.current.venvdir:
                     self.fatal(
                         "No virtualenv found at: %r\n"
@@ -376,14 +376,14 @@ def venvwrapper_home(self):
         path = os.environ.get("WORKON_HOME", None)
         if path is None:
             return
-        return py.path.local(path)
+        return Path(path)

     def active_venv(self):
         """current activated virtualenv"""
         path = 
os.environ.get("VIRTUAL_ENV", None) if path is None: return - return py.path.local(path) + return Path(path) def popen_output(self, args, cwd=None, report=True): if isinstance(args, str): @@ -426,15 +426,15 @@ def popen(self, args, cwd=None, dryrun=None, **popen_kwargs): def report_popen(self, args, cwd=None, extraenv=None): base = cwd or self.cwd - rel = py.path.local(args[0]).relto(base) - if not rel: - rel = str(args[0]) + rel = Path(args[0]) + with suppress(ValueError): + rel = rel.relative_to(base) if extraenv is not None: envadd = " [%s]" % ",".join( ["%s=%r" % item for item in sorted(extraenv.items())]) else: envadd = "" - self.line("--> ", base + "$", rel, " ".join(args[1:]), envadd) + self.line(f"--> {base}$ {rel} {' '.join(args[1:])} {envadd}") def popen_check(self, args, extraenv=None, **kwargs): assert args[0], args diff --git a/client/devpi/push.py b/client/devpi/push.py index 3816c76ce..e887ac481 100644 --- a/client/devpi/push.py +++ b/client/devpi/push.py @@ -1,6 +1,6 @@ -import py from devpi_common.metadata import parse_requirement, splitbasename from . import pypirc +from pathlib import Path import traceback @@ -34,11 +34,11 @@ def parse_target(hub, args): posturl = args.target[5:] pypirc_path = args.pypirc if pypirc_path is None: - pypirc_path = py.path.local._gethomedir().join(".pypirc") + pypirc_path = Path().home() / ".pypirc" else: - pypirc_path = py.path.local().join(args.pypirc, abs=True) - if not pypirc_path.check(): - hub.fatal("no pypirc file found at: %s" %(pypirc_path)) + pypirc_path = Path() / args.pypirc + if not pypirc_path.is_file(): + hub.fatal(f"no pypirc file found at: {pypirc_path}") hub.info("using pypirc", pypirc_path) auth = pypirc.Auth(pypirc_path) try: diff --git a/client/devpi/pypirc.py b/client/devpi/pypirc.py index 2594e7b2f..70b78cbf4 100644 --- a/client/devpi/pypirc.py +++ b/client/devpi/pypirc.py @@ -1,14 +1,14 @@ """ helpers for authenticating against info from .pypirc files. 
""" +from pathlib import Path import iniconfig -import py class Auth: def __init__(self, path=None): if path is None: - path = py.path.local._gethomedir().join(".pypirc") + path = Path().home() / ".pypirc" self.ini = iniconfig.IniConfig(path) def get_url_auth(self, secname): diff --git a/client/devpi/test.py b/client/devpi/test.py index 4f2ed078f..d6cafb05f 100644 --- a/client/devpi/test.py +++ b/client/devpi/test.py @@ -3,6 +3,7 @@ import hashlib import shutil from devpi_common.archive import Archive +from devpi_common.contextlib import chdir from devpi_common.metadata import parse_requirement import json import sys @@ -10,14 +11,15 @@ from devpi_common.url import URL from devpi_common.metadata import get_sorted_versions from devpi_common.viewhelp import ViewLinkStore +from pathlib import Path class DevIndex: def __init__(self, hub, rootdir, current): - self.rootdir = rootdir + self.rootdir = Path(rootdir) self.current = current self.hub = hub - self.dir_download = self.rootdir.mkdir("downloads") + self.dir_download = self.rootdir / "downloads" def download_and_unpack(self, versioninfo, link): basic_auth = self.hub.current.get_basic_auth(link.href) @@ -43,7 +45,8 @@ def download_and_unpack(self, versioninfo, link): assert digest == link.md5, (digest, link.md5) basename = URL(url).basename - path_archive = self.dir_download.join(basename) + self.dir_download.mkdir(exist_ok=True) + path_archive = self.dir_download / basename with path_archive.open("wb") as f: f.write(content) pkg = UnpackedPackage( @@ -64,7 +67,7 @@ def get_matching_versioninfo(self, pkgname, indexname): return ViewLinkStore(projurl, r.result[version]) def runtox(self, link, pkg, sdist_pkg=None, upload_tox_results=True): - jsonreport = pkg.rootdir.join("toxreport.json") + jsonreport = pkg.rootdir / "toxreport.json" path_archive = pkg.path_archive tox_path = self.hub.current.getvenvbin( "tox", venvdir=self.hub.venv, glob=True) @@ -94,7 +97,7 @@ def runtox(self, link, pkg, sdist_pkg=None, upload_tox_results=True): toxcmd.extend(self.get_tox_args(unpack_path=sdist_pkg.path_unpacked)) ret = 0 - with sdist_pkg.path_unpacked.as_cwd(): + with chdir(sdist_pkg.path_unpacked): try: self.hub.popen_check( toxcmd, @@ -115,6 +118,7 @@ def runtox(self, link, pkg, sdist_pkg=None, upload_tox_results=True): return 0 def get_tox_args(self, unpack_path): + assert isinstance(unpack_path, Path) hub = self.hub args = self.hub.args toxargs = [] @@ -122,8 +126,8 @@ def get_tox_args(self, unpack_path): toxargs.append("-e" + args.toxenv) if args.toxini: ini = hub.get_existing_file(args.toxini) - elif unpack_path.join("tox.ini").exists(): - ini = hub.get_existing_file(unpack_path.join("tox.ini")) + elif unpack_path.joinpath("tox.ini").is_file(): + ini = hub.get_existing_file(unpack_path / "tox.ini") elif args.fallback_ini: ini = hub.get_existing_file(args.fallback_ini) else: @@ -148,12 +152,12 @@ def __init__(self, hub, rootdir, path_archive, versioninfo, link): self.hub = hub basename = link.basename if basename.endswith(".whl"): - rootdir = rootdir.join(basename) + rootdir = rootdir / basename elif basename.endswith((".tar.gz", ".tgz")): - rootdir = rootdir.join("targz") + rootdir = rootdir / "targz" elif basename.endswith(".zip"): - rootdir = rootdir.join("zip") - assert not rootdir.check(), rootdir + rootdir = rootdir / "zip" + assert not rootdir.exists(), rootdir self.rootdir = rootdir self.path_archive = path_archive self.versioninfo = versioninfo @@ -168,13 +172,13 @@ def unpack(self): if self.link.basename.endswith(".whl"): inpkgdir = 
self.rootdir else: - inpkgdir = self.rootdir.join("%s-%s" %(pkgname, version)) - if not inpkgdir.check(): + inpkgdir = self.rootdir / f"{pkgname}-{version}" + if not inpkgdir.exists(): # sometimes dashes are replaced by underscores, # for example the source releases of argon2_cffi - inpkgdir = self.rootdir.join( - "%s-%s" % (pkgname.replace('-', '_'), version)) - if not inpkgdir.check(): + inpkgdir = self.rootdir.joinpath( + f"{pkgname.replace('-', '_')}-{version}") + if not inpkgdir.exists(): self.hub.fatal("Couldn't find unpacked package in", inpkgdir) self.path_unpacked = inpkgdir diff --git a/client/devpi/upload.py b/client/devpi/upload.py index 80697ecd5..b22478a6d 100644 --- a/client/devpi/upload.py +++ b/client/devpi/upload.py @@ -1,7 +1,6 @@ import iniconfig import os import sys -import py import re import shutil import zipfile @@ -13,8 +12,11 @@ from devpi_common.metadata import splitext_archive from devpi_common.archive import Archive from devpi_common.archive import zip_dir +from devpi_common.contextlib import chdir from devpi_common.types import CompareMixin from .main import HTTPReply, set_devpi_auth_header +from pathlib import Path +from shutil import rmtree def main(hub, args): @@ -71,10 +73,10 @@ def filter_latest(path_pkginfo): def main_fromfiles(hub, args): paths = [] for p in args.path: - p = py.path.local(os.path.expanduser(p)) - if not p.check(): + p = Path(p).expanduser() + if not p.exists(): hub.fatal("path does not exist: %s" % p) - if p.isdir() and not args.fromdir: + if p.is_dir() and not args.fromdir: hub.fatal("%s: is a directory but --from-dir not specified" % p) paths.append(p) @@ -102,7 +104,7 @@ def do_upload_paths(self, paths): pkginfo = get_pkginfo(archivepath) if pkginfo is None or pkginfo.name is None: hub.error("%s: does not contain PKGINFO, skipping" % - archivepath.basename) + archivepath.name) continue if isinstance(pkginfo, DocZipMeta): doczip2pkginfo[archivepath] = pkginfo @@ -122,15 +124,15 @@ def upload_doc(self, path, pkginfo): with self.hub.workdir() as tmp: if pkginfo.needs_repackage: if version is None: - fn = tmp.join('%s.doc.zip' % name) + fn = tmp / f'{name}.doc.zip' else: - fn = tmp.join('%s-%s.doc.zip' % (name, version)) - with zipfile.ZipFile(fn.strpath, "w") as z: + fn = tmp / f'{name}-{version}.doc.zip' + with zipfile.ZipFile(str(fn), "w") as z: with Archive(path) as archive: for aname in archive.namelist(): z.writestr(aname, archive.read(aname)) self.hub.info( - "repackaged %s to %s" % (path.basename, fn.basename)) + "repackaged %s to %s" % (path.name, fn.name)) path = fn self.post( "doc_upload", path, @@ -149,11 +151,11 @@ def post(self, action, path, meta): auth = (auth[0], hub.derive_token(auth[1], meta['name'])) set_devpi_auth_header(headers, auth) if path: - files = {"content": (path.basename, path.open("rb"))} + files = {"content": (path.name, path.open("rb"))} else: files = None if path: - msg = "%s of %s to %s" %(action, path.basename, self.pypisubmit) + msg = f"{action} of {path.name} to {self.pypisubmit}" else: msg = "%s %s-%s to %s" %(action, meta["name"], meta["version"], self.pypisubmit) @@ -187,7 +189,7 @@ def upload_release_file(self, path, pkginfo): meta = {} for attr in pkginfo: meta[attr] = getattr(pkginfo, attr) - pyver = get_pyversion_filetype(path.basename) + pyver = get_pyversion_filetype(path.name) meta["pyversion"], meta["filetype"] = pyver self.post("file_upload", path, meta=meta) @@ -196,14 +198,14 @@ def upload_release_file(self, path, pkginfo): def get_archive_files(path): - if path.isfile(): + if 
path.isfile():
+    if path.is_file():
         yield path
         return
-    for x in path.visit():
-        if not x.check(file=1):
+    for x in path.rglob("*"):
+        if not x.is_file():
             continue
         for name in ALLOWED_ARCHIVE_EXTS:
-            if x.basename.endswith(name):
+            if x.name.endswith(name):
                 yield x
@@ -269,7 +271,7 @@ def __repr__(self):


 def get_pkginfo(archivepath):
-    info = get_name_version_doczip(archivepath.basename)
+    info = get_name_version_doczip(archivepath.name)
     if info is not None:
         return DocZipMeta(*info)

@@ -296,6 +298,7 @@ def find_parent_subpath(startpath, relpath, raising=True):

 class Checkout:
     def __init__(self, hub, args, setupdir, hasvcs=None, setupdir_only=None):
+        setupdir = Path(setupdir)
         self.hub = hub
         self.args = args
         self.cm_ui = None
@@ -304,7 +307,7 @@ def __init__(self, hub, args, setupdir, hasvcs=None, setupdir_only=None):
         hasvcs = not hasvcs and not args.novcs
         setupdir_only = bool(setupdir_only or args.setupdironly)
         if hasvcs:
-            with setupdir.as_cwd():
+            with chdir(setupdir):
                 try:
                     if self.cm_ui:
                         hasvcs = check_manifest.detect_vcs(self.cm_ui).metadata_name
@@ -315,10 +318,10 @@ def __init__(self, hub, args, setupdir, hasvcs=None, setupdir_only=None):
         else:
             if hasvcs not in (".hg", ".git") or setupdir_only:
                 # XXX for e.g. svn we don't do copying
-                self.rootpath = setupdir
+                self.rootpath = Path(setupdir)
             else:
-                for p in setupdir.parts(reverse=True):
-                    if p.join(hasvcs).exists():
+                for p in (setupdir, *setupdir.parents):
+                    if p.joinpath(hasvcs).exists():
                         self.rootpath = p
                         break
                 else:
@@ -328,37 +331,33 @@ def __init__(self, hub, args, setupdir, hasvcs=None, setupdir_only=None):
         self.setupdir_only = setupdir_only

     def export(self, basetemp):
+        assert isinstance(basetemp, Path)
         if not self.hasvcs:
             return Exported(self.hub, self.args, self.setupdir, self.setupdir)
-        with self.rootpath.as_cwd():
+        with chdir(self.rootpath):
             if self.cm_ui:
                 files = check_manifest.get_vcs_files(self.cm_ui)
             else:
                 files = check_manifest.get_vcs_files()
-        newrepo = basetemp.join(self.rootpath.basename)
+        newrepo = basetemp / self.rootpath.name
         for fn in files:
-            source = self.rootpath.join(fn)
-            if source.islink():
-                dest = newrepo.join(fn)
-                dest.dirpath().ensure(dir=1)
-                dest.mksymlinkto(source.readlink(), absolute=True)
-            elif source.isfile():
-                dest = newrepo.join(fn)
-                dest.dirpath().ensure(dir=1)
-                source.copy(dest, mode=True)
+            source = self.rootpath / fn
+            dest = newrepo / fn
+            dest.parent.mkdir(parents=True, exist_ok=True)
+            shutil.copy2(source, dest, follow_symlinks=False)
         self.hub.debug("copied", len(files), "files to", newrepo)
         if self.hasvcs not in (".git", ".hg") or self.setupdir_only:
             self.hub.warn("not copying vcs repository metadata for", self.hasvcs)
         else:
-            srcrepo = self.rootpath.join(self.hasvcs)
+            srcrepo = self.rootpath / self.hasvcs
             assert srcrepo.exists(), srcrepo
-            destrepo = newrepo.join(self.hasvcs)
-            self.rootpath.join(self.hasvcs).copy(destrepo, mode=True)
+            destrepo = newrepo / self.hasvcs
+            shutil.copytree(srcrepo, destrepo)
             self.hub.info("copied repo", srcrepo, "to", destrepo)
         self.hub.debug(
             "%s-exported project to %s -> new CWD" % (self.hasvcs, newrepo))
-        setupdir_newrepo = newrepo.join(self.setupdir.relto(self.rootpath))
+        setupdir_newrepo = newrepo / self.setupdir.relative_to(self.rootpath)
         return Exported(self.hub, self.args, setupdir_newrepo, self.setupdir)
@@ -368,7 +368,7 @@ def __init__(self, hub, args, rootpath, origrepo):
         self.args = args
         self.rootpath = rootpath
         self.origrepo = origrepo
-        self.target_distdir = origrepo.join("dist")
+        self.target_distdir = 
origrepo / "dist" @property def python(self): @@ -394,7 +394,7 @@ def __str__(self): def setup_name_and_version(self): metadata = build.util.project_wheel_metadata( - self.rootpath.strpath, False + str(self.rootpath), False ) name = metadata["name"] version = metadata["version"] @@ -402,9 +402,9 @@ def setup_name_and_version(self): return name, version def prepare(self): - if self.target_distdir.check(): + if self.target_distdir.is_dir(): self.hub.line("pre-build: cleaning %s" % self.target_distdir) - self.target_distdir.remove() + rmtree(self.target_distdir) self.target_distdir.mkdir() @staticmethod @@ -498,10 +498,10 @@ def setup_build(self, default_formats=None): archives = [] for cmd in cmds: - distdir = self.rootpath.join("dist") + distdir = self.rootpath / "dist" if self.rootpath != self.origrepo: if distdir.exists(): - distdir.remove() + rmtree(distdir) if self.args.verbose: ret = self.hub.popen_check(cmd, cwd=self.rootpath) @@ -511,9 +511,9 @@ def setup_build(self, default_formats=None): if ret is None: # dryrun continue - for x in distdir.listdir(): # usually just one - target = self.target_distdir.join(x.basename) - x.move(target) + for x in distdir.iterdir(): # usually just one + target = self.target_distdir / x.name + x.rename(target) archives.append(target) self.log_build(target) @@ -521,19 +521,19 @@ def setup_build(self, default_formats=None): def setup_build_docs(self): name, version = self.setup_name_and_version() - build = self.rootpath.join("build") + build = self.rootpath / "build" if build.exists(): - build.remove() + rmtree(build) for guess in ("doc", "docs", "source"): - docs = self.rootpath.join(guess) - if docs.isdir(): - if docs.join("conf.py").exists(): + docs = self.rootpath / guess + if docs.is_dir(): + if docs.joinpath("conf.py").is_file(): + break + source = docs / "source" + if source.is_dir() and source.joinpath("conf.py").is_file(): + build = docs / "build" + docs = source break - else: - source = docs.join("source") - if source.isdir() and source.join("conf.py").exists(): - build, docs = docs.join("build"), source - break cmd = ["sphinx-build", "-E", docs, build] if self.args.verbose: ret = self.hub.popen_check(cmd, cwd=self.rootpath) @@ -541,13 +541,13 @@ def setup_build_docs(self): ret = self.hub.popen_output(cmd, cwd=self.rootpath) if ret is None: return - p = self.target_distdir.join("%s-%s.doc.zip" %(name, version)) + p = self.target_distdir / f"{name}-{version}.doc.zip" zip_dir(build, p) self.log_build(p, "[sphinx docs]") return p def log_build(self, path, suffix=None): - kb = path.size() / 1000 + kb = path.stat().st_size // 1000 if suffix: self.hub.line("built: %s %s %skb" % (path, suffix, kb)) else: @@ -590,8 +590,8 @@ def get(self, key): def read_setupcfg(hub, path): - setup_cfg = path.join("setup.cfg") - if setup_cfg.exists(): + setup_cfg = Path(path) / "setup.cfg" + if setup_cfg.is_file(): cfg = iniconfig.IniConfig(setup_cfg) if 'devpi:upload' in cfg.sections: hub.line("detected devpi:upload section in %s" % setup_cfg, bold=True) diff --git a/client/devpi/use.py b/client/devpi/use.py index 9211dab2e..e2f33ae4a 100644 --- a/client/devpi/use.py +++ b/client/devpi/use.py @@ -4,13 +4,14 @@ import itertools import os import sys -import py import re import json import shutil from devpi_common.types import cached_property from devpi_common.url import URL +from pathlib import Path + if sys.platform == "win32": vbin = "Scripts" @@ -357,7 +358,7 @@ def getvenvbin(self, name, venvdir=None, glob=True): if venvdir is None: venvdir = self.venvdir if venvdir: 
- bindir = py.path.local(venvdir).join(vbin) + bindir = Path(venvdir) / vbin return shutil.which(name, path=str(bindir)) if glob: return shutil.which(name) @@ -403,9 +404,10 @@ def get_simpleproject_url(self, name, indexname=None): def _load_json(path, dest): if path is None: return - if not path.check(): + path = Path(path) + if not path.is_file(): return - raw = path.read().strip() + raw = path.read_text().strip() if not raw: return data = json.loads(raw) @@ -425,19 +427,20 @@ def __init__(self, auth_path, current_path): _load_json(self.current_path, self._currentdict) def exists(self): - return self.current_path and self.current_path.check() + return self.current_path and self.current_path.is_file() def _persist(self, data, path, force_write=False): if path is None: return + path = Path(path) try: - olddata = json.loads(path.read()) + olddata = json.loads(path.read_text()) except Exception: olddata = {} if force_write or data != olddata: oldumask = os.umask(7 * 8 + 7) try: - path.write( + path.write_text( json.dumps(data, indent=2, sort_keys=True)) finally: os.umask(oldumask) @@ -475,7 +478,7 @@ def main(hub, args=None): if not hub.local_current_path.exists(): current = hub.current hub.info("Creating local configuration at %s" % hub.local_current_path) - hub.local_current_path.ensure() + hub.local_current_path.touch() current = current.switch_to_local( hub, current.index, hub.local_current_path) # now store existing data in new location @@ -485,7 +488,7 @@ def main(hub, args=None): if args.delete: if not hub.current.exists(): hub.error_and_out("NO configuration found") - hub.current.current_path.remove() + hub.current.current_path.unlink() hub.info("REMOVED configuration at", hub.current.current_path) return if current.exists(): @@ -625,16 +628,18 @@ def __init__(self, path=None): if path is None: path = self.default_location self.screen_name = str(path) - self.path = py.path.local(path, expanduser=True) - self.backup_path = self.path + "-bak" + self.path = Path(path).expanduser() + self.backup_path = self.path.with_name(self.path.name + "-bak") def exists(self): return self.path.exists() @property def indexserver(self): - if self.path.exists(): - for line in self.path.readlines(cr=0): + if not self.path.exists(): + return + with self.path.open() as f: + for line in f: m = self.regex.match(line) if m: return m.group(2) @@ -644,20 +649,22 @@ def write_default(self, indexserver): raise ValueError("config file already exists") content = "\n".join([self.section_name, "%s = %s\n" % (self.config_name, indexserver)]) - self.path.ensure().write(content) + self.path.parent.mkdir(parents=True, exist_ok=True) + self.path.write_text(content) def write_indexserver(self, indexserver): self.ensure_backup_file() if not self.path.exists(): self.write_default(indexserver) + return + if self.indexserver: + section = None else: - if self.indexserver: - section = None - else: - section = self.section_name - newlines = [] - found = False - for line in self.path.readlines(cr=1): + section = self.section_name + newlines = [] + found = False + with self.path.open() as f: + for line in f: if not section: m = self.regex.match(line) if m: @@ -670,19 +677,20 @@ def write_indexserver(self, indexserver): if not found: newlines.append(self.section_name + "\n") newlines.append("%s = %s\n" %(self.config_name, indexserver)) - self.path.write("".join(newlines)) + self.path.write_text("".join(newlines)) def ensure_backup_file(self): if self.path.exists() and not self.backup_path.exists(): - self.path.copy(self.backup_path) 
+        shutil.copyfile(self.path, self.backup_path)


 class DistutilsCfg(BaseCfg):
     section_name = "[easy_install]"
-    default_location = py.path.local(
+    default_location = Path(
         "~/.pydistutils.cfg" if sys.platform != "win32"
-        else "~/pydistutils.cfg", expanduser=True)
+        else "~/pydistutils.cfg").expanduser()


 class PipCfg(BaseCfg):
@@ -703,20 +711,20 @@ def appdirs(self):

     @property
     def legacy_location(self):
-        confdir = py.path.local(
-            "~/.pip" if sys.platform != "win32" else "~/pip",
-            expanduser=True)
-        return confdir.join(self.pip_conf_name)
+        confdir = Path(
+            "~/.pip" if sys.platform != "win32" else "~/pip").expanduser()
+        return confdir / self.pip_conf_name

     @property
     def new_location(self):
-        return py.path.local(
-            self.appdirs.user_config_dir("pip")).join(self.pip_conf_name)
+        return Path(
+            self.appdirs.user_config_dir("pip")) / self.pip_conf_name

     @property
     def default_location(self):
         if self.venv:
-            default_location = py.path.local(self.venv, expanduser=True).join(self.pip_conf_name)
+            default_location = Path(
+                self.venv).expanduser() / self.pip_conf_name
         elif 'PIP_CONFIG_FILE' in os.environ:
             default_location = os.environ.get('PIP_CONFIG_FILE')
         elif self.legacy_location.exists():
@@ -737,20 +745,21 @@ def write_searchindexserver(self, searchindexserver):
         newlines = []
         found = False
         insection = False
-        for line in self.path.readlines(cr=1):
-            if insection:
-                if line.strip().startswith('['):
-                    insection = False
-            if section in line.lower() and not insection:
-                insection = True
-            if insection and re.match(r'index\s*=.*', line):
-                line = "index = %s\n" % searchindexserver
-                found = True
-            newlines.append(line)
+        with self.path.open() as f:
+            for line in f:
+                if insection:
+                    if line.strip().startswith('['):
+                        insection = False
+                if section in line.lower() and not insection:
+                    insection = True
+                if insection and re.match(r'index\s*=.*', line):
+                    line = "index = %s\n" % searchindexserver
+                    found = True
+                newlines.append(line)
         if not found:
             newlines.append(section + "\n")
             newlines.append("index = %s\n" % searchindexserver)
-        self.path.write("".join(newlines))
+        self.path.write_text("".join(newlines))

     def write_trustedhost(self, indexserver):
         self.ensure_backup_file()
@@ -761,22 +770,23 @@ def write_trustedhost(self, indexserver):
         insection = False
         indexserver = URL(indexserver)
         trustedhost = "trusted-host = %s\n" % indexserver.hostname
-        for line in self.path.readlines(cr=1):
-            if insection:
-                if line.strip().startswith('['):
-                    if not found:
-                        newlines.append(trustedhost)
-                        found = True
-                    insection = False
-            if not found and self.section_name in line.lower() and not insection:
-                insection = True
-            if not found and insection and re.match(r'trusted-host\s*=\s*%s' % indexserver.hostname, line):
-                found = True
-            newlines.append(line)
+        with self.path.open() as f:
+            for line in f:
+                if insection:
+                    if line.strip().startswith('['):
+                        if not found:
+                            newlines.append(trustedhost)
+                            found = True
+                        insection = False
+                if not found and self.section_name in line.lower() and not insection:
+                    insection = True
+                if not found and insection and re.match(r'trusted-host\s*=\s*%s' % indexserver.hostname, line):
+                    found = True
+                newlines.append(line)
         if not found:
             newlines.append(self.section_name + "\n")
             newlines.append(trustedhost)
-        self.path.write("".join(newlines))
+        self.path.write_text("".join(newlines))

     def clear_trustedhost(self, indexserver):
         self.ensure_backup_file()
         if not self.path.exists():
             return
         newlines = 
[] indexserver = URL(indexserver) - for line in self.path.readlines(cr=1): - if not re.match(r'trusted-host\s*=\s*%s' % indexserver.hostname, line): - newlines.append(line) - self.path.write("".join(newlines)) + with self.path.open() as f: + for line in f: + if not re.match(r'trusted-host\s*=\s*%s' % indexserver.hostname, line): + newlines.append(line) + self.path.write_text("".join(newlines)) class BuildoutCfg(BaseCfg): section_name = "[buildout]" config_name = "index" regex = re.compile(r"(index)\s*=\s*(.*)") - default_location = py.path.local( - "~/.buildout/default.cfg", expanduser=True) + default_location = Path("~/.buildout/default.cfg").expanduser() class KeyValues(list): diff --git a/client/news/pylib.removal b/client/news/pylib.removal new file mode 100644 index 000000000..d341f9cb6 --- /dev/null +++ b/client/news/pylib.removal @@ -0,0 +1,2 @@ +Removed dependency on py package. +Plugins which expect py.path.local need to be adjusted to work with pathlib.Path. \ No newline at end of file diff --git a/client/pyproject.toml b/client/pyproject.toml index 0067c778e..e302cb0ea 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -16,7 +16,6 @@ dependencies = [ "pkginfo>=1.4.2", "platformdirs", "pluggy>=0.6.0,<2.0", - "py>=1.4.31", ] requires-python = ">=3.7" maintainers = [ diff --git a/client/testing/conftest.py b/client/testing/conftest.py index 7c7ebb32f..bb8c0afab 100644 --- a/client/testing/conftest.py +++ b/client/testing/conftest.py @@ -1,13 +1,14 @@ from _pytest import capture from contextlib import closing +from devpi_common.contextlib import chdir from devpi_common.metadata import parse_version from io import StringIO +from pathlib import Path import codecs import os import pytest import socket import textwrap -import py import shutil import sys import json @@ -246,7 +247,6 @@ def devpi(cmd_devpi, devpi_username, url_of_liveserver): def _path_parts(path): - path = path and str(path) # py.path.local support parts = [] while path: folder, name = os.path.split(path) @@ -259,13 +259,6 @@ def _path_parts(path): return parts -def _path_join(base, *args): - # workaround for a py.path.local bug on Windows (`path.join('/x', abs=1)` - # should be py.path.local('X:\\x') where `X` is the current drive, when in - # fact it comes out as py.path.local('\\x')) - return py.path.local(base.join(*args, abs=1)) - - def _filedefs_contains(base, filedefs, path): """ whether `filedefs` defines a file/folder with the given `path` @@ -277,10 +270,10 @@ def _filedefs_contains(base, filedefs, path): """ unknown = object() - base = py.path.local(base) - path = _path_join(base, path) + base = Path(base) + path = base / path - path_rel_parts = _path_parts(path.relto(base)) + path_rel_parts = _path_parts(path.relative_to(base)) for part in path_rel_parts: if not isinstance(filedefs, dict): return False @@ -338,7 +331,7 @@ def initproj_(nameversion, filedefs=None, src_root=".", kind="setup.py"): else: name, version = nameversion base = tmpdir.join(name) - src_root_path = _path_join(base, src_root) + src_root_path = base / src_root assert base == src_root_path or src_root_path.relto( base ), "`src_root` must be the constructed project folder or its direct or indirect subfolder" @@ -601,8 +594,8 @@ def runproc(): def runprocess(tmpdir, cmdargs): from _pytest.pytester import RunResult cmdargs = [str(x) for x in cmdargs] - p1 = tmpdir.join("stdout") - print_info("running", cmdargs, "curdir=", py.path.local()) + p1 = Path(tmpdir) / "stdout" + print_info("running", cmdargs, "curdir=", Path()) with 
codecs.open(str(p1), "w", encoding="utf8") as f1: now = time.time() popen = subprocess.Popen( @@ -639,9 +632,9 @@ def do_create_venv(): # we need to change directory, otherwise the path will become # too long on windows venvinstalldir.ensure_dir() - os.chdir(venvinstalldir.strpath) + os.chdir(venvinstalldir) subprocess.check_call([ - "virtualenv", "--never-download", venvdir.strpath]) + "virtualenv", "--never-download", str(venvdir)]) # activate if sys.platform == "win32": bindir = "Scripts" @@ -687,7 +680,7 @@ def mkhub(arglist): arglist.append("--clientdir=%s" % tmp) pm = get_pluginmanager() args = parse_args(["devpi_"] + arglist, pm) - with tmp.as_cwd(): + with chdir(tmp): return Hub(args) return mkhub diff --git a/client/testing/test_list_remove.py b/client/testing/test_list_remove.py index 514269596..e7d7b49af 100644 --- a/client/testing/test_list_remove.py +++ b/client/testing/test_list_remove.py @@ -6,7 +6,7 @@ from devpi.list_remove import out_index from devpi.list_remove import out_project from devpi.list_remove import show_commands -import py +from pathlib import Path import pytest @@ -108,7 +108,7 @@ def test_all(self, initproj, devpi, out_devpi): initproj("hello-1.0", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") devpi("upload", "--no-isolation", "--formats", "sdist.zip,bdist_dumb") initproj("hello-1.1", {"doc": { @@ -142,7 +142,7 @@ def test_remove_file(self, initproj, devpi, out_devpi, server_version, url_of_li initproj("hello-1.0", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") devpi("upload", "--no-isolation", "--formats", "sdist.zip,bdist_dumb") initproj("hello-1.1", {"doc": { @@ -168,7 +168,7 @@ def test_all_index_option(self, initproj, devpi, out_devpi, other_index): initproj("hello-1.0", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") devpi("upload", "--no-isolation", "--formats", "sdist.zip,bdist_dumb") initproj("hello-1.1", {"doc": { @@ -211,7 +211,7 @@ def test_delete_version_with_inheritance(self, initproj, devpi, out_devpi): initproj("dddttt-0.666", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") out = out_devpi("list", "dddttt", "--all") out.stdout.fnmatch_lines_random("*/dev/*/dddttt-0.666.zip") @@ -226,7 +226,7 @@ def test_delete_version_range_with_inheritance(self, initproj, devpi, out_devpi) initproj("dddttt-0.666", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") # remember username out = out_devpi("use") @@ -237,13 +237,13 @@ def test_delete_version_range_with_inheritance(self, initproj, devpi, out_devpi) initproj("dddttt-1.0", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") # upload 2.0 to dev2 index initproj("dddttt-2.0", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() 
devpi("upload", "--no-isolation", "--formats", "sdist.zip") out = out_devpi("list", "dddttt", "--all") @@ -267,7 +267,7 @@ def test_delete_project_with_inheritance(self, initproj, devpi, out_devpi, simpy initproj("dddttt-0.666", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") out = out_devpi("list", "dddttt", "--all") out.stdout.fnmatch_lines_random("*/dev/*/dddttt-0.666.zip") @@ -285,7 +285,7 @@ def test_delete_file_non_volatile(self, initproj, devpi, out_devpi, server_versi initproj("dddttt-0.666", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") out = out_devpi("list", "dddttt", "--all") out.stdout.fnmatch_lines_random("*/dev/*/dddttt-0.666.zip") @@ -305,7 +305,7 @@ def test_delete_project_non_volatile(self, initproj, devpi, out_devpi, server_ve initproj("dddttt-0.666", {"doc": { "conf.py": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() devpi("upload", "--no-isolation", "--formats", "sdist.zip") out = out_devpi("list", "dddttt", "--all") out.stdout.fnmatch_lines_random("*/dev/*/dddttt-0.666.zip") diff --git a/client/testing/test_test.py b/client/testing/test_test.py index 35b0392e5..4541ee682 100644 --- a/client/testing/test_test.py +++ b/client/testing/test_test.py @@ -1,6 +1,5 @@ import os import subprocess -import py import pytest import sys from devpi_common.viewhelp import ViewLinkStore @@ -8,6 +7,7 @@ from devpi.test import find_sdist_and_wheels from devpi.test import prepare_toxrun_args from devpi.test import post_tox_json_report +from pathlib import Path def test_post_tox_json_report(loghub, mock_http_api): @@ -49,7 +49,7 @@ def test_passthrough_args_toxargs(makehub, tmpdir, pseudo_current): hub = makehub(["test", "--tox-args", "-- -x", "somepkg"]) index = DevIndex(hub, tmpdir, pseudo_current) tmpdir.ensure("tox.ini") - args = index.get_tox_args(unpack_path=tmpdir) + args = index.get_tox_args(unpack_path=Path(tmpdir.strpath)) assert args[-2:] == ["--", "-x"] @@ -147,7 +147,7 @@ def test_toxini(makehub, tmpdir, pseudo_current): hub = makehub(["test", "-c", toxini, "somepkg"]) index = DevIndex(hub, tmpdir, pseudo_current) tmpdir.ensure("tox.ini") - args = index.get_tox_args(unpack_path=tmpdir) + args = index.get_tox_args(unpack_path=Path(tmpdir.strpath)) assert contains_sublist(args, ["-c", str(toxini)]) @@ -155,7 +155,7 @@ def test_passthrough_args_env(makehub, tmpdir, pseudo_current): hub = makehub(["test", "-epy27", "somepkg"]) index = DevIndex(hub, tmpdir, pseudo_current) tmpdir.ensure("tox.ini") - args = index.get_tox_args(unpack_path=tmpdir) + args = index.get_tox_args(unpack_path=Path(tmpdir.strpath)) assert contains_sublist(args, ["-epy27"]) @@ -163,10 +163,10 @@ def test_fallback_ini(makehub, tmpdir, pseudo_current): p = tmpdir.ensure("mytox.ini") hub = makehub(["test", "--fallback-ini", str(p), "somepkg"]) index = DevIndex(hub, tmpdir, pseudo_current) - args = index.get_tox_args(unpack_path=tmpdir) + args = index.get_tox_args(unpack_path=Path(tmpdir.strpath)) assert contains_sublist(args, ["-c", str(p)]) p2 = tmpdir.ensure("tox.ini") - args = index.get_tox_args(unpack_path=tmpdir) + args = index.get_tox_args(unpack_path=Path(tmpdir.strpath)) assert contains_sublist(args, ["-c", str(p2)]) @@ -243,16 +243,17 @@ def test_prepare_toxrun_args(self, 
loghub, pseudo_current, tmpdir, reqmock, init initproj("prep1-1.0", filedefs={}) subprocess.check_call(["python", "setup.py", "sdist", "--formats=gztar,zip"]) subprocess.check_call(["python", "setup.py", "bdist_wheel", "--universal"]) - for p in py.path.local("dist").listdir(): - reqmock.mockresponse("http://b/" + p.basename, code=200, method="GET", - data=p.read("rb")) + for p in Path("dist").iterdir(): + reqmock.mockresponse( + f"http://b/{p.name}", + code=200, data=p.read_bytes(), method="GET") toxrunargs = prepare_toxrun_args(dev_index, vl, sdist_links, wheel_links) assert len(toxrunargs) == 3 sdist1, sdist2, wheel1 = toxrunargs assert sdist1[0].basename == "prep1-1.0.tar.gz" - assert sdist1[1].path_unpacked.strpath.endswith("targz" + os.sep + "prep1-1.0") + assert str(sdist1[1].path_unpacked).endswith("targz" + os.sep + "prep1-1.0") assert sdist2[0].basename == "prep1-1.0.zip" - assert sdist2[1].path_unpacked.strpath.endswith("zip" + os.sep + "prep1-1.0") + assert str(sdist2[1].path_unpacked).endswith("zip" + os.sep + "prep1-1.0") assert wheel1[0].basename == "prep1-1.0-py2.py3-none-any.whl" assert str(wheel1[1].path_unpacked).endswith(wheel1[0].basename) @@ -273,16 +274,17 @@ def test_prepare_toxrun_args2(self, loghub, pseudo_current, tmpdir, reqmock, ini initproj("prep_under-1.0", filedefs={}) subprocess.check_call(["python", "setup.py", "sdist", "--formats=gztar,zip"]) subprocess.check_call(["python", "setup.py", "bdist_wheel", "--universal"]) - for p in py.path.local("dist").listdir(): - reqmock.mockresponse("http://b/" + p.basename, code=200, method="GET", - data=p.read("rb")) + for p in Path("dist").iterdir(): + reqmock.mockresponse( + f"http://b/{p.name}", + code=200, data=p.read_bytes(), method="GET") toxrunargs = prepare_toxrun_args(dev_index, vl, sdist_links, wheel_links) assert len(toxrunargs) == 3 sdist1, sdist2, wheel1 = toxrunargs assert sdist1[0].basename == "prep_under-1.0.tar.gz" - assert sdist1[1].path_unpacked.strpath.endswith("targz" + os.sep + "prep_under-1.0") + assert str(sdist1[1].path_unpacked).endswith("targz" + os.sep + "prep_under-1.0") assert sdist2[0].basename == "prep_under-1.0.zip" - assert sdist2[1].path_unpacked.strpath.endswith("zip" + os.sep + "prep_under-1.0") + assert str(sdist2[1].path_unpacked).endswith("zip" + os.sep + "prep_under-1.0") assert wheel1[0].basename == "prep_under-1.0-py2.py3-none-any.whl" assert str(wheel1[1].path_unpacked).endswith(wheel1[0].basename) @@ -302,9 +304,10 @@ def test_prepare_toxrun_args_select(self, loghub, pseudo_current, tmpdir, reqmoc initproj("prep_under-1.0", filedefs={}) subprocess.check_call(["python", "setup.py", "sdist", "--formats=gztar"]) subprocess.check_call(["python", "setup.py", "bdist_wheel"]) - for p in py.path.local("dist").listdir(): - reqmock.mockresponse("http://b/" + p.basename, code=200, method="GET", - data=p.read("rb")) + for p in Path("dist").iterdir(): + reqmock.mockresponse( + f"http://b/{p.name}", + code=200, data=p.read_bytes(), method="GET") toxrunargs = prepare_toxrun_args( dev_index, vl, sdist_links, wheel_links, select=pyver) assert len(toxrunargs) == 1 @@ -339,8 +342,8 @@ def test_wheels_only_download_selected(self, loghub, monkeypatch, pseudo_current select="(?:.*39)(?:.*linux)(?:.*whl)") ((wheel_link, wheel, wheel_sdist),) = toxrunargs assert wheel_link.basename == "prep_under-1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" - assert wheel.path_archive.basename == "prep_under-1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" - assert 
wheel_sdist.path_archive.basename == "prep_under-1.0.tar.gz" + assert wheel.path_archive.name == "prep_under-1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + assert wheel_sdist.path_archive.name == "prep_under-1.0.tar.gz" def test_wheels_and_sdist(self, out_devpi, create_and_upload): create_and_upload("exa-1.0", filedefs={ diff --git a/client/testing/test_upload.py b/client/testing/test_upload.py index 1c21271f7..b6e69003b 100644 --- a/client/testing/test_upload.py +++ b/client/testing/test_upload.py @@ -1,7 +1,6 @@ import json import os import stat -import py import pytest import re import shutil @@ -13,7 +12,9 @@ from devpi.upload import get_pkginfo from devpi.upload import main from devpi.upload import read_setupcfg +from devpi_common.contextlib import chdir from io import BytesIO +from pathlib import Path from textwrap import dedent from devpi_common.metadata import splitbasename from devpi_common.viewhelp import ViewLinkStore @@ -22,7 +23,7 @@ @pytest.fixture def datadir(): - return py.path.local(__file__).dirpath("data") + return Path(__file__).parent / "data" def runproc(cmd): @@ -78,7 +79,7 @@ def repo(self, request, setupdir_rel, tmpdir_factory): if request.param == "hg": if not shutil.which("hg"): pytest.skip("'hg' command not found") - with repo.as_cwd(): + with chdir(repo): runproc("hg init") runproc("hg add {0}/file {0}/link {0}/setup.py".format(setupdir_rel)) runproc("hg add {0}/file {0}/{1}".format(setupdir_rel, @@ -87,7 +88,7 @@ def repo(self, request, setupdir_rel, tmpdir_factory): return repo if not shutil.which("git"): pytest.skip("'git' command not found") - with repo.as_cwd(): + with chdir(repo): runproc("git init") runproc("git config user.email 'you@example.com'") runproc("git config user.name 'you'") @@ -100,57 +101,56 @@ def repo(self, request, setupdir_rel, tmpdir_factory): def test_vcs_export(self, uploadhub, repo, setupdir, tmpdir): checkout = Checkout(uploadhub, uploadhub.args, setupdir) assert checkout.rootpath == repo - newrepo = tmpdir.mkdir("newrepo") + newrepo = Path(tmpdir.mkdir("newrepo").strpath) result = checkout.export(newrepo) - assert result.rootpath.join("file").check() - assert result.rootpath.join("link").check() + assert result.rootpath.joinpath("file").exists() + assert result.rootpath.joinpath("link").exists() if not sys.platform.startswith("win"): - assert result.rootpath.join("link").readlink() == ".." - assert result.rootpath == newrepo.join(repo.basename).join( - repo.bestrelpath(setupdir)) + assert os.readlink(result.rootpath / "link") == ".." 
+ assert result.rootpath == newrepo / repo.basename / repo.bestrelpath(setupdir) # ensure we also copied repo meta info if repo.join(".hg").exists(): - assert newrepo.join(repo.basename).join(".hg").listdir() + assert list(newrepo.joinpath(repo.basename, ".hg").iterdir()) else: - assert newrepo.join(repo.basename).join(".git").listdir() + assert list(newrepo.joinpath(repo.basename, ".git").iterdir()) with uploadhub.workdir() as uploadbase: checkout.export(uploadbase) - readonly = uploadbase.join("readonly") - readonly.write("foo") + readonly = uploadbase / "readonly" + readonly.write_text("foo") ro_bits = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH os.chmod(str(readonly), ro_bits) - assert readonly.check() is False - assert uploadbase.check() is False + assert not readonly.exists() + assert not uploadbase.exists() def test_vcs_export_setupdironly(self, uploadhub, setupdir, tmpdir, monkeypatch): monkeypatch.setattr(uploadhub.args, "setupdironly", True) checkout = Checkout(uploadhub, uploadhub.args, setupdir) assert checkout.rootpath == setupdir - newrepo = tmpdir.mkdir("newrepo") + newrepo = Path(tmpdir.mkdir("newrepo").strpath) result = checkout.export(newrepo) - assert result.rootpath.join("file").check() - assert result.rootpath.join("link").check() - p = result.rootpath.join("setup.py") + assert result.rootpath.joinpath("file").exists() + assert result.rootpath.joinpath("link").exists() + p = result.rootpath / "setup.py" assert p.exists() if not sys.platform.startswith("win"): - assert p.stat().mode & int("0777", 8) == int("0777", 8) - assert result.rootpath.join("link").readlink() == '..' - assert result.rootpath == newrepo.join(setupdir.basename) + assert p.stat().st_mode & int("0777", 8) == int("0777", 8) + assert os.readlink(result.rootpath / "link") == '..' 
+ assert result.rootpath == newrepo / setupdir.basename def test_vcs_export_disabled(self, uploadhub, setupdir, tmpdir, monkeypatch): monkeypatch.setattr(uploadhub.args, "novcs", True) checkout = Checkout(uploadhub, uploadhub.args, setupdir) assert not checkout.hasvcs - exported = checkout.export(tmpdir) + exported = checkout.export(Path(tmpdir.strpath)) assert exported.rootpath == checkout.setupdir def test_vcs_export_verify_setup(self, uploadhub, setupdir, tmpdir): subdir = setupdir.mkdir("subdir") subdir.ensure("setup.py") checkout = Checkout(uploadhub, uploadhub.args, subdir) - wc = tmpdir.mkdir("wc") + wc = Path(tmpdir.mkdir("wc").strpath) exported = checkout.export(wc) - assert not exported.rootpath.join("setup.py").check() + assert not exported.rootpath.joinpath("setup.py").exists() def test_export_attributes(self, uploadhub, setupdir, tmpdir): checkout = Checkout(uploadhub, uploadhub.args, setupdir) @@ -160,7 +160,7 @@ def test_export_attributes(self, uploadhub, setupdir, tmpdir): print("* foo, bar") setup(name="xyz", version="1.2.3") """)) - exported = checkout.export(tmpdir) + exported = checkout.export(Path(tmpdir.strpath)) name, version = exported.setup_name_and_version() assert name == "xyz" assert version == "1.2.3" @@ -171,7 +171,7 @@ def test_setup_build_docs(self, uploadhub, setupdir, tmpdir): from setuptools import setup setup(name="xyz", version="1.2.3") """)) - exported = checkout.export(tmpdir) + exported = checkout.export(Path(tmpdir.strpath)) assert exported.rootpath != exported.origrepo # we have to mock a bit unfortunately # to find out if the sphinx building popen command @@ -254,8 +254,8 @@ class args: withdocs = None initproj("pkg-1.0") - tmpdir = py.path.local() - certpath = tmpdir.join("cert.key").strpath + tmpdir = Path() + certpath = str(tmpdir / "cert.key") uploadhub.cwd = tmpdir uploadhub.http = Session() uploadhub.current.reconfigure(dict( @@ -307,8 +307,7 @@ def send(req, **kw): return Response() initproj("pkg-1.0") - tmpdir = py.path.local() - uploadhub.cwd = tmpdir + uploadhub.cwd = Path() uploadhub.current.reconfigure(dict( index="http://devpi/foo/bar", login="http://devpi/+login", @@ -365,8 +364,7 @@ class args: token = pypitoken.token.Token.load(passwd) assert pypitoken.ProjectNamesRestriction( project_names=["pkg"]) not in token.restrictions - tmpdir = py.path.local() - uploadhub.cwd = tmpdir + uploadhub.cwd = Path() uploadhub.http = Session() uploadhub.current.reconfigure(dict( index="http://devpi/foo/bar", @@ -401,7 +399,7 @@ def projname_version(self, projname_version_project): return projname_version_project[0] def test_plain_dry_run(self, devpi, out_devpi, projname_version): - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() out = out_devpi("upload", "--no-isolation", "--dry-run") assert out.ret == 0 out.stdout.fnmatch_lines(""" @@ -629,7 +627,7 @@ def test_upload_to_mirror( "conf.py": "#nothing", "contents.rst": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() # use mirror out = out_devpi("use", "root/pypi") @@ -645,7 +643,7 @@ def test_index_option( "conf.py": "#nothing", "contents.rst": "", "index.html": ""}}) - assert py.path.local("setup.py").check() + assert Path("setup.py").is_file() # remember username out = out_devpi("use") user = re.search(r'\(logged in as (.+?)\)', out.stdout.str()).group(1) @@ -665,7 +663,7 @@ def test_index_option_with_environment_relative( "conf.py": "#nothing", "contents.rst": "", "index.html": ""}}) - assert 
py.path.local("setup.py").check() + assert Path("setup.py").is_file() # remember username out = out_devpi("use") user = re.search(r'\(logged in as (.+?)\)', out.stdout.str()).group(1) @@ -696,12 +694,12 @@ def test_fromdir(self, initproj, devpi, out_devpi, runproc): initproj("hello-1.1", {"doc": { "conf.py": "", "index.html": ""}}) - tmpdir = py.path.local() + tmpdir = Path() runproc(tmpdir, "python setup.py sdist --format=zip".split()) initproj("hello-1.2") runproc(tmpdir, "python setup.py sdist --format=zip".split()) - dist = tmpdir.join("dist") - assert len(dist.listdir()) == 2 + dist = tmpdir / "dist" + assert len(list(dist.iterdir())) == 2 hub = devpi("upload", "--no-isolation", "--from-dir", dist) for ver in ("1.1", '1.2'): url = hub.current.get_index_url().url + "hello/%s/" % ver @@ -724,12 +722,12 @@ def test_frompath(self, initproj, devpi, name_version, out_devpi, path, runproc) initproj(name_version, {"doc": { "conf.py": "", "index.html": ""}}) - tmpdir = py.path.local() + tmpdir = Path() runproc(tmpdir, "python setup.py sdist --format=zip".split()) - dist = tmpdir.join("dist") - zip_dir(tmpdir.join('doc'), dist.join("%s.doc.zip" % name_version_str)) - assert len(dist.listdir()) == 2 - (p, dp) = sorted(dist.listdir(), key=lambda x: '.doc.zip' in x.basename) + dist = tmpdir / "dist" + zip_dir(tmpdir / 'doc', dist / f"{name_version_str}.doc.zip") + assert len(list(dist.iterdir())) == 2 + (p, dp) = sorted(dist.iterdir(), key=lambda x: '.doc.zip' in x.name) hub = devpi("upload", "--no-isolation", p, dp) url = hub.current.get_index_url().url + path out = out_devpi("getjson", url) @@ -742,8 +740,8 @@ def test_frompath(self, initproj, devpi, name_version, out_devpi, path, runproc) def test_cli_sdist_precedence(self, initproj, devpi, out_devpi): initproj("pkg-1.0") - tmpdir = py.path.local() - tmpdir.join("setup.cfg").write(dedent(""" + tmpdir = Path() + tmpdir.joinpath("setup.cfg").write_text(dedent(""" [devpi:upload] formats=bdist_wheel,sdist.zip""")) hub = devpi("upload", "--sdist", "--no-isolation") @@ -756,8 +754,8 @@ def test_cli_sdist_precedence(self, initproj, devpi, out_devpi): def test_cli_wheel_precedence(self, initproj, devpi, out_devpi): initproj("pkg-1.0") - tmpdir = py.path.local() - tmpdir.join("setup.cfg").write(dedent(""" + tmpdir = Path() + tmpdir.joinpath("setup.cfg").write_text(dedent(""" [devpi:upload] formats=bdist_wheel,sdist.zip""")) hub = devpi("upload", "--wheel", "--no-isolation") @@ -772,10 +770,10 @@ def test_cli_wheel_precedence(self, initproj, devpi, out_devpi): def test_getpkginfo(datadir): - info = get_pkginfo(datadir.join("dddttt-0.1.dev45-py27-none-any.whl")) + info = get_pkginfo(datadir / "dddttt-0.1.dev45-py27-none-any.whl") assert info.name == "dddttt" assert info.metadata_version == "2.0" - info = get_pkginfo(datadir.join("ddd-1.0.doc.zip")) + info = get_pkginfo(datadir / "ddd-1.0.doc.zip") assert info.name == "ddd" assert info.version == "1.0" diff --git a/client/testing/test_use.py b/client/testing/test_use.py index aca4dd326..b00e20d48 100644 --- a/client/testing/test_use.py +++ b/client/testing/test_use.py @@ -83,15 +83,16 @@ def test_local_config(self, capfd, cmd_devpi, create_venv, mock_http_api, monkey hub = cmd_devpi("use", "http://devpi/foo/bar") (out, err) = capfd.readouterr() current_path = hub.current_path - assert current_path.strpath.endswith('client/current.json') + assert current_path.name == 'current.json' + assert current_path.parent.name == 'client' local_current_path = hub.local_current_path - assert venvdir.strpath in 
local_current_path.strpath - assert local_current_path.strpath.endswith('devpi.json') + assert venvdir.strpath in str(local_current_path) + assert local_current_path.name == 'devpi.json' assert not local_current_path.exists() assert venvdir.strpath in out hub = cmd_devpi("use", "--local") (out, err) = capfd.readouterr() - assert hub.current_path.strpath == local_current_path.strpath + assert str(hub.current_path) == str(local_current_path) assert local_current_path.exists() mock_http_api.set( "http://devpi/foo/ham/+api", 200, result=dict( @@ -111,7 +112,7 @@ def test_local_config(self, capfd, cmd_devpi, create_venv, mock_http_api, monkey hub = cmd_devpi("use") (out, err) = capfd.readouterr() assert 'current devpi index: http://devpi/foo/ham' in out - local_current_path.remove() + local_current_path.unlink() hub = cmd_devpi("use") (out, err) = capfd.readouterr() assert 'current devpi index: http://devpi/foo/bar' in out @@ -123,7 +124,8 @@ def test_local_config_no_auth_key(self, cmd_devpi, create_venv, monkeypatch): monkeypatch.setenv("VIRTUAL_ENV", venvdir.strpath) hub = cmd_devpi("use") current_path = hub.current_path - assert current_path.strpath.endswith('client/current.json') + assert current_path.name == 'current.json' + assert current_path.parent.name == 'client' local_current_path = hub.local_current_path assert not local_current_path.exists() with current_path.open("w") as f: @@ -681,7 +683,7 @@ def test_main_setcfg(self, scheme, basic_auth, capfd, mock_http_api, cmd_devpi, assert basic_auth not in out assert ':****@' in out assert PipCfg().default_location.exists() - content = PipCfg().default_location.read() + content = PipCfg().default_location.read_text() assert len( re.findall(r"index_url\s*=\s*%s://%sworld/simple" % ( scheme, basic_auth), content)) == 1 @@ -709,7 +711,7 @@ def test_main_setcfg(self, scheme, basic_auth, capfd, mock_http_api, cmd_devpi, cmd_devpi( "use", "--set-cfg", "--pip-set-trusted=yes", "%s://%sworld" % ( scheme, basic_auth)) - content = PipCfg().default_location.read() + content = PipCfg().default_location.read_text() assert len( re.findall(r"trusted-host\s*=\s*world", content)) == 1 hub = cmd_devpi("use", "--always-set-cfg=yes", "--pip-set-trusted=yes") diff --git a/client/tox.ini b/client/tox.ini index a31e0751f..8a676cf3e 100644 --- a/client/tox.ini +++ b/client/tox.ini @@ -20,7 +20,6 @@ deps = pypitoken importlib.metadata;python_version<"3.8" mock - py!=1.4.32 sphinx webtest wheel