From 786d839db1e584e33f2f0543b6c16cdfeefe11ee Mon Sep 17 00:00:00 2001
From: Daniel Hahler
Date: Sat, 16 Nov 2019 17:17:57 +0100
Subject: [PATCH] cacheprovider: set: use json.dumps + write

``json.dump`` is slower since it iterates over chunks [1].

For 100 ``cache.set`` calls this saved ~0.5s (2.5s => 2s), using a dict
with 1500 entries and an encoded size of ~500 kB (Python 3.7.4).

1: https://github.com/blueyed/cpython/blob/1c2e81ed00/Lib/json/__init__.py#L177-L180
---
 changelog/6206.improvement.rst | 1 +
 src/_pytest/cacheprovider.py   | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)
 create mode 100644 changelog/6206.improvement.rst

diff --git a/changelog/6206.improvement.rst b/changelog/6206.improvement.rst
new file mode 100644
index 00000000000..67d8363b39c
--- /dev/null
+++ b/changelog/6206.improvement.rst
@@ -0,0 +1 @@
+cacheprovider: improved robustness and performance with ``cache.set``.
diff --git a/src/_pytest/cacheprovider.py b/src/_pytest/cacheprovider.py
index 3c60fdb33a6..6e53545d630 100755
--- a/src/_pytest/cacheprovider.py
+++ b/src/_pytest/cacheprovider.py
@@ -125,13 +125,14 @@ def set(self, key, value):
             return
         if not cache_dir_exists_already:
             self._ensure_supporting_files()
+        data = json.dumps(value, indent=2, sort_keys=True)
         try:
             f = path.open("w")
         except (IOError, OSError):
             self.warn("cache could not write path {path}", path=path)
         else:
             with f:
-                json.dump(value, f, indent=2, sort_keys=True)
+                f.write(data)
 
     def _ensure_supporting_files(self):
         """Create supporting files in the cache dir that are not really part of the cache."""
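
Note (not part of the patch): the speedup comes from ``json.dump`` encoding
incrementally and calling ``fp.write`` once per chunk, whereas ``json.dumps``
builds the whole string first so a single ``write`` suffices. Below is a
minimal micro-benchmark sketch illustrating the difference; the payload shape
and iteration count are assumptions, not the original measurement setup.

import io
import json
import timeit

# Roughly 1500 top-level entries, mimicking the size mentioned in the
# commit message (the exact shape of the original data is unknown).
value = {"key%d" % i: list(range(10)) for i in range(1500)}

def with_dump():
    # json.dump encodes incrementally and calls buf.write() once per chunk.
    buf = io.StringIO()
    json.dump(value, buf, indent=2, sort_keys=True)

def with_dumps():
    # json.dumps builds the full string first; a single write() follows.
    buf = io.StringIO()
    buf.write(json.dumps(value, indent=2, sort_keys=True))

print("json.dump :", timeit.timeit(with_dump, number=100))
print("json.dumps:", timeit.timeit(with_dumps, number=100))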