From af3cd6a7e31f5b087073c6a009c845f154bd6a24 Mon Sep 17 00:00:00 2001
From: Tyler Reddy
Date: Mon, 20 Oct 2025 10:39:55 -0600
Subject: [PATCH] BUG: futures with dir switching

* Fixes gh-2065.

* Add a regression test for the failure to collect `.coverage` files when
  the parent and child (`concurrent.futures`) processes run in different
  working directories. The test currently fails, apparently for the
  expected reason.

* Add a fix for the problem described in the matching issue. It does not
  yet restore the desired behavior in the regression test, possibly
  because of something I am missing about the "meta" testing of
  subprocess coverage.
---
 coverage/control.py       |  8 ++++++++
 coverage/multiproc.py     |  2 ++
 coverage/sqldata.py       |  9 +++++++++
 tests/test_concurrency.py | 41 +++++++++++++++++++++++++++++++++++++++
 4 files changed, 60 insertions(+)

diff --git a/coverage/control.py b/coverage/control.py
index 0d50783cd..7f94f8150 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -559,12 +559,17 @@ def load(self) -> None:
 
     def _init_for_start(self) -> None:
         """Initialization for start()"""
+        self.orig_dir = None
         # Construct the collector.
         concurrency: list[str] = self.config.concurrency or []
+        coverpath_file = os.path.join(os.getcwd(), ".coverpath")
         if "multiprocessing" in concurrency:
             if self.config.config_file is None:
                 raise ConfigError("multiprocessing requires a configuration file")
             patch_multiprocessing(rcfile=self.config.config_file)
+            if os.path.exists(coverpath_file):
+                with open(coverpath_file, encoding="utf-8") as cf:
+                    self.orig_dir = cf.read().strip()
 
         dycon = self.config.dynamic_context
         if not dycon or dycon == "none":
@@ -656,6 +661,8 @@ def _init_for_start(self) -> None:
 
     def _init_data(self, suffix: str | bool | None) -> None:
         """Create a data file if we don't have one yet."""
+        if not hasattr(self, "orig_dir"):
+            self.orig_dir = None
         if self._data is None:
             # Create the data file. We do this at construction time so that the
             # data file will be written into the directory where the process
@@ -667,6 +674,7 @@
                 warn=self._warn,
                 debug=self._debug,
                 no_disk=self._no_disk,
+                orig_dir=self.orig_dir,
             )
             self._data_to_close.append(self._data)
 
diff --git a/coverage/multiproc.py b/coverage/multiproc.py
index 1c2d2f7d7..fa1ae524d 100644
--- a/coverage/multiproc.py
+++ b/coverage/multiproc.py
@@ -113,6 +113,8 @@ def get_preparation_data_with_stowaway(name: str) -> dict[str, Any]:
             """Get the original preparation data, and also insert our stowaway."""
             d = original_get_preparation_data(name)
             d["stowaway"] = Stowaway(rcfile)
+            with open(".coverpath", "w", encoding="utf-8") as cpath:
+                cpath.write(d["orig_dir"])
             return d
 
         spawn.get_preparation_data = get_preparation_data_with_stowaway
diff --git a/coverage/sqldata.py b/coverage/sqldata.py
index 3c958f405..cc7421277 100644
--- a/coverage/sqldata.py
+++ b/coverage/sqldata.py
@@ -11,6 +11,7 @@
 import glob
 import itertools
 import os
+import shutil
 import random
 import socket
 import sqlite3
@@ -232,6 +233,8 @@ def __init__(
         no_disk: bool = False,
         warn: TWarnFn | None = None,
         debug: TDebugCtl | None = None,
+        *,
+        orig_dir = None,
     ) -> None:
         """Create a :class:`CoverageData` object to hold coverage-measured data.
 
@@ -271,6 +274,7 @@ def __init__(
         self._current_context: str | None = None
         self._current_context_id: int | None = None
         self._query_context_ids: list[int] | None = None
+        self.orig_dir = orig_dir
 
     __repr__ = auto_repr
 
@@ -898,6 +902,11 @@ def read(self) -> None:
     def write(self) -> None:
         """Ensure the data is written to the data file."""
         self._debug_dataio("Writing (no-op) data file", self._filename)
+        if self.orig_dir is not None:
+            for fname in os.listdir():
+                if fname.startswith(".coverage."):
+                    if not os.path.exists(os.path.join(self.orig_dir, fname)):
+                        shutil.copy(fname, self.orig_dir)
 
     def _start_using(self) -> None:
         """Call this before using the database at all."""
diff --git a/tests/test_concurrency.py b/tests/test_concurrency.py
index dd761e377..858bc6bc9 100644
--- a/tests/test_concurrency.py
+++ b/tests/test_concurrency.py
@@ -13,6 +13,7 @@
 import re
 import sys
 import threading
+import concurrent.futures
 import time
 
 from types import ModuleType
@@ -412,6 +413,32 @@ def work(x):
         return sum_range((x+1)*100)
     """
 
+MULTI_CODE_DIR_CHANGE = """
+    import os
+    import tempfile
+    from concurrent.futures import ProcessPoolExecutor
+
+    def add(a, b):
+        return a + b
+
+    def probe_dispatcher():
+        orig_dir = os.getcwd()
+        with tempfile.TemporaryDirectory() as temp_dir:
+            os.chdir(temp_dir)
+            dispatcher()
+            os.chdir(orig_dir)
+
+    def dispatcher():
+        futures = []
+        with ProcessPoolExecutor({NPROCS}) as executor:
+            futures.append(executor.submit(add, 2, 2))
+        for future in futures:
+            future.result()
+
+    if __name__ == "__main__":
+        probe_dispatcher()
+    """
+
 MULTI_CODE = """
     # Above this will be a definition of work().
     import multiprocessing
@@ -522,6 +549,20 @@ def test_multiprocessing_simple(self, start_method: str) -> None:
             start_method=start_method,
         )
 
+    def test_gh_2065(self, start_method: str) -> None:
+        nprocs = 1
+        upto = 30
+        code = (MULTI_CODE_DIR_CHANGE).format(NPROCS=nprocs, UPTO=upto)
+        total = 0
+        expected_out = f""
+        self.try_multiprocessing_code(
+            code,
+            expected_out,
+            concurrent.futures,
+            nprocs,
+            start_method=start_method,
+        )
+
     def test_multiprocessing_append(self, start_method: str) -> None:
         nprocs = 3
         upto = 30
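
Note for reviewers: below is a minimal standalone sketch of the gh-2065 scenario, for trying the
failure outside the test suite. It is illustrative only, not part of the patch: the file name
repro_gh2065.py and the helper run_child() are made up, and it assumes a .coveragerc with
parallel = true and concurrency = multiprocessing, run as `coverage run repro_gh2065.py`
followed by `coverage combine`.

# repro_gh2065.py -- illustrative reproduction sketch (not part of the patch).
# Assumed .coveragerc:
#     [run]
#     parallel = true
#     concurrency = multiprocessing
# Run:  coverage run repro_gh2065.py && coverage combine
# Without the fix, the child's .coverage.* data file can land in the temporary
# directory (the parent's cwd at spawn time) and is lost when the temporary
# directory is cleaned up, so `coverage combine` has nothing to collect for it.
import os
import tempfile
from concurrent.futures import ProcessPoolExecutor


def run_child(x: int) -> int:
    """Trivial work whose coverage should end up in the combined data."""
    return x + 1


def main() -> None:
    start_dir = os.getcwd()
    with tempfile.TemporaryDirectory() as temp_dir:
        os.chdir(temp_dir)  # parent switches directory...
        try:
            with ProcessPoolExecutor(max_workers=1) as executor:
                print(executor.submit(run_child, 2).result())  # ...then spawns a child
        finally:
            os.chdir(start_dir)  # restore cwd before the temp dir is removed


if __name__ == "__main__":
    main()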