From 3a08de175f73f325b2b784e76c86410bb8e3c915 Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Thu, 5 Aug 2021 14:47:02 -0400
Subject: [PATCH] feat: `coverage combine` now prints messages naming the files being combined. #1105

---
 CHANGES.rst               |  4 ++++
 coverage/control.py       |  1 +
 coverage/data.py          |  6 +++++-
 tests/test_api.py         |  4 ++++
 tests/test_concurrency.py | 30 ++++++++++++++++++------------
 5 files changed, 32 insertions(+), 13 deletions(-)

diff --git a/CHANGES.rst b/CHANGES.rst
index 7977b1a5b..600c71f88 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -27,6 +27,9 @@ Unreleased
 - The ``coverage html`` command now prints a message indicating where the HTML
   report was written. Fixes `issue 1195`_.
 
+- The ``coverage combine`` command now prints messages indicating each data
+  file being combined. Fixes `issue 1105`_.
+
 - Unrecognized options in the configuration file are no longer errors. They are
   now warnings, to ease the use of coverage across versions. Fixes `issue
   1035`_.
@@ -35,6 +38,7 @@ Unreleased
   unsupported type." (`issue 1010`_).
 
 .. _issue 1035: https://github.com/nedbat/coveragepy/issues/1035
+.. _issue 1105: https://github.com/nedbat/coveragepy/issues/1105
 .. _issue 1195: https://github.com/nedbat/coveragepy/issues/1195
 
 
diff --git a/coverage/control.py b/coverage/control.py
index 958c98da0..8a55a3174 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -718,6 +718,7 @@ def combine(self, data_paths=None, strict=False, keep=False):
             data_paths=data_paths,
             strict=strict,
             keep=keep,
+            message=self._message,
         )
 
     def get_data(self):
diff --git a/coverage/data.py b/coverage/data.py
index 752822b72..68ba7ec33 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -53,7 +53,9 @@ def add_data_to_hash(data, filename, hasher):
     hasher.update(data.file_tracer(filename))
 
 
-def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, keep=False):
+def combine_parallel_data(
+    data, aliases=None, data_paths=None, strict=False, keep=False, message=None,
+):
     """Combine a number of data files together.
 
     Treat `data.filename` as a file prefix, and combine the data from all
@@ -117,6 +119,8 @@ def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, kee
         else:
             data.update(new_data, aliases=aliases)
             files_combined += 1
+            if message:
+                message(f"Combined data file {os.path.relpath(f)}")
             if not keep:
                 if data._debug.should('dataio'):
                     data._debug.write(f"Deleting combined data file {f!r}")
diff --git a/tests/test_api.py b/tests/test_api.py
index 885f33706..5f7b3522d 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -437,6 +437,7 @@ def test_combining_twice(self):
         self.make_good_data_files()
         cov1 = coverage.Coverage()
         cov1.combine()
+        assert self.stdout() == ""
         cov1.save()
         self.check_code1_code2(cov1)
         self.assert_file_count(".coverage.*", 0)
@@ -448,6 +449,7 @@
         cov3 = coverage.Coverage()
         cov3.combine()
+        assert self.stdout() == ""
 
         # Now the data is empty!
         _, statements, missing, _ = cov3.analysis("code1.py")
         assert statements == [1]
@@ -469,6 +471,7 @@ def test_combining_with_a_used_coverage(self):
         cov.save()
 
         cov.combine()
+        assert self.stdout() == ""
         self.check_code1_code2(cov)
 
     def test_ordered_combine(self):
@@ -483,6 +486,7 @@ def make_data_file():
         def get_combined_filenames():
             cov = coverage.Coverage()
             cov.combine()
+            assert self.stdout() == ""
             cov.save()
             data = cov.get_data()
             filenames = {relative_filename(f).replace("\\", "/") for f in data.measured_files()}
diff --git a/tests/test_concurrency.py b/tests/test_concurrency.py
index e1606e836..682e3cf09 100644
--- a/tests/test_concurrency.py
+++ b/tests/test_concurrency.py
@@ -392,7 +392,12 @@ def try_multiprocessing_code(
         assert len(glob.glob(".coverage.*")) == nprocs + 1
 
         out = self.run_command("coverage combine")
-        assert out == ""
+        out_lines = out.splitlines()
+        assert len(out_lines) == nprocs + 1
+        assert all(
+            re.fullmatch(r"Combined data file \.coverage\..*\.\d+\.\d+", line)
+            for line in out_lines
+        )
         out = self.run_command("coverage report -m")
 
         last_line = self.squeezed_lines(out)[-1]
@@ -426,8 +431,12 @@ def test_multiprocessing_and_gevent(self):
             code, expected_out, eventlet, nprocs, concurrency="multiprocessing,eventlet"
         )
 
-    def try_multiprocessing_code_with_branching(self, code, expected_out):
-        """Run code using multiprocessing, it should produce `expected_out`."""
+    def test_multiprocessing_with_branching(self):
+        nprocs = 3
+        upto = 30
+        code = (SQUARE_OR_CUBE_WORK + MULTI_CODE).format(NPROCS=nprocs, UPTO=upto)
+        total = sum(x*x if x%2 else x*x*x for x in range(upto))
+        expected_out = f"{nprocs} pids, total = {total}"
         self.make_file("multi.py", code)
         self.make_file("multi.rc", """\
             [run]
@@ -444,20 +453,17 @@ def try_multiprocessing_code_with_branching(self, code, expected_out):
         assert out.rstrip() == expected_out
 
         out = self.run_command("coverage combine")
-        assert out == ""
+        out_lines = out.splitlines()
+        assert len(out_lines) == nprocs + 1
+        assert all(
+            re.fullmatch(r"Combined data file \.coverage\..*\.\d+\.\d+", line)
+            for line in out_lines
+        )
         out = self.run_command("coverage report -m")
 
         last_line = self.squeezed_lines(out)[-1]
         assert re.search(r"TOTAL \d+ 0 \d+ 0 100%", last_line)
 
-    def test_multiprocessing_with_branching(self):
-        nprocs = 3
-        upto = 30
-        code = (SQUARE_OR_CUBE_WORK + MULTI_CODE).format(NPROCS=nprocs, UPTO=upto)
-        total = sum(x*x if x%2 else x*x*x for x in range(upto))
-        expected_out = f"{nprocs} pids, total = {total}"
-        self.try_multiprocessing_code_with_branching(code, expected_out)
-
     def test_multiprocessing_bootstrap_error_handling(self):
         # An exception during bootstrapping will be reported.
         self.make_file("multi.py", """\
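
Note (not part of the patch): a minimal sketch of how the new ``message`` hook on
combine_parallel_data() can be exercised directly. Any callable that accepts a
string works; the data file name in the comment is a hypothetical example, and
the sketch assumes ``.coverage.*`` files from parallel runs already exist in the
current directory.

    from coverage.data import CoverageData, combine_parallel_data

    # Accumulate combined results into ".coverage", merging every data file
    # that starts with that prefix plus a dot (".coverage.*").  Each merged
    # file is reported through the callback, e.g.
    # "Combined data file .coverage.myhost.12345.678901".
    data = CoverageData(".coverage")
    combine_parallel_data(data, message=print)

Running ``coverage combine`` from the command line routes the same messages
through Coverage._message(), which is what the updated tests above assert on.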