Fix several bad usages of itertools.groupby() (#10976) (#10979)
[ci skip-rust]
Eric-Arellano authored Oct 19, 2020
1 parent 96f2a10 commit 7e31627
Showing 4 changed files with 43 additions and 34 deletions.
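
For context (an illustration, not part of the commit itself): itertools.groupby() only groups consecutive items that compare equal under the key function, so unsorted input silently fragments groups. All four fixes below address variants of that pitfall. A minimal standalone sketch:

import itertools

data = ["b1", "a1", "b2"]
key = lambda s: s[0]

# Buggy: groupby() on unsorted input emits "b" twice, and the dict
# comprehension keeps only the last "b" group, silently dropping "b1".
broken = {k: list(g) for k, g in itertools.groupby(data, key=key)}
# {'b': ['b2'], 'a': ['a1']}

# Fixed: pre-sort with the same key function before grouping.
correct = {k: list(g) for k, g in itertools.groupby(sorted(data, key=key), key=key)}
# {'a': ['a1'], 'b': ['b1', 'b2']}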
src/python/pants/backend/python/util_rules/pex.py (21 additions & 15 deletions)

@@ -140,22 +140,28 @@ def and_constraints(parsed_constraints: Sequence[Requirement]) -> Requirement:
     merged_specs: Set[Tuple[str, str]] = set()
     expected_interpreter = parsed_constraints[0].project_name
     for parsed_constraint in parsed_constraints:
-        if parsed_constraint.project_name != expected_interpreter:
-            attempted_interpreters = {
-                interp: sorted(
-                    str(parsed_constraint) for parsed_constraint in parsed_constraints
-                )
-                for interp, parsed_constraints in itertools.groupby(
-                    parsed_constraints,
-                    key=lambda pc: pc.project_name,
-                )
-            }
-            raise ValueError(
-                "Tried ANDing Python interpreter constraints with different interpreter "
-                "types. Please use only one interpreter type. Got "
-                f"{attempted_interpreters}."
-            )
-        merged_specs.update(parsed_constraint.specs)
+        if parsed_constraint.project_name == expected_interpreter:
+            merged_specs.update(parsed_constraint.specs)
+            continue
+
+        def key_fn(req: Requirement):
+            return req.project_name
+
+        # NB: We must pre-sort the data for itertools.groupby() to work properly.
+        sorted_constraints = sorted(parsed_constraints, key=key_fn)
+        attempted_interpreters = {
+            interp: sorted(
+                str(parsed_constraint) for parsed_constraint in parsed_constraints
+            )
+            for interp, parsed_constraints in itertools.groupby(
+                sorted_constraints, key=key_fn
+            )
+        }
+        raise ValueError(
+            "Tried ANDing Python interpreter constraints with different interpreter "
+            "types. Please use only one interpreter type. Got "
+            f"{attempted_interpreters}."
+        )
 
     formatted_specs = ",".join(f"{op}{version}" for op, version in merged_specs)
     return Requirement.parse(f"{expected_interpreter}{formatted_specs}")
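A standalone sketch of the fixed pattern above, using hypothetical constraint inputs (pkg_resources.Requirement is what the real code parses into):

import itertools

from pkg_resources import Requirement

# Hypothetical inputs; the real values come from interpreter constraints fields.
parsed_constraints = [
    Requirement.parse("CPython>=3.6"),
    Requirement.parse("PyPy==7.3"),
    Requirement.parse("CPython<3.9"),
]

def key_fn(req: Requirement):
    return req.project_name

# Without the pre-sort, "CPython" would be emitted as two separate groups.
sorted_constraints = sorted(parsed_constraints, key=key_fn)
attempted_interpreters = {
    interp: sorted(str(constraint) for constraint in group)
    for interp, group in itertools.groupby(sorted_constraints, key=key_fn)
}
# {'CPython': ['CPython<3.9', 'CPython>=3.6'], 'PyPy': ['PyPy==7.3']}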
src/python/pants/core/goals/lint.py (7 additions & 1 deletion)

@@ -242,6 +242,12 @@ async def lint(
             for request in valid_requests
             for field_set in request.field_sets
         )
+
+        def key_fn(results: LintResults):
+            return results.linter_name
+
+        # NB: We must pre-sort the data for itertools.groupby() to work properly.
+        sorted_all_per_files_results = sorted(all_per_file_results, key=key_fn)
         # We consolidate all results for each linter into a single `LintResults`.
         all_results = tuple(
             LintResults(
@@ -251,7 +257,7 @@ async def lint(
                 linter_name=linter_name,
             )
             for linter_name, all_linter_results in itertools.groupby(
-                all_per_file_results, key=lambda results: results.linter_name
+                sorted_all_per_files_results, key=key_fn
             )
         )
     else:
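The same sort-then-group pattern in isolation, with a stand-in dataclass instead of Pants' LintResults (names here are illustrative):

import itertools
from dataclasses import dataclass

@dataclass(frozen=True)
class FakeLintResult:
    linter_name: str
    stdout: str

per_file_results = [
    FakeLintResult("flake8", "a.py: ok"),
    FakeLintResult("black", "a.py: ok"),
    FakeLintResult("flake8", "b.py: ok"),
]

def key_fn(result: FakeLintResult):
    return result.linter_name

# Pre-sorting guarantees each linter name forms one contiguous run, so it
# appears exactly once in the consolidated output.
consolidated = {
    linter: [result.stdout for result in group]
    for linter, group in itertools.groupby(sorted(per_file_results, key=key_fn), key=key_fn)
}
# {'black': ['a.py: ok'], 'flake8': ['a.py: ok', 'b.py: ok']}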
src/python/pants/core/goals/test.py (7 additions & 4 deletions)

@@ -7,7 +7,7 @@
 from dataclasses import dataclass
 from enum import Enum
 from pathlib import PurePath
-from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, TypeVar, Union, cast
+from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast
 
 from pants.core.util_rules.filter_empty_sources import (
     FieldSetsWithSources,
@@ -412,9 +412,12 @@ async def run_tests(
         workspace.write_digest(merged_xml_results)
 
     if test_subsystem.use_coverage:
-        all_coverage_data: Iterable[CoverageData] = [
-            result.coverage_data for result in results if result.coverage_data is not None
-        ]
+        # NB: We must pre-sort the data for itertools.groupby() to work properly, using the same
+        # key function for both. However, you can't sort by `types`, so we call `str()` on it.
+        all_coverage_data = sorted(
+            (result.coverage_data for result in results if result.coverage_data is not None),
+            key=lambda cov_data: str(type(cov_data)),
+        )
 
         coverage_types_to_collection_types: Dict[
             Type[CoverageData], Type[CoverageDataCollection]
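Why the key here is str(type(...)) rather than type(...): class objects define no ordering, so sorting by the type itself raises. A sketch with illustrative stand-in classes:

class JsonCoverageData: ...
class XmlCoverageData: ...

results = [XmlCoverageData(), JsonCoverageData(), XmlCoverageData()]

# sorted(results, key=type) raises:
# TypeError: '<' not supported between instances of 'type' and 'type'

# Stringified type names sort deterministically, giving groupby() the
# contiguous runs it needs.
by_type = sorted(results, key=lambda cov: str(type(cov)))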
src/python/pants/core/util_rules/stripped_source_files.py (8 additions & 14 deletions)

@@ -1,7 +1,7 @@
 # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
 # Licensed under the Apache License, Version 2.0 (see LICENSE).
 
-import itertools
+from collections import defaultdict
 from dataclasses import dataclass
 
 from pants.core.util_rules.source_files import SourceFiles
@@ -42,18 +42,12 @@ async def strip_source_roots(source_files: SourceFiles) -> StrippedSourceFiles:
         SourceRootsRequest.for_files(rooted_files_snapshot.files),
     )
 
-    file_to_source_root = {
-        str(file): root for file, root in source_roots_result.path_to_root.items()
-    }
-    files_grouped_by_source_root = {
-        source_root.path: tuple(str(f) for f in files)
-        for source_root, files in itertools.groupby(
-            file_to_source_root.keys(), key=file_to_source_root.__getitem__
-        )
-    }
+    source_roots_to_files = defaultdict(set)
+    for f, root in source_roots_result.path_to_root.items():
+        source_roots_to_files[root.path].add(str(f))
 
-    if len(files_grouped_by_source_root) == 1:
-        source_root = next(iter(files_grouped_by_source_root.keys()))
+    if len(source_roots_to_files) == 1:
+        source_root = next(iter(source_roots_to_files.keys()))
         if source_root == ".":
             resulting_snapshot = rooted_files_snapshot
         else:
@@ -63,11 +57,11 @@ async def strip_source_roots(source_files: SourceFiles) -> StrippedSourceFiles:
     else:
         digest_subsets = await MultiGet(
             Get(Digest, DigestSubset(rooted_files_snapshot.digest, PathGlobs(files)))
-            for files in files_grouped_by_source_root.values()
+            for files in source_roots_to_files.values()
         )
         resulting_digests = await MultiGet(
             Get(Digest, RemovePrefix(digest, source_root))
-            for digest, source_root in zip(digest_subsets, files_grouped_by_source_root.keys())
+            for digest, source_root in zip(digest_subsets, source_roots_to_files.keys())
        )
         resulting_snapshot = await Get(Snapshot, MergeDigests(resulting_digests))
 
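The fourth fix takes a different tack: when group order does not matter, a plain dict accumulator sidesteps the sort-before-groupby requirement entirely. A sketch with illustrative paths:

from collections import defaultdict

# Hypothetical mapping of file path -> source root, standing in for
# source_roots_result.path_to_root.
path_to_root = {
    "src/python/app/main.py": "src/python",
    "src/rust/lib.rs": "src/rust",
    "src/python/app/util.py": "src/python",
}

# No sorting needed: each item lands in its group regardless of input order.
source_roots_to_files = defaultdict(set)
for f, root in path_to_root.items():
    source_roots_to_files[root].add(f)
# {'src/python': {'src/python/app/main.py', 'src/python/app/util.py'},
#  'src/rust': {'src/rust/lib.rs'}}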
