forked from pantsbuild/pants
-
Notifications
You must be signed in to change notification settings - Fork 0
/
setup_py.py
1170 lines (989 loc) · 45.4 KB
/
setup_py.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import enum
import itertools
import logging
import os
import pickle
from abc import ABC, abstractmethod
from collections import defaultdict
from dataclasses import dataclass
from functools import partial
from pathlib import PurePath
from typing import Any, DefaultDict, Dict, List, Mapping, Tuple, cast
from pants.backend.python.macros.python_artifact import PythonArtifact
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.subsystems.setuptools import PythonDistributionFieldSet
from pants.backend.python.target_types import (
BuildBackendEnvVarsField,
GenerateSetupField,
LongDescriptionPathField,
PythonDistributionEntryPointsField,
PythonGeneratingSourcesBase,
PythonProvidesField,
PythonRequirementsField,
PythonSourceField,
ResolvedPythonDistributionEntryPoints,
ResolvePythonDistributionEntryPointsRequest,
SDistConfigSettingsField,
SDistField,
WheelConfigSettingsField,
WheelField,
)
from pants.backend.python.util_rules.dists import (
BuildSystem,
BuildSystemRequest,
DistBuildRequest,
DistBuildResult,
distutils_repr,
)
from pants.backend.python.util_rules.dists import rules as dists_rules
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import Pex
from pants.backend.python.util_rules.pex_requirements import PexRequirements
from pants.backend.python.util_rules.python_sources import (
PythonSourceFiles,
PythonSourceFilesRequest,
StrippedPythonSourceFiles,
)
from pants.backend.python.util_rules.python_sources import rules as python_sources_rules
from pants.base.glob_match_error_behavior import GlobMatchErrorBehavior
from pants.base.specs import AncestorGlobSpec, RawSpecs
from pants.core.goals.package import BuiltPackage, BuiltPackageArtifact, PackageFieldSet
from pants.core.target_types import FileSourceField, ResourceSourceField
from pants.engine.addresses import Address, UnparsedAddressInputs
from pants.engine.collection import Collection, DeduplicatedCollection
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.fs import (
AddPrefix,
CreateDigest,
Digest,
DigestContents,
DigestSubset,
FileContent,
MergeDigests,
PathGlobs,
Snapshot,
)
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import (
Dependencies,
DependenciesRequest,
InvalidFieldException,
SourcesField,
Target,
Targets,
TransitiveTargets,
TransitiveTargetsRequest,
targets_with_sources_types,
)
from pants.engine.unions import UnionMembership, UnionRule, union
from pants.option.option_types import BoolOption, EnumOption
from pants.option.subsystem import Subsystem
from pants.source.source_root import SourceRootsRequest, SourceRootsResult
from pants.util.docutil import doc_url
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.memo import memoized_property
from pants.util.meta import frozen_after_init
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.strutil import softwrap
logger = logging.getLogger(__name__)
class SetupPyError(Exception):
    """Root of the setup.py-generation error hierarchy.

    Every message is suffixed with a link to the python-distributions docs so users
    have a starting point for debugging.
    """

    def __init__(self, msg: str):
        docs_link = doc_url("python-distributions")
        super().__init__(f"{msg} See {docs_link}.")
class InvalidSetupPyArgs(SetupPyError):
    """Indicates invalid arguments to setup.py (e.g. banned commands or missing kwargs)."""
class TargetNotExported(SetupPyError):
    """Indicates a target that was expected to be exported is not."""
class InvalidEntryPoint(SetupPyError):
    """Indicates that a specified binary entry point was invalid."""
class OwnershipError(SetupPyError):
    """Base class for failures while computing which exported target owns a target."""

    def __init__(self, msg: str):
        # Point users at the docs that explain the ownership mapping rules.
        detail = softwrap(
            f"""
            {msg} See {doc_url('python-distributions')} for
            how python_sources targets are mapped to distributions.
            """
        )
        super().__init__(detail)
class NoOwnerError(OwnershipError):
    """Indicates an exportable target has no owning exported target."""
class AmbiguousOwnerError(OwnershipError):
    """Indicates an exportable target has more than one owning exported target."""
@dataclass(frozen=True)
class ExportedTarget:
    """A target that explicitly exports a setup.py artifact, using a `provides=` stanza.

    The code provided by this artifact can be from this target or from any targets it owns.
    """

    # In practice, a PythonDistribution.
    target: Target

    @property
    def provides(self) -> PythonArtifact:
        # Indexing (rather than .get()) is safe: an exported target must have a `provides=` field.
        return self.target[PythonProvidesField].value
@dataclass(frozen=True)
class DependencyOwner:
    """An ExportedTarget in its role as an owner of other targets.

    We need this type to prevent rule ambiguities when computing the list of targets owned by an
    ExportedTarget (which involves going from ExportedTarget -> dep -> owner (which is itself an
    ExportedTarget) and checking if owner is the original ExportedTarget.
    """

    exported_target: ExportedTarget
@dataclass(frozen=True)
class OwnedDependency:
    """A target that is owned by some ExportedTarget.

    Code in this target is published in the owner's distribution.

    The owner of a target T is T's closest filesystem ancestor among the python_distribution
    targets that directly or indirectly depend on it (including T itself).
    """

    target: Target
class OwnedDependencies(Collection[OwnedDependency]):
    """A collection of OwnedDependency instances (the targets owned by one ExportedTarget)."""

    pass
class ExportedTargetRequirements(DeduplicatedCollection[str]):
    """The requirements of an ExportedTarget.

    Includes:
    - The "normal" 3rdparty requirements of the ExportedTarget and all targets it owns.
    - The published versions of any other ExportedTargets it depends on.
    """

    # Sort so the generated `install_requires` list is deterministic.
    sort_input = True
@dataclass(frozen=True)
class DistBuildSources:
    """The source-root-stripped sources required to build a distribution with a generated setup.py.

    Includes some information derived from analyzing the source, namely the packages, namespace
    packages and resource files in the source.
    """

    digest: Digest
    packages: tuple[str, ...]
    namespace_packages: tuple[str, ...]
    package_data: tuple[PackageDatum, ...]
@dataclass(frozen=True)
class DistBuildChrootRequest:
    """A request to create a chroot for building a dist in."""

    exported_target: ExportedTarget
    interpreter_constraints: InterpreterConstraints
@frozen_after_init
@dataclass(unsafe_hash=True)
class SetupKwargs:
    """The keyword arguments to the `setup()` function in the generated `setup.py`."""

    # Stored pickled, rather than as a dict, so the dataclass is hashable (see comment below).
    _pickled_bytes: bytes

    def __init__(
        self, kwargs: Mapping[str, Any], *, address: Address, _allow_banned_keys: bool = False
    ) -> None:
        """Validate and freeze the user-supplied `setup()` kwargs.

        :param kwargs: The kwargs from the `provides=` field; must contain `name` and `version`.
        :param address: The target's address, used in error messages.
        :param _allow_banned_keys: Allow keys that Pants normally computes itself; used by
            `FinalizedSetupKwargs` (and plugins that deliberately opt in).
        :raises InvalidSetupPyArgs: if `name` or `version` is missing.
        :raises ValueError: if a Pants-computed key is set without `_allow_banned_keys`.
        """
        super().__init__()
        if "name" not in kwargs:
            raise InvalidSetupPyArgs(
                f"Missing a `name` kwarg in the `provides` field for {address}."
            )
        if "version" not in kwargs:
            raise InvalidSetupPyArgs(
                f"Missing a `version` kwarg in the `provides` field for {address}."
            )
        if not _allow_banned_keys:
            # These keys are computed by Pants from sources and dependencies, so users
            # may not set them explicitly.
            for arg in {
                "data_files",
                "install_requires",
                "namespace_packages",
                "package_data",
                "package_dir",
                "packages",
            }:
                if arg in kwargs:
                    raise ValueError(
                        softwrap(
                            f"""
                            {arg} cannot be set in the `provides` field for {address}, but it was
                            set to {kwargs[arg]}. Pants will dynamically set the value for you.
                            """
                        )
                    )
        # We serialize with `pickle` so that is hashable. We don't use `FrozenDict` because it
        # would require that all values are immutable, and we may have lists and dictionaries as
        # values. It's too difficult/clunky to convert those all, then to convert them back out of
        # `FrozenDict`. We don't use JSON because it does not preserve data types like `tuple`.
        self._pickled_bytes = pickle.dumps({k: v for k, v in sorted(kwargs.items())}, protocol=4)

    @memoized_property
    def kwargs(self) -> dict[str, Any]:
        # Unpickled on first access; `memoized_property` caches the result per instance.
        return cast(Dict[str, Any], pickle.loads(self._pickled_bytes))

    @property
    def name(self) -> str:
        # Safe: __init__ guarantees `name` is present.
        return cast(str, self.kwargs["name"])

    @property
    def version(self) -> str:
        # Safe: __init__ guarantees `version` is present.
        return cast(str, self.kwargs["version"])
# Note: This only exists as a hook for additional logic for the `setup()` kwargs, e.g. for plugin
# authors. To resolve `SetupKwargs`, call `await Get(SetupKwargs, ExportedTarget)`, which handles
# running any custom implementations vs. using the default implementation.
@union
@dataclass(frozen=True)  # type: ignore[misc]
class SetupKwargsRequest(ABC):
    """A request to allow setting the kwargs passed to the `setup()` function.

    By default, Pants will pass the kwargs provided in the BUILD file unchanged. To customize this
    behavior, subclass `SetupKwargsRequest`, register the rule `UnionRule(SetupKwargsRequest,
    MyCustomSetupKwargsRequest)`, and add a rule that takes your subclass as a parameter and returns
    `SetupKwargs`.
    """

    target: Target

    @classmethod
    @abstractmethod
    def is_applicable(cls, target: Target) -> bool:
        """Whether the kwargs implementation should be used for this target or not."""

    @property
    def explicit_kwargs(self) -> dict[str, Any]:
        # The raw kwargs from the target's `provides=` field, before any plugin modification.
        return self.target[PythonProvidesField].value.kwargs
class FinalizedSetupKwargs(SetupKwargs):
    """The final kwargs used for the `setup()` function, after Pants added requirements and sources
    information."""

    def __init__(self, kwargs: Mapping[str, Any], *, address: Address) -> None:
        # The Pants-computed keys (packages, install_requires, ...) are set by now, so the
        # banned-keys check must be disabled.
        super().__init__(kwargs, address=address, _allow_banned_keys=True)
@dataclass(frozen=True)
class DistBuildChroot:
    """A chroot containing PEP 517 build setup and the sources it operates on."""

    digest: Digest
    # Path to dir within digest.
    working_directory: str
@enum.unique
class FirstPartyDependencyVersionScheme(enum.Enum):
    """How to pin versions of first-party dists in a generated `install_requires`."""

    EXACT = "exact"  # i.e., ==
    COMPATIBLE = "compatible"  # i.e., ~=
    ANY = "any"  # i.e., no specifier
class SetupPyGeneration(Subsystem):
    options_scope = "setup-py-generation"
    help = "Options to control how setup.py is generated from a `python_distribution` target."

    # Generating setup is the more aggressive thing to do, so we'd prefer that the default
    # be False. However that would break widespread existing usage, so we'll make that
    # change in a future deprecation cycle.
    generate_setup_default = BoolOption(
        default=True,
        help=softwrap(
            """
            The default value for the `generate_setup` field on `python_distribution` targets.
            Can be overridden per-target by setting that field explicitly. Set this to False
            if you mostly rely on handwritten setup files (setup.py, setup.cfg and similar).
            Leave as True if you mostly rely on Pants generating setup files for you.
            """
        ),
    )

    first_party_dependency_version_scheme = EnumOption(
        default=FirstPartyDependencyVersionScheme.EXACT,
        help=softwrap(
            """
            What version to set in `install_requires` when a `python_distribution` depends on
            other `python_distribution`s. If `exact`, will use `==`. If `compatible`, will
            use `~=`. If `any`, will leave off the version. See
            https://www.python.org/dev/peps/pep-0440/#version-specifiers.
            """
        ),
    )

    def first_party_dependency_version(self, version: str) -> str:
        """Return the version string (e.g. '~=4.0') for a first-party dependency.

        If the user specified to use "any" version, then this will return an empty string.
        """
        scheme = self.first_party_dependency_version_scheme
        if scheme == FirstPartyDependencyVersionScheme.ANY:
            return ""
        specifier = "==" if scheme == FirstPartyDependencyVersionScheme.EXACT else "~="
        return f"{specifier}{version}"
def validate_commands(commands: tuple[str, ...]):
    """Validate user-configured setup.py commands/args for a distribution build.

    :param commands: The commands/args that would be passed to setup.py.
    :raises InvalidSetupPyArgs: if the args try to change the dist dir, or try to publish
        via the `upload`/`register` commands.
    """
    # We rely on the dist dir being the default, so we know where to find the created dists.
    # NB: check the joined `--dist-dir=<dir>` form too; previously only the exact tokens
    # `--dist-dir`/`-d` were rejected, so `--dist-dir=dist` slipped through validation.
    if (
        "--dist-dir" in commands
        or "-d" in commands
        or any(command.startswith("--dist-dir=") for command in commands)
    ):
        raise InvalidSetupPyArgs(
            softwrap(
                """
                Cannot set --dist-dir/-d in setup.py args. To change where dists
                are written, use the global --pants-distdir option.
                """
            )
        )
    # We don't allow publishing via setup.py, as we don't want the setup.py running rule,
    # which is not a @goal_rule, to side-effect (plus, we'd need to ensure that publishing
    # happens in dependency order). Note that `upload` and `register` were removed in
    # setuptools 42.0.0, in favor of Twine, but we still check for them in case the user modified
    # the default version used by our Setuptools subsystem.
    if "upload" in commands or "register" in commands:
        raise InvalidSetupPyArgs("Cannot use the `upload` or `register` setup.py commands.")
class NoDistTypeSelected(ValueError):
    """Raised when a `python_distribution` enables neither wheel nor sdist output."""

    pass
@union
@dataclass(frozen=True)
class DistBuildEnvironmentRequest:
    """Union hook allowing plugins to contribute extra build environment for a dist."""

    target_addresses: tuple[Address, ...]
    interpreter_constraints: InterpreterConstraints

    @classmethod
    def is_applicable(cls, tgt: Target) -> bool:
        # Union members should override.
        return False
@dataclass(frozen=True)
class DistBuildEnvironment:
    """Various extra information that might be needed to build a dist."""

    extra_build_time_requirements: tuple[Pex, ...]
    extra_build_time_inputs: Digest
@rule
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
    union_membership: UnionMembership,
) -> BuiltPackage:
    """Build the wheel and/or sdist for a `python_distribution` target.

    Resolves the dist's chroot (generated or handwritten setup), gathers any plugin-provided
    build environment, runs the PEP 517 build, and captures the produced dists as artifacts.

    :raises NoDistTypeSelected: if neither `wheel` nor `sdist` is enabled on the target.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            softwrap(
                f"""
                In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or
                {SDistField.alias!r} must be `True`.
                """
            )
        )
    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()
    backend_env_vars = dist_tgt.get(BuildBackendEnvVarsField).value
    if backend_env_vars:
        # Sorted for determinism, so the build request (and thus caching) is stable.
        extra_build_time_env = await Get(Environment, EnvironmentRequest(sorted(backend_env_vars)))
    else:
        extra_build_time_env = Environment()
    # Fall back to the global interpreter constraints if the closure provides none.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            interpreter_constraints=interpreter_constraints,
        ),
    )
    # Find the source roots for the build-time 1stparty deps (e.g., deps of setup.py).
    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest(
            files=[],
            dirs={
                PurePath(tgt.address.spec_path)
                for tgt in transitive_targets.closure
                if tgt.has_field(PythonSourceField) or tgt.has_field(ResourceSourceField)
            },
        ),
    )
    source_roots = tuple(sorted({sr.path for sr in source_roots_result.path_to_root.values()}))
    # Get any extra build-time environment (e.g., native extension requirements).
    build_env_requests = []
    build_env_request_types = union_membership.get(DistBuildEnvironmentRequest)
    for build_env_request_type in build_env_request_types:
        if build_env_request_type.is_applicable(dist_tgt):
            build_env_requests.append(
                build_env_request_type(
                    tuple(tt.address for tt in transitive_targets.closure), interpreter_constraints
                )
            )
    build_envs = await MultiGet(
        [
            Get(DistBuildEnvironment, DistBuildEnvironmentRequest, build_env_request)
            for build_env_request in build_env_requests
        ]
    )
    extra_build_time_requirements = tuple(
        itertools.chain.from_iterable(
            build_env.extra_build_time_requirements for build_env in build_envs
        )
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            [chroot.digest, *(build_env.extra_build_time_inputs for build_env in build_envs)]
        ),
    )
    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_input = await Get(Digest, AddPrefix(input_digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_input, working_directory))
    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_input,
            working_directory=working_directory,
            build_time_source_roots=source_roots,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
            extra_build_time_requirements=extra_build_time_requirements,
            extra_build_time_env=extra_build_time_env,
        ),
    )
    # Each file in the build output becomes a package artifact.
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
SETUP_BOILERPLATE = """
# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
# Target: {target_address_spec}
from setuptools import setup
setup(**{setup_kwargs_str})
"""
@rule
async def determine_explicitly_provided_setup_kwargs(
    exported_target: ExportedTarget, union_membership: UnionMembership
) -> SetupKwargs:
    """Resolve the user-facing `setup()` kwargs, delegating to a plugin if one applies.

    :raises ValueError: if more than one registered `SetupKwargsRequest` applies to the target.
    """
    target = exported_target.target
    setup_kwargs_requests = union_membership.get(SetupKwargsRequest)
    applicable_setup_kwargs_requests = tuple(
        request for request in setup_kwargs_requests if request.is_applicable(target)
    )
    # If no provided implementations, fall back to our default implementation that simply returns
    # what the user explicitly specified in the BUILD file.
    if not applicable_setup_kwargs_requests:
        return SetupKwargs(exported_target.provides.kwargs, address=target.address)
    if len(applicable_setup_kwargs_requests) > 1:
        possible_requests = sorted(plugin.__name__ for plugin in applicable_setup_kwargs_requests)
        raise ValueError(
            softwrap(
                f"""
                Multiple of the registered `SetupKwargsRequest`s can work on the target
                {target.address}, and it's ambiguous which to use: {possible_requests}

                Please activate fewer implementations, or make the classmethod `is_applicable()`
                more precise so that only one implementation is applicable for this target.
                """
            )
        )
    # Exactly one applicable plugin: dispatch through the union to its rule.
    setup_kwargs_request = tuple(applicable_setup_kwargs_requests)[0]
    return await Get(SetupKwargs, SetupKwargsRequest, setup_kwargs_request(target))  # type: ignore[abstract]
@dataclass(frozen=True)
class GenerateSetupPyRequest:
    """A request to generate a setup.py for an exported target from its analyzed sources."""

    exported_target: ExportedTarget
    sources: DistBuildSources
    interpreter_constraints: InterpreterConstraints
@dataclass(frozen=True)
class GeneratedSetupPy:
    """The digest containing a generated setup.py (and MANIFEST.in)."""

    digest: Digest
@rule
async def generate_chroot(
    request: DistBuildChrootRequest, subsys: SetupPyGeneration
) -> DistBuildChroot:
    """Assemble the build chroot, either generating a setup.py or using handwritten setup files."""
    # Per-target field wins; fall back to the subsystem default when unset.
    generate_setup = request.exported_target.target.get(GenerateSetupField).value
    if generate_setup is None:
        generate_setup = subsys.generate_setup_default
    if generate_setup:
        sources = await Get(DistBuildSources, DistBuildChrootRequest, request)
        generated_setup_py = await Get(
            GeneratedSetupPy,
            GenerateSetupPyRequest(
                request.exported_target, sources, request.interpreter_constraints
            ),
        )
        # We currently generate a setup.py that expects to be in the source root.
        # TODO: It might make sense to generate one in the target's directory, for
        # consistency with the existing setup.py case.
        working_directory = ""
        chroot_digest = await Get(Digest, MergeDigests((sources.digest, generated_setup_py.digest)))
    else:
        # Handwritten setup: ship the whole transitive closure's sources unstripped and
        # run the build from the target's own directory.
        transitive_targets = await Get(
            TransitiveTargets,
            TransitiveTargetsRequest([request.exported_target.target.address]),
        )
        source_files = await Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(
                targets=transitive_targets.closure, include_resources=True, include_files=True
            ),
        )
        chroot_digest = source_files.source_files.snapshot.digest
        working_directory = request.exported_target.target.address.spec_path
    return DistBuildChroot(chroot_digest, working_directory)
@rule
async def generate_setup_py(request: GenerateSetupPyRequest) -> GeneratedSetupPy:
    """Render the autogenerated setup.py (plus a MANIFEST.in) into a digest."""
    kwargs = await Get(FinalizedSetupKwargs, GenerateSetupPyRequest, request)
    rendered = SETUP_BOILERPLATE.format(
        target_address_spec=request.exported_target.target.address.spec,
        setup_kwargs_str=distutils_repr(kwargs.kwargs),
    )
    digest = await Get(
        Digest,
        CreateDigest(
            [
                FileContent("setup.py", rendered.encode()),
                FileContent("MANIFEST.in", b"include *.py"),
            ]
        ),
    )
    return GeneratedSetupPy(digest)
@rule
async def determine_finalized_setup_kwargs(request: GenerateSetupPyRequest) -> FinalizedSetupKwargs:
    """Compute the full `setup()` kwargs: user/plugin kwargs plus Pants-derived values.

    Adds `python_requires`, `packages`, `namespace_packages`, `package_data`,
    `install_requires`, `long_description` (from file, if configured), and `entry_points`.

    :raises SetupPyError: if the target resolves to multiple interpreter constraints.
    :raises InvalidFieldException: if `long_description` is set both explicitly and via
        `long_description_path`.
    """
    exported_target = request.exported_target
    sources = request.sources
    requirements = await Get(ExportedTargetRequirements, DependencyOwner(exported_target))
    # Generate the kwargs for the setup() call. In addition to using the kwargs that are either
    # explicitly provided or generated via a user's plugin, we add additional kwargs based on the
    # resolved requirements and sources.
    target = exported_target.target
    resolved_setup_kwargs = await Get(SetupKwargs, ExportedTarget, exported_target)
    setup_kwargs = resolved_setup_kwargs.kwargs.copy()
    # Check interpreter constraints
    if len(request.interpreter_constraints) > 1:
        raise SetupPyError(
            softwrap(
                f"""
                Expected a single interpreter constraint for {target.address}, got:
                {request.interpreter_constraints}.
                Python distributions do not support multiple constraints, so this will need to be
                translated into a single interpreter constraint using exclusions to get the same
                effect.
                As example, given two constraints:
                >=2.7,<3 OR >=3.5,<3.11
                these can be combined into a single constraint using exclusions:
                >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,<3.11
                """
            )
        )
    if len(request.interpreter_constraints) > 0:
        # Do not replace value if already set.
        setup_kwargs.setdefault(
            "python_requires",
            # Pick the first constraint using a generator detour, as the InterpreterConstraints is
            # based on a FrozenOrderedSet which is not indexable.
            next(str(ic.specifier) for ic in request.interpreter_constraints),  # type: ignore[attr-defined]
        )
    # NB: We are careful to not overwrite these values, but we also don't expect them to have been
    # set. The user must have have gone out of their way to use a `SetupKwargs` plugin, and to have
    # specified `SetupKwargs(_allow_banned_keys=True)`.
    setup_kwargs.update(
        {
            "packages": (*sources.packages, *(setup_kwargs.get("packages", []))),
            "namespace_packages": (
                *sources.namespace_packages,
                *setup_kwargs.get("namespace_packages", []),
            ),
            "package_data": {**dict(sources.package_data), **setup_kwargs.get("package_data", {})},
            "install_requires": (*requirements, *setup_kwargs.get("install_requires", [])),
        }
    )
    long_description_path = exported_target.target.get(LongDescriptionPathField).value
    if "long_description" in setup_kwargs and long_description_path:
        raise InvalidFieldException(
            softwrap(
                f"""
                The {repr(LongDescriptionPathField.alias)} field of the
                target {exported_target.target.address} is set, but
                'long_description' is already provided explicitly in
                the provides=setup_py() field. You may only set one
                of these two values.
                """
            )
        )
    if long_description_path:
        # Read the long description from the configured file; a missing file is an error.
        digest_contents = await Get(
            DigestContents,
            PathGlobs(
                [long_description_path],
                description_of_origin=softwrap(
                    f"""
                    the {LongDescriptionPathField.alias}
                    field of {exported_target.target.address}
                    """
                ),
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
            ),
        )
        long_description_content = digest_contents[0].content.decode()
        setup_kwargs.update({"long_description": long_description_content})
    # Resolve entry points from python_distribution(entry_points=...) and from
    # python_distribution(provides=setup_py(entry_points=...)
    resolved_from_entry_points_field, resolved_from_provides_field = await MultiGet(
        Get(
            ResolvedPythonDistributionEntryPoints,
            ResolvePythonDistributionEntryPointsRequest(
                entry_points_field=exported_target.target.get(PythonDistributionEntryPointsField)
            ),
        ),
        Get(
            ResolvedPythonDistributionEntryPoints,
            ResolvePythonDistributionEntryPointsRequest(
                provides_field=exported_target.target.get(PythonProvidesField)
            ),
        ),
    )

    def _format_entry_points(
        resolved: ResolvedPythonDistributionEntryPoints,
    ) -> dict[str, dict[str, str]]:
        # Flatten resolved entry points to {category: {name: entry_point_spec}}.
        return {
            category: {ep_name: ep_val.entry_point.spec for ep_name, ep_val in entry_points.items()}
            for category, entry_points in resolved.val.items()
        }

    # Gather entry points with source description for any error messages when merging them.
    exported_addr = exported_target.target.address
    entry_point_sources = {
        f"{exported_addr}'s field `entry_points`": _format_entry_points(
            resolved_from_entry_points_field
        ),
        f"{exported_addr}'s field `provides=setup_py()`": _format_entry_points(
            resolved_from_provides_field
        ),
    }
    # Merge all collected entry points and add them to the dist's entry points.
    all_entry_points = merge_entry_points(*list(entry_point_sources.items()))
    if all_entry_points:
        # setuptools expects `entry_points` as {category: ["name = spec", ...]}.
        setup_kwargs["entry_points"] = {
            category: [f"{name} = {entry_point}" for name, entry_point in entry_points.items()]
            for category, entry_points in all_entry_points.items()
        }
    return FinalizedSetupKwargs(setup_kwargs, address=target.address)
@rule
async def get_sources(
    request: DistBuildChrootRequest, union_membership: UnionMembership
) -> DistBuildSources:
    """Collect the stripped sources for a dist and analyze them into packages/package_data."""
    owned_deps, transitive_targets = await MultiGet(
        Get(OwnedDependencies, DependencyOwner(request.exported_target)),
        Get(
            TransitiveTargets,
            TransitiveTargetsRequest([request.exported_target.target.address]),
        ),
    )
    # files() targets aren't owned by a single exported target - they aren't code, so
    # we allow them to be in multiple dists. This is helpful for, e.g., embedding
    # a standard license file in a dist.
    # TODO: This doesn't actually work, the generated setup.py has no way of referencing
    # these, since they aren't in a package, so they won't get included in the built dists.
    # There is a separate `license_files()` setup.py kwarg that we should use for this
    # special case (see https://setuptools.pypa.io/en/latest/references/keywords.html).
    file_targets = targets_with_sources_types(
        [FileSourceField], transitive_targets.closure, union_membership
    )
    targets = Targets(itertools.chain((od.target for od in owned_deps), file_targets))
    # Two passes over the same targets: python-only sources (to find packages) and
    # everything (to also capture resources/files).
    python_sources_request = PythonSourceFilesRequest(
        targets=targets, include_resources=False, include_files=False
    )
    all_sources_request = PythonSourceFilesRequest(
        targets=targets, include_resources=True, include_files=True
    )
    python_sources, all_sources = await MultiGet(
        Get(StrippedPythonSourceFiles, PythonSourceFilesRequest, python_sources_request),
        Get(StrippedPythonSourceFiles, PythonSourceFilesRequest, all_sources_request),
    )
    python_files = set(python_sources.stripped_source_files.snapshot.files)
    all_files = set(all_sources.stripped_source_files.snapshot.files)
    # Anything that isn't a Python file is treated as resource data.
    resource_files = all_files - python_files
    init_py_digest_contents = await Get(
        DigestContents,
        DigestSubset(
            python_sources.stripped_source_files.snapshot.digest, PathGlobs(["**/__init__.py"])
        ),
    )
    packages, namespace_packages, package_data = find_packages(
        python_files=python_files,
        resource_files=resource_files,
        init_py_digest_contents=init_py_digest_contents,
        # Whether to use py2 or py3 package semantics.
        py2=request.interpreter_constraints.includes_python2(),
    )
    return DistBuildSources(
        digest=all_sources.stripped_source_files.snapshot.digest,
        packages=packages,
        namespace_packages=namespace_packages,
        package_data=package_data,
    )
@rule(desc="Compute distribution's 3rd party requirements")
async def get_requirements(
dep_owner: DependencyOwner,
union_membership: UnionMembership,
setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
transitive_targets = await Get(
TransitiveTargets,
TransitiveTargetsRequest([dep_owner.exported_target.target.address]),
)
ownable_tgts = [
tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
]
owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
owned_by_us: set[Target] = set()
owned_by_others: set[Target] = set()
for tgt, owner in zip(ownable_tgts, owners):
(owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)
# Get all 3rdparty deps of our owned deps.
#
# Note that we need only consider requirements that are direct dependencies of our owned deps:
# If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
# if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
# then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
# will require ET.
direct_deps_tgts = await MultiGet(
Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
)
transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
uneval_trans_excl = [
tgt.get(Dependencies).unevaluated_transitive_excludes for tgt in transitive_targets.closure
]
if uneval_trans_excl:
nested_trans_excl = await MultiGet(
Get(Targets, UnparsedAddressInputs, unparsed) for unparsed in uneval_trans_excl
)
transitive_excludes = FrozenOrderedSet(
itertools.chain.from_iterable(excludes for excludes in nested_trans_excl)
)
direct_deps_chained = FrozenOrderedSet(itertools.chain.from_iterable(direct_deps_tgts))
direct_deps_with_excl = direct_deps_chained.difference(transitive_excludes)
req_strs = list(
PexRequirements.req_strings_from_requirement_fields(
(
tgt[PythonRequirementsField]
for tgt in direct_deps_with_excl
if tgt.has_field(PythonRequirementsField)
),
)
)
# Add the requirements on any exported targets on which we depend.
kwargs_for_exported_targets_we_depend_on = await MultiGet(
Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
)
req_strs.extend(
f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
for kwargs in set(kwargs_for_exported_targets_we_depend_on)
)
return ExportedTargetRequirements(req_strs)
@rule(desc="Find all code to be published in the distribution", level=LogLevel.DEBUG)
async def get_owned_dependencies(
dependency_owner: DependencyOwner, union_membership: UnionMembership
) -> OwnedDependencies:
"""Find the dependencies of dependency_owner that are owned by it.
Includes dependency_owner itself.
"""
transitive_targets = await Get(
TransitiveTargets,
TransitiveTargetsRequest([dependency_owner.exported_target.target.address]),
)
ownable_targets = [
tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
]
owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_targets)
owned_dependencies = [
tgt
for owner, tgt in zip(owners, ownable_targets)
if owner == dependency_owner.exported_target
]
return OwnedDependencies(OwnedDependency(t) for t in owned_dependencies)
@rule(desc="Get exporting owner for target")
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
"""Find the exported target that owns the given target (and therefore exports it).
The owner of T (i.e., the exported target in whose artifact T's code is published) is:
1. An exported target that depends on T (or is T itself).
2. Is T's closest filesystem ancestor among those satisfying 1.
If there are multiple such exported targets at the same degree of ancestry, the ownership
is ambiguous and an error is raised. If there is no exported target that depends on T
and is its ancestor, then there is no owner and an error is raised.
"""
target = owned_dependency.target
ancestor_addrs = AncestorGlobSpec(target.address.spec_path)
ancestor_tgts = await Get(
Targets,
RawSpecs(
ancestor_globs=(ancestor_addrs,),
description_of_origin="the `python_distribution` `package` rules",
),
)
# Note that addresses sort by (spec_path, target_name), and all these targets are
# ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
# address will effectively sort by closeness of ancestry to the given target.
exported_ancestor_tgts = sorted(
(t for t in ancestor_tgts if t.has_field(PythonProvidesField)),
key=lambda t: t.address,
reverse=True,
)
exported_ancestor_iter = iter(exported_ancestor_tgts)
for exported_ancestor in exported_ancestor_iter:
transitive_targets = await Get(
TransitiveTargets, TransitiveTargetsRequest([exported_ancestor.address])
)
if target in transitive_targets.closure:
owner = exported_ancestor
# Find any exported siblings of owner that also depend on target. They have the
# same spec_path as it, so they must immediately follow it in ancestor_iter.
sibling_owners = []
sibling = next(exported_ancestor_iter, None)
while sibling and sibling.address.spec_path == owner.address.spec_path:
transitive_targets = await Get(
TransitiveTargets, TransitiveTargetsRequest([sibling.address])
)
if target in transitive_targets.closure:
sibling_owners.append(sibling)
sibling = next(exported_ancestor_iter, None)
if sibling_owners:
all_owners = [exported_ancestor] + sibling_owners
raise AmbiguousOwnerError(
softwrap(
f"""
Found multiple sibling python_distribution targets that are the closest
ancestor dependees of {target.address} and are therefore candidates to
own it: {', '.join(o.address.spec for o in all_owners)}. Only a
single such owner is allowed, to avoid ambiguity.
"""
)
)
return ExportedTarget(owner)
raise NoOwnerError(
softwrap(
f"""
No python_distribution target found to own {target.address}. Note that
the owner must be in or above the owned target's directory, and must
depend on it (directly or indirectly).
"""
)
)
def is_ownable_target(tgt: Target, union_membership: UnionMembership) -> bool:
return (
# Note that we check for a PythonProvides field so that a python_distribution
# target can be owned (by itself). This is so that if there are any 3rdparty
# requirements directly on the python_distribution target, we apply them to the dist.
# This isn't particularly useful (3rdparty requirements should be on the python_sources
# that consumes them)... but users may expect it to work anyway.
tgt.has_field(PythonProvidesField)
or tgt.has_field(PythonSourceField)