/
pex_build_util.py
156 lines (126 loc) · 6.54 KB
/
pex_build_util.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
# coding=utf-8
# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pex.fetcher import Fetcher
from pex.platforms import Platform
from pex.resolver import resolve
from twitter.common.collections import OrderedSet
from pants.backend.python.subsystems.python_setup import PythonSetup
from pants.backend.python.targets.python_binary import PythonBinary
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.backend.python.targets.python_tests import PythonTests
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.build_graph.files import Files
from pants.python.python_repos import PythonRepos
def has_python_sources(tgt):
  """Return True if `tgt` is a python target that actually owns source files.

  We'd like to take all PythonTarget subclasses, but currently PythonThriftLibrary and
  PythonAntlrLibrary extend PythonTarget, and until we fix that (which we can't do until
  we remove the old python pipeline entirely) we want to ignore those target types here.
  """
  is_python_source_target = isinstance(tgt, (PythonLibrary, PythonTests, PythonBinary))
  return is_python_source_target and tgt.has_sources()
def has_resources(tgt):
  """Return True if `tgt` is a `Files` target (or subclass) with sources to bundle."""
  if not isinstance(tgt, Files):
    return False
  return tgt.has_sources()
def has_python_requirements(tgt):
  """Return True if `tgt` declares third-party python requirements to resolve."""
  return isinstance(tgt, PythonRequirementLibrary)
def _create_source_dumper(builder, tgt):
  """Return a callable that copies one buildroot-relative path of `tgt` into the pex chroot.

  The returned callable takes a path relative to the build root, reads the file from disk,
  and adds it to `builder` at the appropriate chroot-relative location.
  """
  if type(tgt) == Files:
    # Loose `Files` as opposed to `Resources` or `PythonTarget`s have no (implied) package structure
    # and so we chroot them relative to the build root so that they can be accessed via the normal
    # python filesystem APIs just as they would be accessed outside the chrooted environment.
    # NB: This requires we mark the pex as not zip safe so these `Files` can still be accessed in
    # the context of a built pex distribution.
    builder.info.zip_safe = False
    to_chroot_path = lambda relpath: relpath
  else:
    to_chroot_path = lambda relpath: os.path.relpath(relpath, tgt.target_base)

  # Resources and sources are registered through different builder entry points.
  add_to_builder = builder.add_resource if has_resources(tgt) else builder.add_source
  root = get_buildroot()

  def dump(relpath):
    add_to_builder(os.path.join(root, relpath), to_chroot_path(relpath))
  return dump
def dump_sources(builder, tgt, log):
  """Copy every source file of `tgt` into the pex `builder` chroot.

  :param builder: The PEXBuilder-like object to dump sources into.
  :param tgt: The target whose sources should be dumped.
  :param log: Logger used for debug/error reporting.
  :raises TaskError: If the target still uses old-style resources.
  """
  dump_source = _create_source_dumper(builder, tgt)
  log.debug(' Dumping sources: {}'.format(tgt))
  for relpath in tgt.sources_relative_to_buildroot():
    try:
      dump_source(relpath)
    except OSError:
      # Surface which file/target failed before propagating.
      log.error('Failed to copy {} for target {}'.format(relpath, tgt.address.spec))
      raise

  uses_old_style_resources = (getattr(tgt, '_resource_target_specs', None) or
                              getattr(tgt, '_synthetic_resources_target', None))
  if uses_old_style_resources:
    # No one should be on old-style resources any more. And if they are,
    # switching to the new python pipeline will be a great opportunity to fix that.
    raise TaskError('Old-style resources not supported for target {}. '
                    'Depend on resources() targets instead.'.format(tgt.address.spec))
def dump_requirements(builder, interpreter, req_libs, log, platforms=None):
  """Multi-platform dependency resolution for PEX files.

  Returns a list of distributions that must be included in order to satisfy a set of requirements.
  That may involve distributions for multiple platforms.

  :param builder: Dump the requirements into this builder.
  :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
  :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
  :param log: Use this logger.
  :param platforms: A list of :class:`Platform`s to resolve requirements for.
                    Defaults to the platforms specified by PythonSetup.
  """
  # Gather and de-dup all requirements.
  all_reqs = OrderedSet()
  for req_lib in req_libs:
    for req in req_lib.requirements:
      all_reqs.add(req)

  # See which ones we need to build.
  reqs_to_build = OrderedSet()
  find_links = OrderedSet()
  for req in all_reqs:
    # TODO: should_build appears to be hardwired to always be True. Get rid of it?
    if not req.should_build(interpreter.python, Platform.current()):
      log.debug(' Skipping {} based on version filter'.format(req))
      continue
    reqs_to_build.add(req)
    log.debug(' Dumping requirement: {}'.format(req))
    builder.add_requirement(req.requirement)
    if req.repository:
      find_links.add(req.repository)

  # Resolve the requirements into distributions, dumping each unique dist once.
  distributions = _resolve_multi(interpreter, reqs_to_build, platforms, find_links)
  seen_locations = set()
  for platform, dists in distributions.items():
    for dist in dists:
      if dist.location in seen_locations:
        continue
      log.debug(' Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
      builder.add_distribution(dist)
      seen_locations.add(dist.location)
def _resolve_multi(interpreter, requirements, platforms, find_links):
  """Multi-platform dependency resolution for PEX files.

  Returns a list of distributions that must be included in order to satisfy a set of requirements.
  That may involve distributions for multiple platforms.

  :param interpreter: The :class:`PythonInterpreter` to resolve for.
  :param requirements: A list of :class:`PythonRequirement` objects to resolve.
  :param platforms: A list of :class:`Platform`s to resolve for.
  :param find_links: Additional paths to search for source packages during resolution.
  :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
           to satisfy the requirements on that platform.
  """
  python_setup = PythonSetup.global_instance()
  python_repos = PythonRepos.global_instance()
  platforms = platforms or python_setup.platforms
  find_links = find_links or []
  distributions = {}
  fetchers = python_repos.get_fetchers()
  fetchers.extend(Fetcher([path]) for path in find_links)
  # The cache dir and the requirement list are keyed off the interpreter, not the platform,
  # so hoist them out of the per-platform loop rather than recomputing each iteration.
  requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir,
                                        str(interpreter.identity))
  deps = [req.requirement for req in requirements]
  for platform in platforms:
    distributions[platform] = resolve(
      requirements=deps,
      interpreter=interpreter,
      fetchers=fetchers,
      # 'current' is a sentinel meaning "resolve for the local platform".
      platform=None if platform == 'current' else platform,
      context=python_repos.get_network_context(),
      cache=requirements_cache_dir,
      cache_ttl=python_setup.resolver_cache_ttl,
      allow_prereleases=python_setup.resolver_allow_prereleases)
  return distributions