Remove dependency links support #6060

Merged
6 commits merged on Jan 7, 2019
1 change: 1 addition & 0 deletions news/6060.removal
@@ -0,0 +1 @@
Remove the deprecated --process-dependency-links option.
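For context, the deprecation warning deleted below in src/pip/_internal/index.py pointed at PEP 508 URL dependencies as the replacement for dependency links. A minimal sketch of that replacement, with a hypothetical project name and URL, declared directly in setup.py instead of a dependency_links entry:

    # Sketch only: "example-dep" and the URL are placeholders, not real projects.
    from setuptools import setup

    setup(
        name='myproject',
        version='0.0.1',
        install_requires=[
            # A PEP 508 direct URL reference replaces the old dependency_links entry.
            'example-dep @ https://example.com/example-dep-1.0.tar.gz',
        ],
    )

Public indexes such as PyPI generally reject uploads whose requirements use direct URLs, so this form is mainly useful for private indexes, requirements files, and local installs.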
2 changes: 0 additions & 2 deletions src/pip/_internal/build_env.py
@@ -189,8 +189,6 @@ def install_requirements(
args.extend(['--trusted-host', host])
if finder.allow_all_prereleases:
args.append('--pre')
if finder.process_dependency_links:
args.append('--process-dependency-links')
args.append('--')
args.extend(requirements)
with open_spinner(message) as spinner:
1 change: 0 additions & 1 deletion src/pip/_internal/cli/base_command.py
@@ -296,7 +296,6 @@ def _build_package_finder(
index_urls=index_urls,
trusted_hosts=options.trusted_hosts,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
platform=platform,
versions=python_versions,
12 changes: 0 additions & 12 deletions src/pip/_internal/cli/cmdoptions.py
@@ -347,17 +347,6 @@ def trusted_host():
)


# Remove after 1.5
process_dependency_links = partial(
Option,
"--process-dependency-links",
dest="process_dependency_links",
action="store_true",
default=False,
help="Enable the processing of dependency links.",
) # type: Callable[..., Option]


def constraints():
# type: () -> Option
return Option(
@@ -773,6 +762,5 @@ def _merge_hash(option, opt_str, value, parser):
extra_index_url,
no_index,
find_links,
process_dependency_links,
]
} # type: Dict[str, Any]
9 changes: 0 additions & 9 deletions src/pip/_internal/commands/list.py
@@ -118,7 +118,6 @@ def _build_package_finder(self, options, index_urls, session):
index_urls=index_urls,
allow_all_prereleases=options.pre,
trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
session=session,
)

@@ -168,16 +167,8 @@ def iter_packages_latest_infos(self, packages, options):
logger.debug('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []

dependency_links = []
for dist in packages:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)

with self._build_session(options) as session:
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)

for dist in packages:
typ = 'unknown'
47 changes: 4 additions & 43 deletions src/pip/_internal/index.py
@@ -31,7 +31,6 @@
from pip._internal.models.link import Link
from pip._internal.pep425tags import get_supported
from pip._internal.utils.compat import ipaddress
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path,
@@ -268,7 +267,6 @@ def __init__(
index_urls, # type: List[str]
allow_all_prereleases=False, # type: bool
trusted_hosts=None, # type: Optional[Iterable[str]]
process_dependency_links=False, # type: bool
session=None, # type: Optional[PipSession]
format_control=None, # type: Optional[FormatControl]
platform=None, # type: Optional[str]
@@ -315,7 +313,6 @@ def __init__(
self.find_links.append(link)

self.index_urls = index_urls
self.dependency_links = [] # type: List[str]

# These are boring links that have already been logged somehow:
self.logged_links = set() # type: Set[Link]
@@ -331,9 +328,6 @@ def __init__(
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases

# Do we process dependency links?
self.process_dependency_links = process_dependency_links

# The Session we'll use to make requests
self.session = session

@@ -375,22 +369,6 @@ def get_formatted_locations(self):
)
return "\n".join(lines)

def add_dependency_links(self, links):
# type: (Iterable[str]) -> None
# FIXME: this shouldn't be global list this, it should only
# apply to requirements of the package that specifies the
# dependency_links value
# FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
deprecated(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
replacement="PEP 508 URL dependencies",
gone_in="19.0",
issue=4187,
)
self.dependency_links.extend(links)

@staticmethod
def _sort_locations(locations, expand_dir=False):
# type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
@@ -587,7 +565,7 @@ def find_all_candidates(self, project_name):
# type: (str) -> List[Optional[InstallationCandidate]]
"""Find all available InstallationCandidate for project_name

This checks index_urls, find_links and dependency_links.
This checks index_urls and find_links.
All versions found are returned as an InstallationCandidate list.

See _link_package_versions for details on which files are accepted
@@ -597,21 +575,18 @@ def find_all_candidates(self, project_name):
fl_file_loc, fl_url_loc = self._sort_locations(
self.find_links, expand_dir=True,
)
dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

file_locations = (Link(url) for url in itertools.chain(
index_file_loc, fl_file_loc, dep_file_loc,
index_file_loc, fl_file_loc,
))

# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
# We explicitly do not trust links that came from dependency_links
# via --index-url or --find-links.
# We want to filter out any thing which does not have a secure origin.
url_locations = [
link for link in itertools.chain(
(Link(url) for url in index_url_loc),
(Link(url) for url in fl_url_loc),
(Link(url) for url in dep_url_loc),
)
if self._validate_secure_origin(logger, link)
]
@@ -639,17 +614,6 @@ def find_all_candidates(self, project_name):
self._package_versions(page.iter_links(), search)
)

dependency_versions = self._package_versions(
(Link(url) for url in self.dependency_links), search
)
if dependency_versions:
logger.debug(
'dependency_links found: %s',
', '.join([
version.location.url for version in dependency_versions
])
)

file_versions = self._package_versions(file_locations, search)
if file_versions:
file_versions.sort(reverse=True)
@@ -662,10 +626,7 @@
)

# This is an intentional priority ordering
return (
file_versions + find_links_versions + page_versions +
dependency_versions
)
return file_versions + find_links_versions + page_versions

def find_requirement(self, req, upgrade):
# type: (InstallRequirement, bool) -> Optional[Link]
5 changes: 1 addition & 4 deletions src/pip/_internal/operations/check.py
@@ -120,9 +120,6 @@ def check_install_conflicts(to_install):
)


# NOTE from @pradyunsg
# This required a minor update in dependency link handling logic over at
# operations.prepare.IsSDist.dist() to get it working
def _simulate_installation_of(to_install, package_set):
# type: (List[InstallRequirement], PackageSet) -> Set[str]
"""Computes the version of packages after installing to_install.
@@ -133,7 +130,7 @@ def _simulate_installation_of(to_install, package_set):

# Modify it as installing requirement_set would (assuming no errors)
for inst_req in to_install:
dist = make_abstract_dist(inst_req).dist(finder=None)
dist = make_abstract_dist(inst_req).dist()
name = canonicalize_name(dist.key)
package_set[name] = PackageDetails(dist.version, dist.requires())

27 changes: 10 additions & 17 deletions src/pip/_internal/operations/prepare.py
@@ -71,21 +71,21 @@ def __init__(self, req):
# type: (InstallRequirement) -> None
self.req = req # type: InstallRequirement

def dist(self, finder):
# type: (PackageFinder) -> Any
def dist(self):
# type: () -> Any
"""Return a setuptools Dist object."""
raise NotImplementedError(self.dist)
raise NotImplementedError

def prep_for_dist(self, finder, build_isolation):
# type: (PackageFinder, bool) -> Any
"""Ensure that we can get a Dist for this requirement."""
raise NotImplementedError(self.dist)
raise NotImplementedError


class IsWheel(DistAbstraction):

def dist(self, finder):
# type: (PackageFinder) -> pkg_resources.Distribution
def dist(self):
# type: () -> pkg_resources.Distribution
return list(pkg_resources.find_distributions(
self.req.source_dir))[0]

@@ -97,15 +97,8 @@ def prep_for_dist(self, finder, build_isolation):

class IsSDist(DistAbstraction):

def dist(self, finder):
# type: (PackageFinder) -> pkg_resources.Distribution
dist = self.req.get_dist()
# FIXME: shouldn't be globally added.
if finder and dist.has_metadata('dependency_links.txt'):
finder.add_dependency_links(
dist.get_metadata_lines('dependency_links.txt')
)
return dist
def dist(self):
return self.req.get_dist()

def prep_for_dist(self, finder, build_isolation):
# type: (PackageFinder, bool) -> None
@@ -168,8 +161,8 @@ def _raise_conflicts(conflicting_with, conflicting_reqs):

class Installed(DistAbstraction):

def dist(self, finder):
# type: (PackageFinder) -> pkg_resources.Distribution
def dist(self):
# type: () -> pkg_resources.Distribution
return self.req.satisfied_by

def prep_for_dist(self, finder, build_isolation):
3 changes: 0 additions & 3 deletions src/pip/_internal/req/req_file.py
@@ -55,7 +55,6 @@
cmdoptions.no_binary,
cmdoptions.only_binary,
cmdoptions.pre,
cmdoptions.process_dependency_links,
cmdoptions.trusted_host,
cmdoptions.require_hashes,
] # type: List[Callable[..., optparse.Option]]
@@ -251,8 +250,6 @@ def process_line(
finder.find_links.append(value)
if opts.pre:
finder.allow_all_prereleases = True
if opts.process_dependency_links:
finder.process_dependency_links = True
if opts.trusted_hosts:
finder.secure_origins.extend(
("*", host, "*") for host in opts.trusted_hosts)
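With the requirements-file option gone as well, the location of a URL dependency now travels inside the requirement string itself rather than through a separate finder flag. A small illustration using the packaging library (the project name and URL are placeholders) of how a PEP 508 direct reference carries its URL:

    # Illustration only: the requirement string below is hypothetical.
    from packaging.requirements import Requirement

    req = Requirement("example-dep @ https://example.com/example-dep-1.0.tar.gz")
    print(req.name)  # "example-dep"
    print(req.url)   # "https://example.com/example-dep-1.0.tar.gz"

The same "name @ URL" form is accepted directly in requirements files, which is what makes a per-file --process-dependency-links switch unnecessary.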
2 changes: 1 addition & 1 deletion src/pip/_internal/resolve.py
@@ -294,7 +294,7 @@ def _resolve_one(
abstract_dist = self._get_abstract_dist_for(req_to_install)

# Parse and return dependencies
dist = abstract_dist.dist(self.finder)
dist = abstract_dist.dist()
try:
check_dist_requires_python(dist)
except UnsupportedPythonVersion as err:
1 change: 0 additions & 1 deletion src/pip/_internal/utils/outdated.py
@@ -127,7 +127,6 @@ def pip_version_check(session, options):
index_urls=[options.index_url] + options.extra_index_urls,
allow_all_prereleases=False, # Explicitly set to False
trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
session=session,
)
all_candidates = finder.find_all_candidates("pip")
15 changes: 0 additions & 15 deletions src/pip/_internal/vcs/subversion.py
@@ -4,7 +4,6 @@
import os
import re

from pip._internal.models.link import Link
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc,
@@ -61,20 +60,6 @@ def update(self, dest, url, rev_options):
cmd_args = ['update'] + rev_options.to_args() + [dest]
self.run_command(cmd_args)

def get_location(self, dist, dependency_links):
for url in dependency_links:
egg_fragment = Link(url).egg_fragment
if not egg_fragment:
continue
if '-' in egg_fragment:
# FIXME: will this work when a package has - in the name?
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
else:
key = egg_fragment
if key == dist.key:
return url.split('#', 1)[0]
return None

def get_revision(self, location):
"""
Return the maximum revision for all files under a given location
9 changes: 3 additions & 6 deletions tests/data/packages/LocalEnvironMarker/setup.py
@@ -17,14 +17,11 @@ def path_to_url(path):
return 'file://' + url


HERE = os.path.dirname(__file__)
DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple')
DEP_URL = path_to_url(DEP_PATH)

setup(
name='LocalEnvironMarker',
version='0.0.1',
packages=find_packages(),
extras_require={":python_version == '2.7' or python_version == '3.4'": ['simple']},
dependency_links=[DEP_URL]
extras_require={
":python_version == '2.7' or python_version == '3.4'": ['simple'],
}
)
7 changes: 1 addition & 6 deletions tests/data/packages/LocalExtras-0.0.2/setup.py
@@ -17,15 +17,10 @@ def path_to_url(path):
return 'file://' + url


HERE = os.path.dirname(__file__)
DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple')
DEP_URL = path_to_url(DEP_PATH)

setup(
name='LocalExtras',
version='0.0.2',
packages=find_packages(),
install_requires=['simple==1.0'],
extras_require={'bar': ['simple==2.0'], 'baz': ['singlemodule']},
dependency_links=[DEP_URL]
extras_require={'bar': ['simple==2.0'], 'baz': ['singlemodule']}
)
7 changes: 1 addition & 6 deletions tests/data/packages/LocalExtras/setup.py
@@ -17,14 +17,9 @@ def path_to_url(path):
return 'file://' + url


HERE = os.path.dirname(__file__)
DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple')
DEP_URL = path_to_url(DEP_PATH)

setup(
name='LocalExtras',
version='0.0.1',
packages=find_packages(),
extras_require={'bar': ['simple'], 'baz': ['singlemodule']},
dependency_links=[DEP_URL]
extras_require={'bar': ['simple'], 'baz': ['singlemodule']}
)
4 changes: 2 additions & 2 deletions tests/functional/test_install_reqs.py
@@ -175,8 +175,8 @@ def test_respect_order_in_requirements_file(script, data):

def test_install_local_editable_with_extras(script, data):
to_install = data.packages.join("LocalExtras")
res = script.pip(
'install', '-e', to_install + '[bar]', '--process-dependency-links',
res = script.pip_install_local(
'-e', to_install + '[bar]',
expect_error=False,
expect_stderr=True,
)