Skip to content
Browse files

Merge branch 'develop' into threaded-page-getting

  • Loading branch information...
2 parents 6083597 + b183a32 commit 235e1dea02abd3a89ab53ea8035fd4ee8a37887b @jezdez jezdez committed Sep 1, 2012
View
3 .travis.yml
@@ -5,6 +5,7 @@ python:
- 2.7
- 3.1
- 3.2
+ - pypy
before_install:
- sudo apt-get install subversion bzr mercurial
- echo -e "[web]\ncacerts = /etc/ssl/certs/ca-certificates.crt" >> ~/.hgrc
@@ -15,5 +16,7 @@ notifications:
branches:
only:
- develop
+matrix:
+ allow_failures:
env:
- PIP_USE_MIRRORS=true
View
3 AUTHORS.txt
@@ -6,9 +6,11 @@ Armin Ronacher
Brian Rosner
Carl Meyer
Christian Oudard
+Clay McClure
Cody Soyland
Daniel Holth
Dave Abrahams
+Donald Stufft
Francesco
Hugo Lopes Tavares
Ian Bicking
@@ -39,6 +41,7 @@ Paul Nasrat
Paul Oswald
Paul van der Linden
Peter Waller
+Phil Whelan
Piet Delport
Qiangning Hong
Rene Dudfield
View
12 docs/news.txt
@@ -13,6 +13,15 @@ Beta and final releases planned for the second half of 2012.
develop (unreleased)
-------------------
+* Fixed issue #605 - pypi mirror support broken on some DNS responses. Thanks
+ philwhin.
+
+* Fixed issue #355 - pip uninstall removes files it didn't install. Thanks
+ pjdelport.
+
+* Fixed issues #493, #494, #440, and #573 related to improving support for the
+ user installation scheme. Thanks Marcus Smith.
+
* Write failure log to temp file if default location is not writable. Thanks
andreigc.
@@ -54,6 +63,9 @@ develop (unreleased)
* Fixed issue #427 - clearer error message on a malformed VCS url. Thanks
Thomas Fenzl.
+* Added support for using any of the built-in guaranteed algorithms in ``hashlib``
+  as a checksum hash.
+
1.1 (2012-02-16)
----------------
View
10 docs/usage.txt
@@ -106,6 +106,16 @@ within a :ref:`requirements file <requirements-files>` in addition to on the
command line directly.
+Package Checksum Hashes
+'''''''''''''''''''''''
+
+:term:`PyPI` provides an md5 hash of a package by having the link to the
+package include a #md5=<hash> fragment. pip supports this, as well as any of
+the guaranteed hashlib algorithms (md5, sha1, sha224, sha256, sha384, sha512).
+
+The hash fragment is case sensitive (i.e. sha1, not SHA1).
+
+
Uninstall packages
------------------
View
13 pip/backwardcompat.py
@@ -1,17 +1,22 @@
"""Stuff that differs in different Python versions"""
+import os
+import imp
import sys
import site
__all__ = ['WindowsError']
+uses_pycache = hasattr(imp,'cache_from_source')
+
try:
WindowsError = WindowsError
except NameError:
class NeverUsedException(Exception):
"""this exception should never be raised"""
WindowsError = NeverUsedException
+
console_encoding = sys.__stdout__.encoding
if sys.version_info >= (3,):
@@ -97,3 +102,11 @@ def product(*args, **kwds):
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
+
+def home_lib(home):
+ """Return the lib dir under the 'home' installation scheme"""
+ if hasattr(sys, 'pypy_version_info'):
+ lib = 'site-packages'
+ else:
+ lib = os.path.join('lib', 'python')
+ return os.path.join(home, lib)
View
6 pip/baseparser.py
@@ -285,10 +285,10 @@ def get_default_values(self):
choices=['s', 'i', 'w', 'b'],
default=[],
action='append',
- help="Default action when a path already exists."
- "Use this option more then one time to specify "
+ help="Default action when a path already exists. "
+ "Use this option more than one time to specify "
"another action if a certain option is not "
- "available, choices: "
+ "available. Choices: "
"(s)witch, (i)gnore, (w)ipe, (b)ackup")
parser.disable_interspersed_args()
View
3 pip/commands/install.py
@@ -9,6 +9,7 @@
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.exceptions import InstallationError, CommandError
+from pip.backwardcompat import home_lib
class InstallCommand(Command):
@@ -276,7 +277,7 @@ def run(self, options, args):
if options.target_dir:
if not os.path.exists(options.target_dir):
os.makedirs(options.target_dir)
- lib_dir = os.path.join(temp_target_dir, "lib/python/")
+ lib_dir = home_lib(temp_target_dir)
for item in os.listdir(lib_dir):
shutil.move(
os.path.join(lib_dir, item),
View
2 pip/commands/zip.py
@@ -231,7 +231,7 @@ def remove_filename_from_pth(self, filename):
def add_filename_to_pth(self, filename):
path = os.path.dirname(filename)
- dest = os.path.join(path, filename + '.pth')
+ dest = filename + '.pth'
if path not in self.paths():
logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
if not self.simulate:
View
43 pip/download.py
@@ -1,6 +1,6 @@
import cgi
import getpass
-from hashlib import md5
+import hashlib
import mimetypes
import os
import re
@@ -322,16 +322,24 @@ def is_file_url(link):
return link.url.lower().startswith('file:')
-def _check_md5(download_hash, link):
- download_hash = download_hash.hexdigest()
- if download_hash != link.md5_hash:
- logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
- % (link, download_hash, link.md5_hash))
- raise InstallationError('Bad MD5 hash for package %s' % link)
+def _check_hash(download_hash, link):
+ if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
+ logger.fatal("Hash digest size of the package %d (%s) doesn't match the expected hash name %s!"
+ % (download_hash.digest_size, link, link.hash_name))
+ raise InstallationError('Hash name mismatch for package %s' % link)
+ if download_hash.hexdigest() != link.hash:
+ logger.fatal("Hash of the package %s (%s) doesn't match the expected hash %s!"
+ % (link, download_hash, link.hash))
+ raise InstallationError('Bad %s hash for package %s' % (link.hash_name, link))
-def _get_md5_from_file(target_file, link):
- download_hash = md5()
+def _get_hash_from_file(target_file, link):
+ try:
+ download_hash = hashlib.new(link.hash_name)
+ except (ValueError, TypeError):
+ logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
+ return None
+
fp = open(target_file, 'rb')
while True:
chunk = fp.read(4096)
@@ -345,8 +353,11 @@ def _get_md5_from_file(target_file, link):
def _download_url(resp, link, temp_location):
fp = open(temp_location, 'wb')
download_hash = None
- if link.md5_hash:
- download_hash = md5()
+ if link.hash and link.hash_name:
+ try:
+ download_hash = hashlib.new(link.hash_name)
+ except ValueError:
+ logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
try:
total_length = int(resp.info()['content-length'])
except (ValueError, KeyError, TypeError):
@@ -375,7 +386,7 @@ def _download_url(resp, link, temp_location):
logger.show_progress('%s' % format_size(downloaded))
else:
logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded)))
- if link.md5_hash:
+ if download_hash is not None:
download_hash.update(chunk)
fp.write(chunk)
fp.close()
@@ -424,8 +435,8 @@ def unpack_http_url(link, location, download_cache, download_dir=None):
fp = open(target_file+'.content-type')
content_type = fp.read().strip()
fp.close()
- if link.md5_hash:
- download_hash = _get_md5_from_file(target_file, link)
+ if link.hash and link.hash_name:
+ download_hash = _get_hash_from_file(target_file, link)
temp_location = target_file
logger.notify('Using download cache from %s' % target_file)
else:
@@ -450,8 +461,8 @@ def unpack_http_url(link, location, download_cache, download_dir=None):
filename += ext
temp_location = os.path.join(temp_dir, filename)
download_hash = _download_url(resp, link, temp_location)
- if link.md5_hash:
- _check_md5(download_hash, link)
+ if link.hash and link.hash_name:
+ _check_hash(download_hash, link)
if download_dir:
_copy_file(temp_location, download_dir, content_type, link)
unpack_file(temp_location, location, content_type, link)
View
24 pip/index.py
@@ -29,7 +29,7 @@
__all__ = ['PackageFinder']
-DEFAULT_MIRROR_URL = "last.pypi.python.org"
+DEFAULT_MIRROR_HOSTNAME = "last.pypi.python.org"
class PackageFinder(object):
@@ -661,11 +661,18 @@ def egg_fragment(self):
return None
return match.group(1)
- _md5_re = re.compile(r'md5=([a-f0-9]+)')
+ _hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')
@property
- def md5_hash(self):
- match = self._md5_re.search(self.url)
+ def hash(self):
+ match = self._hash_re.search(self.url)
+ if match:
+ return match.group(2)
+ return None
+
+ @property
+ def hash_name(self):
+ match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@@ -712,14 +719,17 @@ def get_mirrors(hostname=None):
Originally written for the distutils2 project by Alexis Metaireau.
"""
if hostname is None:
- hostname = DEFAULT_MIRROR_URL
+ hostname = DEFAULT_MIRROR_HOSTNAME
# return the last mirror registered on PyPI.
+ last_mirror_hostname = None
try:
- hostname = socket.gethostbyname_ex(hostname)[0]
+ last_mirror_hostname = socket.gethostbyname_ex(hostname)[0]
except socket.gaierror:
return []
- end_letter = hostname.split(".", 1)
+ if not last_mirror_hostname or last_mirror_hostname == DEFAULT_MIRROR_HOSTNAME:
+ last_mirror_hostname = "z.pypi.python.org"
+ end_letter = last_mirror_hostname.split(".", 1)
# determine the list from the last one.
return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
View
33 pip/req.py
@@ -1,5 +1,6 @@
from email.parser import FeedParser
import os
+import imp
import pkg_resources
import re
import sys
@@ -16,10 +17,10 @@
from pip.util import display_path, rmtree
from pip.util import ask, ask_path_exists, backup_dir
from pip.util import is_installable_dir, is_local, dist_is_local, dist_in_usersite
-from pip.util import renames, normalize_path, egg_link_path
+from pip.util import renames, normalize_path, egg_link_path, dist_in_site_packages
from pip.util import make_path_relative
from pip.util import call_subprocess
-from pip.backwardcompat import (urlparse, urllib,
+from pip.backwardcompat import (urlparse, urllib, uses_pycache,
ConfigParser, string_types, HTTPError,
get_python_version, b)
from pip.index import Link
@@ -245,14 +246,16 @@ def run_egg_info(self, force_root_egg_info=False):
_run_setup_py = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
+import pkg_resources
+import os
def replacement_run(self):
self.mkpath(self.egg_info)
installer = self.distribution.fetch_build_egg
- for ep in egg_info.iter_entry_points('egg_info.writers'):
+ for ep in pkg_resources.iter_entry_points('egg_info.writers'):
# require=False is the change we're making:
writer = ep.load(require=False)
if writer:
- writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
+ writer(self, ep.name, os.path.join(self.egg_info,ep.name))
self.find_sources()
egg_info.egg_info.run = replacement_run
exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
@@ -446,7 +449,9 @@ def uninstall(self, auto_confirm=False):
for installed_file in dist.get_metadata('installed-files.txt').splitlines():
path = os.path.normpath(os.path.join(egg_info_path, installed_file))
paths_to_remove.add(path)
- if dist.has_metadata('top_level.txt'):
+ #FIXME: need a test for this elif block
+ #occurs with --single-version-externally-managed/--record outside of pip
+ elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
@@ -466,7 +471,7 @@ def uninstall(self, auto_confirm=False):
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
- elif os.path.isfile(develop_egg_link):
+ elif develop_egg_link:
# develop egg
fh = open(develop_egg_link, 'r')
link_pointer = os.path.normcase(fh.readline().strip())
@@ -685,6 +690,9 @@ def check_if_exists(self):
if self.use_user_site:
if dist_in_usersite(existing_dist):
self.conflicts_with = existing_dist
+ elif running_under_virtualenv() and dist_in_site_packages(existing_dist):
+ raise InstallationError("Will not install to the user site because it will lack sys.path precedence to %s in %s"
+ %(existing_dist.project_name, existing_dist.location))
else:
self.conflicts_with = existing_dist
return True
@@ -890,7 +898,7 @@ def uninstall(self, auto_confirm=False):
req.commit_uninstall()
def locate_files(self):
- ## FIXME: duplicates code from install_files; relevant code should
+ ## FIXME: duplicates code from prepare_files; relevant code should
## probably be factored out into a separate method
unnamed = list(self.unnamed_requirements)
reqs = list(self.requirements.values())
@@ -998,7 +1006,9 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
##occurs when the script attempts to unpack the
##build directory
+ # NB: This call can result in the creation of a temporary build directory
location = req_to_install.build_location(self.build_dir, not self.is_download)
+
## FIXME: is the existance of the checkout good enough to use it? I don't think so.
unpack = True
url = None
@@ -1079,7 +1089,7 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
self.add_requirement(subreq)
if req_to_install.name not in self.requirements:
self.requirements[req_to_install.name] = req_to_install
- if self.is_download:
+ if self.is_download or req_to_install._temp_build_dir is not None:
self.reqs_to_cleanup.append(req_to_install)
else:
self.reqs_to_cleanup.append(req_to_install)
@@ -1428,6 +1438,11 @@ def add(self, path):
else:
self._refuse.add(path)
+ # __pycache__ files can show up after 'installed-files.txt' is created, due to imports
+ if os.path.splitext(path)[1] == '.py' and uses_pycache:
+ self.add(imp.cache_from_source(path))
+
+
def add_pth(self, pth_file, entry):
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
@@ -1457,6 +1472,8 @@ def _stash(self, path):
def remove(self, auto_confirm=False):
"""Remove paths in ``self.paths`` with confirmation (unless
``auto_confirm`` is True)."""
+ if not self.paths:
+ raise InstallationError("Can't uninstall '%s'. No files were found to uninstall." % self.dist.project_name)
if not self._can_uninstall():
return
logger.notify('Uninstalling %s:' % self.dist.project_name)
View
47 pip/util.py
@@ -10,7 +10,7 @@
import subprocess
from pip.exceptions import InstallationError, BadCommand
from pip.backwardcompat import WindowsError, string_types, raw_input, console_to_str, user_site
-from pip.locations import site_packages, running_under_virtualenv
+from pip.locations import site_packages, running_under_virtualenv, virtualenv_no_global
from pip.log import logger
__all__ = ['rmtree', 'display_path', 'backup_dir',
@@ -303,6 +303,12 @@ def dist_in_usersite(dist):
else:
return False
+def dist_in_site_packages(dist):
+ """
+ Return True if given Distribution is installed in distutils.sysconfig.get_python_lib().
+ """
+ return normalize_path(dist_location(dist)).startswith(normalize_path(site_packages))
+
def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python')):
"""
@@ -325,16 +331,37 @@ def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'pyt
def egg_link_path(dist):
"""
- Return the path where we'd expect to find a .egg-link file for
- this distribution. (There doesn't seem to be any metadata in the
- Distribution object for a develop egg that points back to its
- .egg-link and easy-install.pth files).
+ Return the path for the .egg-link file if it exists, otherwise, None.
+
+    There are 3 scenarios:
+ 1) not in a virtualenv
+ try to find in site.USER_SITE, then site_packages
+ 2) in a no-global virtualenv
+ try to find in site_packages
+ 3) in a yes-global virtualenv
+ try to find in site_packages, then site.USER_SITE (don't look in global location)
+
+ For #1 and #3, there could be odd cases, where there's an egg-link in 2 locations.
+ This method will just return the first one found.
+ """
- This won't find a globally-installed develop egg if we're in a
- virtualenv.
+ sites=[]
+ if running_under_virtualenv():
+ if virtualenv_no_global():
+ sites.append(site_packages)
+ else:
+ sites.append(site_packages)
+ if user_site:
+ sites.append(user_site)
+ else:
+ if user_site:
+ sites.append(user_site)
+ sites.append(site_packages)
- """
- return os.path.join(site_packages, dist.project_name) + '.egg-link'
+ for site in sites:
+ egglink = os.path.join(site, dist.project_name) + '.egg-link'
+ if os.path.isfile(egglink):
+ return egglink
def dist_location(dist):
@@ -346,7 +373,7 @@ def dist_location(dist):
"""
egg_link = egg_link_path(dist)
- if os.path.exists(egg_link):
+ if egg_link:
return egg_link
return dist.location
View
2 pip/vcs/__init__.py
@@ -19,7 +19,7 @@ class VcsSupport(object):
def __init__(self):
# Register more schemes with urlparse for various version control systems
urlparse.uses_netloc.extend(self.schemes)
- # Python 3.3 doesn't have uses_fragment
+        # Python >= 2.7.4 and >= 3.3 don't have uses_fragment
if getattr(urlparse, 'uses_fragment', None):
urlparse.uses_fragment.extend(self.schemes)
super(VcsSupport, self).__init__()
View
7 pip/vcs/bazaar.py
@@ -19,8 +19,11 @@ class Bazaar(VersionControl):
def __init__(self, url=None, *args, **kwargs):
super(Bazaar, self).__init__(url, *args, **kwargs)
- urlparse.non_hierarchical.extend(['lp'])
- urlparse.uses_fragment.extend(['lp'])
+ # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
+ # Register lp but do not expose as a scheme to support bzr+lp.
+ if getattr(urlparse, 'uses_fragment', None):
+ urlparse.uses_fragment.extend(['lp'])
+ urlparse.non_hierarchical.extend(['lp'])
def parse_vcs_bundle_file(self, content):
url = rev = None
View
13 tests/git_submodule_helpers.py
@@ -44,19 +44,22 @@ def main():
packages=find_packages(),
)
'''), version_pkg_path)
- env.run('git', 'init', cwd=version_pkg_path)
- env.run('git', 'add', '.', cwd=version_pkg_path)
+ env.run('git', 'init', cwd=version_pkg_path, expect_error=True)
+ env.run('git', 'add', '.', cwd=version_pkg_path, expect_error=True)
env.run('git', 'commit', '-q',
'--author', 'Pip <python-virtualenv@googlegroups.com>',
- '-am', 'initial version', cwd=version_pkg_path)
+ '-am', 'initial version', cwd=version_pkg_path,
+ expect_error=True)
submodule_path = _create_test_package_submodule(env)
- env.run('git', 'submodule', 'add', submodule_path, 'testpkg/static', cwd=version_pkg_path)
+ env.run('git', 'submodule', 'add', submodule_path, 'testpkg/static', cwd=version_pkg_path,
+ expect_error=True)
env.run('git', 'commit', '-q',
'--author', 'Pip <python-virtualenv@googlegroups.com>',
- '-am', 'initial version w submodule', cwd=version_pkg_path)
+ '-am', 'initial version w submodule', cwd=version_pkg_path,
+ expect_error=True)
return version_pkg_path, submodule_path
View
17 tests/packages/HackedEggInfo/setup.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+from setuptools import setup
+from setuptools.command import egg_info as orig_egg_info
+
+class egg_info (orig_egg_info.egg_info):
+ def run(self):
+ orig_egg_info.egg_info.run(self)
+
+
+setup(
+ name = "hackedegginfo",
+ version = '0.0.0',
+ cmdclass = {'egg_info':egg_info },
+ zip_safe = False,
+)
+
View
3 tests/packages/README.txt
@@ -4,3 +4,6 @@ Version 0.2broken has a setup.py crafted to fail on install (and only on
install). If any earlier step would fail (i.e. egg-info-generation), the
already-installed version would never be uninstalled, so uninstall-rollback
would not come into play.
+
+The parent-0.1.tar.gz and child-0.1.tar.gz packages are used by
+test_uninstall:test_uninstall_overlapping_package.
View
BIN tests/packages/child-0.1.tar.gz
Binary file not shown.
View
BIN tests/packages/parent-0.1.tar.gz
Binary file not shown.
View
10 tests/test_basic.py
@@ -376,6 +376,16 @@ def test_install_with_pax_header():
run_pip('install', 'paxpkg.tar.bz2', cwd=run_from)
+def test_install_with_hacked_egg_info():
+ """
+ test installing a package which defines its own egg_info class
+ """
+ reset_env()
+ run_from = abspath(join(here, 'packages', 'HackedEggInfo'))
+ result = run_pip('install', '.', cwd=run_from)
+ assert 'Successfully installed hackedegginfo\n' in result.stdout
+
+
def test_install_using_install_option_and_editable():
"""
Test installing a tool using -e and --install-option
View
23 tests/test_cleanup.py
@@ -17,6 +17,7 @@ def test_cleanup_after_install_from_pypi():
src = env.scratch_path/"src"
assert not exists(build), "build/ dir still exists: %s" % build
assert not exists(src), "unexpected src/ dir exists: %s" % src
+ env.assert_no_temp()
def test_cleanup_after_install_editable_from_hg():
@@ -34,6 +35,7 @@ def test_cleanup_after_install_editable_from_hg():
src = env.venv_path/'src'
assert not exists(build), "build/ dir still exists: %s" % build
assert exists(src), "expected src/ dir doesn't exist: %s" % src
+ env.assert_no_temp()
def test_cleanup_after_install_from_local_directory():
@@ -48,6 +50,7 @@ def test_cleanup_after_install_from_local_directory():
src = env.venv_path/'src'
assert not exists(build), "unexpected build/ dir exists: %s" % build
assert not exists(src), "unexpected src/ dir exist: %s" % src
+ env.assert_no_temp()
def test_cleanup_after_create_bundle():
@@ -79,6 +82,7 @@ def test_cleanup_after_create_bundle():
src_bundle = env.scratch_path/"src-bundle"
assert not exists(build_bundle), "build-bundle/ dir still exists: %s" % build_bundle
assert not exists(src_bundle), "src-bundle/ dir still exists: %s" % src_bundle
+ env.assert_no_temp()
# Make sure previously created src/ from editable still exists
assert exists(src), "expected src dir doesn't exist: %s" % src
@@ -96,6 +100,25 @@ def test_no_install_and_download_should_not_leave_build_dir():
assert not os.path.exists(env.venv_path/'/build'), "build/ dir should be deleted"
+def test_cleanup_req_satisifed_no_name():
+ """
+ Test cleanup when req is already satisfied, and req has no 'name'
+ """
+ #this test confirms Issue #420 is fixed
+ #reqs with no 'name' that were already satisfied were leaving behind tmp build dirs
+ #2 examples of reqs that would do this
+ # 1) https://bitbucket.org/ianb/initools/get/tip.zip
+ # 2) parent-0.1.tar.gz
+
+ dist = abspath(join(here, 'packages', 'parent-0.1.tar.gz'))
+ env = reset_env()
+ result = run_pip('install', dist)
+ result = run_pip('install', dist)
+ build = env.venv_path/'build'
+ assert not exists(build), "unexpected build/ dir exists: %s" % build
+ env.assert_no_temp()
+
+
def test_download_should_not_delete_existing_build_dir():
"""
It should not delete build/ if existing before run the command
View
194 tests/test_hashes.py
@@ -0,0 +1,194 @@
+import os
+
+from nose.tools import assert_raises
+
+from pip.download import _get_hash_from_file, _check_hash
+from pip.exceptions import InstallationError
+from pip.index import Link
+
+
+def test_get_hash_from_file_md5():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash.digest_size == 16
+ assert download_hash.hexdigest() == "d41d8cd98f00b204e9800998ecf8427e"
+
+
+def test_get_hash_from_file_sha1():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha1=da39a3ee5e6b4b0d3255bfef95601890afd80709")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash.digest_size == 20
+ assert download_hash.hexdigest() == "da39a3ee5e6b4b0d3255bfef95601890afd80709"
+
+
+def test_get_hash_from_file_sha224():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha224=d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash.digest_size == 28
+ assert download_hash.hexdigest() == "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
+
+
+def test_get_hash_from_file_sha384():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha384=38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash.digest_size == 48
+ assert download_hash.hexdigest() == "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
+
+
+def test_get_hash_from_file_sha256():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash.digest_size == 32
+ assert download_hash.hexdigest() == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+
+
+def test_get_hash_from_file_sha512():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha512=cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash.digest_size == 64
+ assert download_hash.hexdigest() == "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
+
+
+def test_get_hash_from_file_unknown():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#unknown_hash=d41d8cd98f00b204e9800998ecf8427e")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert download_hash is None
+
+
+def test_check_hash_md5_valid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ _check_hash(download_hash, file_link)
+
+
+def test_check_hash_md5_invalid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#md5=deadbeef")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, file_link)
+
+
+def test_check_hash_sha1_valid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha1=da39a3ee5e6b4b0d3255bfef95601890afd80709")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ _check_hash(download_hash, file_link)
+
+
+def test_check_hash_sha1_invalid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha1=deadbeef")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, file_link)
+
+
+def test_check_hash_sha224_valid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha224=d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f'")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ _check_hash(download_hash, file_link)
+
+
+def test_check_hash_sha224_invalid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha224=deadbeef")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, file_link)
+
+
+def test_check_hash_sha384_valid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha384=38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ _check_hash(download_hash, file_link)
+
+
+def test_check_hash_sha384_invalid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha384=deadbeef")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, file_link)
+
+
+def test_check_hash_sha256_valid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ _check_hash(download_hash, file_link)
+
+
+def test_check_hash_sha256_invalid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha256=deadbeef")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, file_link)
+
+
+def test_check_hash_sha512_valid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha512=cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ _check_hash(download_hash, file_link)
+
+
+def test_check_hash_sha512_invalid():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#sha512=deadbeef")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, file_link)
+
+
+def test_check_hasher_mismsatch():
+ file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "packages", "gmpy-1.15.tar.gz")
+ file_link = Link("http://testserver/gmpy-1.15.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e")
+ other_link = Link("http://testserver/gmpy-1.15.tar.gz#sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
+
+ download_hash = _get_hash_from_file(file_path, file_link)
+
+ assert_raises(InstallationError, _check_hash, download_hash, other_link)
View
29 tests/test_index.py
@@ -1,4 +1,6 @@
-from pip.index import package_to_requirement, HTMLPage
+from pip.index import package_to_requirement, HTMLPage, get_mirrors, DEFAULT_MIRROR_HOSTNAME
+from string import ascii_lowercase
+from mock import patch
def test_package_name_should_be_converted_to_requirement():
@@ -26,3 +28,28 @@ def test_html_page_should_be_able_to_scrap_rel_links():
assert len(links) == 1
assert links[0].url == 'http://supervisord.org/'
+@patch('socket.gethostbyname_ex')
+def test_get_mirrors(mock_gethostbyname_ex):
+ # Test when the expected result comes back
+ # from socket.gethostbyname_ex
+ mock_gethostbyname_ex.return_value = ('g.pypi.python.org', [DEFAULT_MIRROR_HOSTNAME], ['129.21.171.98'])
+ mirrors = get_mirrors()
+ # Expect [a-g].pypi.python.org, since last mirror
+ # is returned as g.pypi.python.org
+ assert len(mirrors) == 7
+ for c in "abcdefg":
+ assert c + ".pypi.python.org" in mirrors
+
+@patch('socket.gethostbyname_ex')
+def test_get_mirrors_no_cname(mock_gethostbyname_ex):
+ # Test when the UNexpected result comes back
+ # from socket.gethostbyname_ex
+ # (seeing this in Japan and was resulting in 216k
+ # invalid mirrors and a hot CPU)
+ mock_gethostbyname_ex.return_value = (DEFAULT_MIRROR_HOSTNAME, [DEFAULT_MIRROR_HOSTNAME], ['129.21.171.98'])
+ mirrors = get_mirrors()
+ # Falls back to [a-z].pypi.python.org
+ assert len(mirrors) == 26
+ for c in ascii_lowercase:
+ assert c + ".pypi.python.org" in mirrors
+
View
26 tests/test_pip.py
@@ -314,12 +314,18 @@ def __init__(self, environ=None, use_distribute=None, sitecustomize=None):
assert self.venv_path == virtualenv_paths[0] # sanity check
for id, path in zip(('venv', 'lib', 'include', 'bin'), virtualenv_paths):
+ #fix for virtualenv issue #306
+ if hasattr(sys, "pypy_version_info") and id == 'lib':
+ path = os.path.join(self.venv_path, 'lib-python', pyversion)
setattr(self, id+'_path', Path(path))
setattr(self, id, relpath(self.root_path, path))
assert self.venv == TestPipEnvironment.venv # sanity check
- self.site_packages = self.lib/'site-packages'
+ if hasattr(sys, "pypy_version_info"):
+ self.site_packages = self.venv/'site-packages'
+ else:
+ self.site_packages = self.lib/'site-packages'
self.user_base_path = self.venv_path/'user'
self.user_site_path = self.venv_path/'user'/site_packages_suffix
@@ -362,6 +368,10 @@ def __init__(self, environ=None, use_distribute=None, sitecustomize=None):
if sitecustomize:
self._add_to_sitecustomize(sitecustomize)
+ # Ensure that $TMPDIR exists (because we use start_clear=False, it's not created for us)
+ if self.temp_path and not os.path.exists(self.temp_path):
+ os.makedirs(self.temp_path)
+
def _ignore_file(self, fn):
if fn.endswith('__pycache__') or fn.endswith(".pyc"):
result = True
@@ -444,12 +454,18 @@ def __init__(self, environ=None, sitecustomize=None):
virtualenv_paths = virtualenv.path_locations(self.venv_path)
for id, path in zip(('venv', 'lib', 'include', 'bin'), virtualenv_paths):
+ #fix for virtualenv issue #306
+ if hasattr(sys, "pypy_version_info") and id == 'lib':
+ path = os.path.join(self.venv_path, 'lib-python', pyversion)
setattr(self, id+'_path', Path(path))
setattr(self, id, relpath(self.root_path, path))
assert self.venv == TestPipEnvironment.venv # sanity check
- self.site_packages = self.lib/'site-packages'
+ if hasattr(sys, "pypy_version_info"):
+ self.site_packages = self.venv/'site-packages'
+ else:
+ self.site_packages = self.lib/'site-packages'
self.user_base_path = self.venv_path/'user'
self.user_site_path = self.venv_path/'user'/'lib'/self.lib.name/'site-packages'
@@ -510,6 +526,10 @@ def __init__(self, environ=None, sitecustomize=None):
assert self.root_path.exists
+ # Ensure that $TMPDIR exists (because we use start_clear=False, it's not created for us)
+ if self.temp_path and not os.path.exists(self.temp_path):
+ os.makedirs(self.temp_path)
+
def __del__(self):
pass # shutil.rmtree(str(self.root_path), ignore_errors=True)
@@ -663,5 +683,5 @@ def main():
if __name__ == '__main__':
- sys.stderr.write("Run pip's tests using nosetests. Requires virtualenv, ScriptTest, and nose.\n")
+ sys.stderr.write("Run pip's tests using nosetests. Requires virtualenv, ScriptTest, mock, and nose.\n")
sys.exit(1)
View
17 tests/test_requirements.py
@@ -121,19 +121,22 @@ def test_requirements_data_structure_implements__contains__():
assert 'pip' in requirements
assert 'nose' not in requirements
+@patch('os.path.normcase')
@patch('pip.req.os.getcwd')
@patch('pip.req.os.path.exists')
@patch('pip.req.os.path.isdir')
-def test_parse_editable_local(isdir_mock, exists_mock, getcwd_mock):
+def test_parse_editable_local(isdir_mock, exists_mock, getcwd_mock, normcase_mock):
exists_mock.return_value = isdir_mock.return_value = True
- getcwd_mock.return_value = "/some/path"
+ # mocks needed to support path operations on windows tests
+ normcase_mock.return_value = getcwd_mock.return_value = "/some/path"
assert_equal(
parse_editable('.', 'git'),
(None, 'file:///some/path', None)
)
+ normcase_mock.return_value = "/some/path/foo"
assert_equal(
parse_editable('foo', 'git'),
- (None, 'file://' + os.path.join("/some/path", 'foo'), None)
+ (None, 'file:///some/path/foo', None)
)
def test_parse_editable_default_vcs():
@@ -154,19 +157,21 @@ def test_parse_editable_vcs_extras():
('foo[extras]', 'svn+https://foo#egg=foo[extras]', None)
)
+@patch('os.path.normcase')
@patch('pip.req.os.getcwd')
@patch('pip.req.os.path.exists')
@patch('pip.req.os.path.isdir')
-def test_parse_editable_local_extras(isdir_mock, exists_mock, getcwd_mock):
+def test_parse_editable_local_extras(isdir_mock, exists_mock, getcwd_mock, normcase_mock):
exists_mock.return_value = isdir_mock.return_value = True
- getcwd_mock.return_value = "/some/path"
+ normcase_mock.return_value = getcwd_mock.return_value = "/some/path"
assert_equal(
parse_editable('.[extras]', 'git'),
(None, 'file://' + "/some/path", ('extras',))
)
+ normcase_mock.return_value = "/some/path/foo"
assert_equal(
parse_editable('foo[bar,baz]', 'git'),
- (None, 'file://' + os.path.join("/some/path", 'foo'), ('bar', 'baz'))
+ (None, 'file:///some/path/foo', ('bar', 'baz'))
)
def test_install_local_editable_with_extras():
View
21 tests/test_test.py
@@ -66,3 +66,24 @@ def test_sitecustomize_not_growing_in_fast_environment():
size2 = os.stat(sc2).st_size
assert size1==size2, "size before, %d != size after, %d" %(size1, size2)
+
+def test_tmp_dir_exists_in_env():
+ """
+ Test that $TMPDIR == env.temp_path and path exists, and env.assert_no_temp() passes
+ """
+ #need these tests to ensure the assert_no_temp feature of scripttest is working
+ env = reset_env(use_distribute=True)
+ env.assert_no_temp() #this fails if env.tmp_path doesn't exist
+ assert env.environ['TMPDIR'] == env.temp_path
+ assert isdir(env.temp_path)
+
+
+def test_tmp_dir_exists_in_fast_env():
+ """
+ Test that $TMPDIR == env.temp_path and path exists and env.assert_no_temp() passes (in fast env)
+ """
+ #need these tests to ensure the assert_no_temp feature of scripttest is working
+ env = reset_env()
+ env.assert_no_temp() #this fails if env.tmp_path doesn't exist
+ assert env.environ['TMPDIR'] == env.temp_path
+ assert isdir(env.temp_path)
View
4 tests/test_unicode.py
@@ -20,6 +20,6 @@ def test_install_package_that_emits_unicode():
env = reset_env()
to_install = os.path.abspath(os.path.join(here, 'packages', 'BrokenEmitsUTF8'))
- result = run_pip('install', to_install, expect_error=True)
- assert '__main__.FakeError: this package designed to fail on install' in result.stdout
+ result = run_pip('install', to_install, expect_error=True, expect_temp=True, quiet=True)
+ assert 'FakeError: this package designed to fail on install' in result.stdout
assert 'UnicodeDecodeError' not in result.stdout
View
57 tests/test_uninstall.py
@@ -1,7 +1,9 @@
import textwrap
import sys
-from os.path import join, abspath
+from os.path import join, abspath, normpath
from tempfile import mkdtemp
+from mock import Mock
+from nose.tools import assert_raises
from tests.test_pip import here, reset_env, run_pip, assert_all_changes, write_file, pyversion
from tests.local_repos import local_repo, local_checkout
@@ -16,6 +18,8 @@ def test_simple_uninstall():
env = reset_env()
result = run_pip('install', 'INITools==0.2')
assert join(env.site_packages, 'initools') in result.files_created, sorted(result.files_created.keys())
+ #the import forces the generation of __pycache__ if the version of python supports it
+ env.run('python', '-c', "import initools")
result2 = run_pip('uninstall', 'INITools', '-y')
assert_all_changes(result, result2, [env.venv/'build', 'cache'])
@@ -34,6 +38,19 @@ def test_uninstall_with_scripts():
assert_all_changes(result, result2, [env.venv/'build', 'cache'])
+def test_uninstall_easy_install_after_import():
+ """
+ Uninstall an easy_installed package after it's been imported
+
+ """
+ env = reset_env()
+ result = env.run('easy_install', 'INITools==0.2', expect_stderr=True)
+ #the import forces the generation of __pycache__ if the version of python supports it
+ env.run('python', '-c', "import initools")
+ result2 = run_pip('uninstall', 'INITools', '-y')
+ assert_all_changes(result, result2, [env.venv/'build', 'cache'])
+
+
def test_uninstall_namespace_package():
"""
Uninstall a distribution with a namespace package without clobbering
@@ -48,6 +65,33 @@ def test_uninstall_namespace_package():
assert join(env.site_packages, 'pd', 'find') in result2.files_deleted, sorted(result2.files_deleted.keys())
+def test_uninstall_overlapping_package():
+ """
+ Uninstalling a distribution that adds modules to a pre-existing package
+ should only remove those added modules, not the rest of the existing
+ package.
+
+ See: GitHub issue #355 (pip uninstall removes things it didn't install)
+ """
+ parent_pkg = abspath(join(here, 'packages', 'parent-0.1.tar.gz'))
+ child_pkg = abspath(join(here, 'packages', 'child-0.1.tar.gz'))
+ env = reset_env()
+ result1 = run_pip('install', parent_pkg, expect_error=False)
+ assert join(env.site_packages, 'parent') in result1.files_created, sorted(result1.files_created.keys())
+ result2 = run_pip('install', child_pkg, expect_error=False)
+ assert join(env.site_packages, 'child') in result2.files_created, sorted(result2.files_created.keys())
+ assert normpath(join(env.site_packages, 'parent/plugins/child_plugin.py')) in result2.files_created, sorted(result2.files_created.keys())
+ #the import forces the generation of __pycache__ if the version of python supports it
+ env.run('python', '-c', "import parent.plugins.child_plugin, child")
+ result3 = run_pip('uninstall', '-y', 'child', expect_error=False)
+ assert join(env.site_packages, 'child') in result3.files_deleted, sorted(result3.files_created.keys())
+ assert normpath(join(env.site_packages, 'parent/plugins/child_plugin.py')) in result3.files_deleted, sorted(result3.files_deleted.keys())
+ assert join(env.site_packages, 'parent') not in result3.files_deleted, sorted(result3.files_deleted.keys())
+ # Additional check: uninstalling 'child' should return things to the
+ # previous state, without unintended side effects.
+ assert_all_changes(result2, result3, [])
+
+
def test_uninstall_console_scripts():
"""
Test uninstalling a package with more files (console_script entry points, extra directories).
@@ -155,3 +199,14 @@ def test_uninstall_as_egg():
result2 = run_pip('uninstall', 'FSPkg', '-y', expect_error=True)
assert_all_changes(result, result2, [env.venv/'build', 'cache'])
+
+def test_uninstallpathset_no_paths():
+ """
+ Test UninstallPathSet raises installation error when there are no paths (uses mocking)
+
+ """
+ from pip.req import UninstallPathSet
+ from pip.exceptions import InstallationError
+ mock_dist = Mock(project_name='pkg')
+ uninstall_set = UninstallPathSet(mock_dist)
+ assert_raises(InstallationError, uninstall_set.remove)
View
4 tests/test_upgrade.py
@@ -207,8 +207,8 @@ def test_upgrade_vcs_req_with_no_dists_found():
def test_upgrade_vcs_req_with_dist_found():
"""It can upgrade a VCS requirement that has distributions on the index."""
reset_env()
- req = "%s#egg=virtualenv" % local_checkout(
- "git+git://github.com/pypa/virtualenv@c21fef2c2d53cf19f49bcc37f9c058a33fb50499")
+ # TODO(pnasrat) Using local_checkout fails on windows - oddness with the test path urls/git.
+ req = "%s#egg=virtualenv" % "git+git://github.com/pypa/virtualenv@c21fef2c2d53cf19f49bcc37f9c058a33fb50499"
run_pip("install", req)
result = run_pip("install", "-U", req)
assert not "pypi.python.org" in result.stdout, result.stdout
View
95 tests/test_user_site.py
@@ -6,7 +6,15 @@
from os.path import abspath, join, curdir, isdir, isfile
from nose import SkipTest
from tests.local_repos import local_checkout
-from tests.test_pip import here, reset_env, run_pip, pyversion
+from tests.test_pip import here, reset_env, run_pip, pyversion, assert_all_changes
+
+
+patch_dist_in_site_packages = """
+ def dist_in_site_packages(dist):
+ return False
+ import pip
+ pip.util.dist_in_site_packages=dist_in_site_packages
+"""
def test_install_curdir_usersite_fails_in_old_python():
@@ -27,6 +35,10 @@ def setup(self):
# --user only works on 2.6 or higher
if sys.version_info < (2, 6):
raise SkipTest()
+ # --user option is broken in pypy
+ if hasattr(sys, "pypy_version_info"):
+ raise SkipTest()
+
def test_reset_env_system_site_packages_usersite(self):
"""
@@ -58,9 +70,6 @@ def test_install_subversion_usersite_editable_with_distribute(self):
"""
Test installing current directory ('.') into usersite after installing distribute
"""
- # FIXME distutils --user option seems to be broken in pypy
- if hasattr(sys, "pypy_version_info"):
- raise SkipTest()
env = reset_env(use_distribute=True, system_site_packages=True)
result = run_pip('install', '--user', '-e',
'%s#egg=initools-dev' %
@@ -72,9 +81,6 @@ def test_install_curdir_usersite(self):
"""
Test installing current directory ('.') into usersite
"""
- # FIXME distutils --user option seems to be broken in pypy
- if hasattr(sys, "pypy_version_info"):
- raise SkipTest()
env = reset_env(use_distribute=True, system_site_packages=True)
run_from = abspath(join(here, 'packages', 'FSPkg'))
result = run_pip('install', '--user', curdir, cwd=run_from, expect_error=False)
@@ -111,15 +117,21 @@ def test_install_user_conflict_in_usersite(self):
assert not isfile(initools_v3_file), initools_v3_file
- def test_install_user_conflict_in_site(self):
+ def test_install_user_conflict_in_globalsite(self):
"""
- Test user install with conflict in site ignores site and installs to usersite
+ Test user install with conflict in global site ignores site and installs to usersite
"""
- #the test framework only supports testing using virtualenvs
- #this test will use a --system_site_packages virtualenv to achieve the conflict scenario.
+ # the test framework only supports testing using virtualenvs
+ # the sys.path ordering for virtualenvs with --system-site-packages is this: virtualenv-site, user-site, global-site
+ # this test will use 2 modifications to simulate the user-site/global-site relationship
+ # 1) a monkey patch which will make it appear INITools==0.2 is not in the virtualenv site
+ # if we don't patch this, pip will return an installation error: "Will not install to the usersite because it will lack sys.path precedence..."
+ # 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence over the virtualenv site
+
+ env = reset_env(system_site_packages=True, sitecustomize=patch_dist_in_site_packages)
+ env.environ["PYTHONPATH"] = env.root_path / env.user_site
- env = reset_env(system_site_packages=True)
result1 = run_pip('install', 'INITools==0.2')
result2 = run_pip('install', '--user', 'INITools==0.1')
@@ -141,14 +153,14 @@ def test_install_user_conflict_in_globalsite_and_usersite(self):
Test user install with conflict in globalsite and usersite ignores global site and updates usersite.
"""
- #the test framework only supports testing using virtualenvs
- #this test will use a --system_site_packages virtualenv to achieve the conflict scenario.
-
- env = reset_env(system_site_packages=True)
+ # the test framework only supports testing using virtualenvs.
+ # the sys.path ordering for virtualenvs with --system-site-packages is this: virtualenv-site, user-site, global-site.
+ # this test will use 2 modifications to simulate the user-site/global-site relationship
+ # 1) a monkey patch which will make it appear INITools==0.2 is not in the virtualenv site
+ # if we don't patch this, pip will return an installation error: "Will not install to the usersite because it will lack sys.path precedence..."
+ # 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence over the virtualenv site
- # the sys.path ordering for virtualenvs with --system-site-packages is this: virtualenv site, usersite, global site
- # given this ordering you *can't* use it to simulate the scenario for this test.
- # this test will add the usersite to PYTHONPATH to simulate the desired ordering
+ env = reset_env(system_site_packages=True, sitecustomize=patch_dist_in_site_packages)
env.environ["PYTHONPATH"] = env.root_path / env.user_site
result1 = run_pip('install', 'INITools==0.2')
@@ -166,3 +178,48 @@ def test_install_user_conflict_in_globalsite_and_usersite(self):
initools_folder = env.root_path / env.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
+
+
+ def test_install_user_in_global_virtualenv_with_conflict_fails(self):
+ """
+ Test user install in --system-site-packages virtualenv with conflict in site fails.
+ """
+ env = reset_env(system_site_packages=True)
+ result1 = run_pip('install', 'INITools==0.2')
+ result2 = run_pip('install', '--user', 'INITools==0.1', expect_error=True)
+ resultp = env.run('python', '-c', "import pkg_resources; print(pkg_resources.get_distribution('initools').location)")
+ dist_location = resultp.stdout.strip()
+ assert result2.stdout.startswith("Will not install to the user site because it will lack sys.path precedence to %s in %s"
+ %('INITools', dist_location)), result2.stdout
+
+
+ def test_uninstall_from_usersite(self):
+ """
+ Test uninstall from usersite
+ """
+ env = reset_env(system_site_packages=True)
+ result1 = run_pip('install', '--user', 'INITools==0.3')
+ result2 = run_pip('uninstall', '-y', 'INITools')
+ assert_all_changes(result1, result2, [env.venv/'build', 'cache'])
+
+
+ def test_uninstall_editable_from_usersite(self):
+ """
+ Test uninstall editable local user install
+ """
+ env = reset_env(use_distribute=True, system_site_packages=True)
+
+ #install
+ to_install = abspath(join(here, 'packages', 'FSPkg'))
+ result1 = run_pip('install', '--user', '-e', to_install, expect_error=False)
+ egg_link = env.user_site/'FSPkg.egg-link'
+ assert egg_link in result1.files_created, str(result1.stdout)
+
+ #uninstall
+ result2 = run_pip('uninstall', '-y', 'FSPkg')
+ assert not isfile(env.root_path / egg_link)
+
+ assert_all_changes(result1, result2,
+ [env.venv/'build', 'cache', env.user_site/'easy-install.pth'])
+
+
View
140 tests/test_util.py
@@ -0,0 +1,140 @@
+"""
+util tests
+
+"""
+import os
+import pkg_resources
+from mock import Mock
+from nose.tools import eq_
+from tests.path import Path
+from pip.util import egg_link_path
+
+
+class Tests_EgglinkPath:
+ "util.egg_link_path() tests"
+
+ def setup(self):
+
+ project = 'foo'
+
+ self.mock_dist = Mock(project_name=project)
+ self.site_packages = 'SITE_PACKAGES'
+ self.user_site = 'USER_SITE'
+ self.user_site_egglink = os.path.join(self.user_site,'%s.egg-link' % project)
+ self.site_packages_egglink = os.path.join(self.site_packages,'%s.egg-link' % project)
+
+ #patches
+ from pip import util
+ self.old_site_packages = util.site_packages
+ self.mock_site_packages = util.site_packages = 'SITE_PACKAGES'
+ self.old_running_under_virtualenv = util.running_under_virtualenv
+ self.mock_running_under_virtualenv = util.running_under_virtualenv = Mock()
+ self.old_virtualenv_no_global = util.virtualenv_no_global
+ self.mock_virtualenv_no_global = util.virtualenv_no_global = Mock()
+ self.old_user_site = util.user_site
+ self.mock_user_site = util.user_site = self.user_site
+ from os import path
+ self.old_isfile = path.isfile
+ self.mock_isfile = path.isfile = Mock()
+
+
+ def teardown(self):
+ from pip import util
+ util.site_packages = self.old_site_packages
+ util.running_under_virtualenv = self.old_running_under_virtualenv
+ util.virtualenv_no_global = self.old_virtualenv_no_global
+ util.user_site = self.old_user_site
+ from os import path
+ path.isfile = self.old_isfile
+
+
+ def eggLinkInUserSite(self,egglink):
+ return egglink==self.user_site_egglink
+
+ def eggLinkInSitePackages(self,egglink):
+ return egglink==self.site_packages_egglink
+
+ #########################
+ ## egglink in usersite ##
+ #########################
+ def test_egglink_in_usersite_notvenv(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = False
+ self.mock_isfile.side_effect = self.eggLinkInUserSite
+ eq_(egg_link_path(self.mock_dist), self.user_site_egglink)
+
+ def test_egglink_in_usersite_venv_noglobal(self):
+ self.mock_virtualenv_no_global.return_value = True
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.side_effect = self.eggLinkInUserSite
+ eq_(egg_link_path(self.mock_dist), None)
+
+ def test_egglink_in_usersite_venv_global(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.side_effect = self.eggLinkInUserSite
+ eq_(egg_link_path(self.mock_dist), self.user_site_egglink)
+
+ #########################
+ ## egglink in sitepkgs ##
+ #########################
+ def test_egglink_in_sitepkgs_notvenv(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = False
+ self.mock_isfile.side_effect = self.eggLinkInSitePackages
+ eq_(egg_link_path(self.mock_dist), self.site_packages_egglink)
+
+ def test_egglink_in_sitepkgs_venv_noglobal(self):
+ self.mock_virtualenv_no_global.return_value = True
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.side_effect = self.eggLinkInSitePackages
+ eq_(egg_link_path(self.mock_dist), self.site_packages_egglink)
+
+ def test_egglink_in_sitepkgs_venv_global(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.side_effect = self.eggLinkInSitePackages
+ eq_(egg_link_path(self.mock_dist), self.site_packages_egglink)
+
+ ####################################
+ ## egglink in usersite & sitepkgs ##
+ ####################################
+ def test_egglink_in_both_notvenv(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = False
+ self.mock_isfile.return_value = True
+ eq_(egg_link_path(self.mock_dist), self.user_site_egglink)
+
+ def test_egglink_in_both_venv_noglobal(self):
+ self.mock_virtualenv_no_global.return_value = True
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.return_value = True
+ eq_(egg_link_path(self.mock_dist), self.site_packages_egglink)
+
+ def test_egglink_in_both_venv_global(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.return_value = True
+ eq_(egg_link_path(self.mock_dist), self.site_packages_egglink)
+
+ ################
+ ## no egglink ##
+ ################
+ def test_noegglink_in_sitepkgs_notvenv(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = False
+ self.mock_isfile.return_value = False
+ eq_(egg_link_path(self.mock_dist), None)
+
+ def test_noegglink_in_sitepkgs_venv_noglobal(self):
+ self.mock_virtualenv_no_global.return_value = True
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.return_value = False
+ eq_(egg_link_path(self.mock_dist), None)
+
+ def test_noegglink_in_sitepkgs_venv_global(self):
+ self.mock_virtualenv_no_global.return_value = False
+ self.mock_running_under_virtualenv.return_value = True
+ self.mock_isfile.return_value = False
+ eq_(egg_link_path(self.mock_dist), None)
+
View
5 tests/test_vcs_git.py
@@ -1,4 +1,6 @@
+import sys
from mock import patch
+from nose import SkipTest
from pip.vcs.git import Git
from tests.test_pip import (reset_env, run_pip,
_create_test_package,)
@@ -87,6 +89,9 @@ def test_check_submodule_addition():
Submodules are pulled in on install and updated on upgrade.
"""
+ # TODO(pnasrat) fix all helpers to do right things with paths on windows.
+ if sys.platform == 'win32':
+ raise SkipTest()
env = reset_env()
module_path, submodule_path = _create_test_package_with_submodule(env)

0 comments on commit 235e1de

Please sign in to comment.
Something went wrong with that request. Please try again.