Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

[svn r20974] Renaming pyinstall to pip

  • Loading branch information...
commit c2000d7de68ef955a85cf8f5f6e78d4f25c10103 0 parents
@ianb ianb authored
133 docs/conf.py
@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
#
# Paste documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
#
# NOTE(review): the header still says "Paste" -- this conf.py was presumably
# copied from the Paste project when the pyinstall docs were set up; confirm
# before relying on the header text.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.

# 'sys' is only used by the optional sys.path line below (currently commented
# out); it is kept imported so that line can be enabled without edits.
import sys

# If your extensions are in another directory, add it here.
#sys.path.append('some/directory')

# General configuration
# ---------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinx.ext.autodoc']
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.txt'

# The master toctree document.
master_doc = 'index'

# General substitutions.
project = 'pyinstall'
copyright = '2008, The Open Planning Project'

# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = '0.1.3'
# The full version, including alpha/beta/rc tags.
release = '0.1.3'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
unused_docs = []

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'


# Options for HTML output
# -----------------------

# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Content template for the index page.
#html_index = ''

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True

# Output file base name for HTML help builder.
htmlhelp_basename = 'pyinstalldocs'


# Options for LaTeX output
# ------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
#latex_documents = []

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
192 docs/index.txt
@@ -0,0 +1,192 @@
+pyinstall
+=========
+
+.. toctree::
+
+ news
+ requirement-format
+
+.. comment: split here
+
+.. contents::
+
+Introduction
+------------
+
+pyinstall is a replacement for `easy_install
+<http://peak.telecommunity.com/DevCenter/EasyInstall>`_. It uses mostly the
+same techniques for finding packages, so packages that were made
+easy_installable should be pyinstallable as well.
+
+pyinstall is meant to improve on easy_install. Some of the improvements:
+
+* All packages are downloaded before installation. Partially-completed
+ installation doesn't occur as a result.
+
+* Care is taken to present useful output on the console.
+
+* The reasons for actions are kept track of. For instance, if a package is
+ being installed, pyinstall keeps track of why that package was required.
+
+* Error messages should be useful.
+
+* The code is relatively concise and cohesive, making it easier to use
+ programmatically.
+
+* Packages don't have to be installed as egg archives, they can be installed
+ flat (while keeping the egg metadata).
+
+* Maybe features like native support for other version control systems, or
+ uninstallation, will get added. (They might get added to easy_install, but I
+ think the chance for pyinstall is higher.)
+
+Also, pyinstall will eventually be merged directly with poacheggs, making it
+simple to define fixed sets of requirements and reliably reproduce a set of
+packages.
+
+pyinstall is complementary with `virtualenv
+<http://pypi.python.org/pypi/virtualenv>`_, and it is encouraged that you use
+virtualenv to isolate your installation.
+
+Community
+---------
+
+The homepage for pyinstall is temporarily located `on PyPI
+<http://pypi.python.org/pypi/pyinstall>`_ -- a more proper homepage
+will follow. Bugs can go on the `poacheggs Trac instance
+<http://trac.openplans.org/poacheggs/>`_ (probably that will change
+too). Discussion should happen on the `virtualenv email group
+<http://groups.google.com/group/python-virtualenv?hl=en>`_.
+
+Differences From easy_install
+-----------------------------
+
+pyinstall cannot install some packages. Specifically:
+
+* It cannot install from eggs. It only installs from source. (Maybe this will
+ be changed sometime, but it's low priority.)
+
+* It doesn't understand Setuptools extras (like ``package[test]``). This should
+ be added eventually.
+
+* It is incompatible with some packages that customize distutils or setuptools
+ in their ``setup.py`` files.
+
+* Maybe it doesn't work on Windows. At least, the author doesn't test on
+ Windows often.
+
+* It also has some extra features. Extra features the author thinks are great.
+
+.. _`requirements file`:
+
+Requirements Files
+------------------
+
+When installing software, and Python packages in particular, it's common that
+you get a lot of libraries installed. You just did ``easy_install MyPackage``
+and you get a dozen packages. Each of these packages has its own version.
+
+Maybe you ran that installation and it works. Great! Will it keep working?
+Did you have to provide special options to get it to find everything? Did you
+have to install a bunch of other optional pieces? Most of all, will you be able
+to do it again?
+
+If you've ever tried to set up an application on a new system, or with slightly
+updated pieces, and had it fail, pyinstall requirements are for you. If you
+haven't had this problem then you will eventually, so pyinstall requirements are
+for you too -- requirements make explicit, repeatable installation of packages.
+
+So what are requirements files? They are very simple: lists of packages to
+install. Instead of running something like ``pyinstall MyApp`` and getting
+whatever libraries come along, you can create a requirements file something like::
+
+ MyApp
+ Framework==0.9.4
+ Library>=0.2
+
+Then, regardless of what MyApp lists in ``setup.py``, you'll get a specific
+version of Framework and at least the 0.2 version of Library. (You might think
+you could list these specific versions in ``setup.py`` -- try it and you'll
+quickly see why that doesn't work.) You can add optional libraries and support
+tools that MyApp doesn't strictly require.
+
+You can also include "editable" packages -- packages that are checked out from
+subversion (in the future other VCS will be supported). These are just like
+using the ``-e`` option to pyinstall. They look like::
+
+ -e svn+http://myrepo/svn/MyApp#egg=MyApp
+
+You have to start the URL with ``svn+`` (eventually you'll be able to use
+``hg+`` etc), and you have to include ``#egg=Package`` so pyinstall knows what
+to expect at that URL. You can also include ``@rev`` in the URL, e.g., ``@275``
+to check out revision 275.
+
+Freezing Requirements
+---------------------
+
+So you have a working set of packages, and you want to be able to install them
+elsewhere. `Requirements files`_ let you install exact versions, but they won't
+tell you what all the exact versions are.
+
+To create a new requirements file from a known working environment, use::
+
+ $ pyinstall.py --freeze=stable-req.txt
+
+This will write a listing of *all* installed libraries to ``stable-req.txt``
+with exact versions for every library. You may want to edit the file down after
+generating (e.g., to eliminate unnecessary libraries), but it'll give you a
+stable starting point for constructing your requirements file.
+
+You can also give it an existing requirements file, and it will use that as a
+sort of template for the new file. So if you do::
+
+ $ pyinstall.py --freeze=stable-req.txt -r devel-req.txt
+
+it will keep the packages listed in ``devel-req.txt`` in order and preserve
+comments.
+
+Bundles
+-------
+
+Another way to distribute a set of libraries is a bundle format (specific to
+pyinstall). This format is not stable at this time (there simply hasn't been
+any feedback, nor a great deal of thought). A bundle file contains all the
+source for your package, and you can have pyinstall install them all together.
+Once you have the bundle file further network access won't be necessary. To
+build a bundle file, do::
+
+ $ pyinstall.py --bundle=MyApp.pybundle MyApp
+
+(Using a `requirements file`_ would be wise.) Then someone else can get the
+file ``MyApp.pybundle`` and run::
+
+ $ pyinstall.py MyApp.pybundle
+
+This is *not* a binary format. This only packages source. If you have binary
+packages, then the person who installs the files will have to have a compiler,
+any necessary headers installed, etc. Binary packages are hard, this is
+relatively easy.
+
+Using pyinstall With virtualenv
+-------------------------------
+
+pyinstall is most nutritious when used with `virtualenv
+<http://pypi.python.org/pypi/virtualenv>`_. One of the reasons pyinstall
+doesn't install "multi-version" eggs is that virtualenv removes much of the need
+for it.
+
+pyinstall does not have to be installed to use it, you can run ``python
+pyinstall.py`` and it will work. This is intended to avoid the bootstrapping
+problem of installation. You can also run pyinstall inside any virtualenv
+environment, like::
+
+ $ virtualenv new-env/
+ ... creates new-env/ ...
+ $ pyinstall.py -E new-env/ MyPackage
+
+This is exactly equivalent to::
+
+ $ ./new-env/bin/python pyinstall.py MyPackage
+
+Except, if you have ``virtualenv`` installed and the path ``new-env/``
+doesn't exist, then a new virtualenv will be created.
67 docs/news.txt
@@ -0,0 +1,67 @@
+News for pyinstall
+==================
+
+0.1.4
+-----
+
+* Added an option ``--install-option`` to pass extra
+  arguments to ``setup.py install``
+
+* ``.svn/`` directories are no longer included in bundles, as these
+ directories are specific to a version of svn -- if you build a
+ bundle on a system with svn 1.5, you can't use the checkout on a
+ system with svn 1.4. Instead a file ``svn-checkout.txt`` is
+ included that notes the original location and revision, and the
+ command you can use to turn it back into an svn checkout. (Probably
+ unpacking the bundle should, maybe optionally, recreate this
+ information -- but that is not currently implemented, and it would
+ require network access.)
+
+* Avoid ambiguities over project name case, where for instance
+ MyPackage and mypackage would be considered different packages.
+ This in particular caused problems on Macs, where ``MyPackage/`` and
+ ``mypackage/`` are the same directory.
+
+* Added support for an environmental variable
+ ``$PYINSTALL_DOWNLOAD_CACHE`` which will cache package downloads, so
+ future installations won't require large downloads. Network access
+ is still required, but just some downloads will be avoided when
+ using this.
+
+0.1.3
+-----
+
+* Always use ``svn checkout`` (not ``export``) so that
+ ``tag_svn_revision`` settings give the revision of the package.
+
+* Don't update checkouts that came from ``.pybundle`` files.
+
+0.1.2
+-----
+
+* Improve error text when there are errors fetching HTML pages when
+ seeking packages.
+
+* Improve bundles: include empty directories, make them work with
+ editable packages.
+
+* If you use ``-E env`` and the environment ``env/`` doesn't exist, a
+ new virtual environment will be created.
+
+* Fix ``dependency_links`` for finding packages.
+
+0.1.1
+-----
+
+* Fixed a NameError exception when running pyinstall outside of a
+ virtualenv environment.
+
+* Added HTTP proxy support (from Prabhu Ramachandran)
+
+* Fixed use of ``hashlib.md5`` on python2.5+ (also from Prabhu
+ Ramachandran)
+
+0.1
+---
+
+* Initial release
51 docs/requirement-format.txt
@@ -0,0 +1,51 @@
+The requirements file format
+============================
+
+The requirements file is what poacheggs uses to install packages.
+This document describes that format.
+
+Each line of the requirements file indicates something to be
+installed. For example::
+
+ MyPackage==3.0
+
+tells poacheggs to install the 3.0 version of MyPackage.
+
+You can also install a package in an "editable" form. This puts the
+source code into ``src/distname`` (making the name lower case) and
+runs ``python setup.py develop`` on the package. To indicate
+editable, use ``-e``, like::
+
+ -e svn+http://svn.myproject.org/svn/MyProject/trunk#egg=MyProject
+
+The ``#egg=MyProject`` part is important, because while you can
+install simply given the svn location, the project name is useful in
+other places.
+
+If you need to give poacheggs (and by association easy_install) hints
+about where to find a package, you can use the ``-f``
+(``--find-links``) option, like::
+
+ -f http://someserver.org/MyPackage-3.0.tar.gz
+
+If the package is named like ``PackageName-Version.tar.gz`` (or a zip)
+then you don't need ``#egg=...``. Note that you cannot provide
+multiple ``-f`` arguments to easy_install, but you can in a
+requirements file (they all get concatenated into a single ``-f`` for
+easy_install).
+
+Version Control
+---------------
+
+Right now poacheggs only knows Subversion. I hope to add Mercurial in
+the not-too-distant future, as that system in particular is used by
+quite a few open source Python projects (once that's added, support
+for Bazaar, git, etc. will probably be easy).
+
+You can also give specific revisions to an SVN URL, like::
+
+ -e svn+http://svn.myproject.org/svn/MyProject/trunk@2019
+
+which will check out revision 2019. ``@{20080101}`` would also check
+out the revision from 2008-01-01. You can only check out specific
+revisions using ``-e svn+...``.
2,538 pyinstall.py
@@ -0,0 +1,2538 @@
+#!/usr/bin/env python
+import sys
+import os
+import optparse
+import pkg_resources
+import urllib2
+import urllib
+import mimetypes
+import zipfile
+import tarfile
+import tempfile
+import subprocess
+import posixpath
+import re
+import shutil
+try:
+ from hashlib import md5
+except ImportError:
+ import md5 as md5_module
+ md5 = md5_module.new
+import urlparse
+from email.FeedParser import FeedParser
+import traceback
+from cStringIO import StringIO
+import socket
+from Queue import Queue
+from Queue import Empty as QueueEmpty
+import threading
+import httplib
+import time
+import logging
+
class InstallationError(Exception):
    """General exception during installation.

    Base class for all errors raised by this script; ``main()`` catches it
    and exits with status 1.
    """

class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""
+
# Default locations for build and source-checkout directories.  virtualenv
# sets sys.real_prefix, so its presence means we are running inside a
# virtual environment and can keep working files inside it.
if getattr(sys, 'real_prefix', None):
    ## FIXME: is build/ a good name?
    base_prefix = os.path.join(sys.prefix, 'build')
    base_src_prefix = os.path.join(sys.prefix, 'src')
else:
    ## FIXME: this isn't a very good default
    base_prefix = os.path.join(os.getcwd(), 'build')
    base_src_prefix = os.path.join(os.getcwd(), 'src')

# Default package index: PyPI's "simple" (link-scraping) API.
pypi_url = "http://pypi.python.org/simple"

# Default socket timeout in seconds (see the --timeout option).
default_timeout = 15

# Command-line interface definition.  ``main()`` parses sys.argv with this.
parser = optparse.OptionParser(
    usage='%prog [OPTIONS] PACKAGE_NAMES')

parser.add_option(
    '-e', '--editable',
    dest='editables',
    action='append',
    default=[],
    metavar='svn+REPOS_URL[@REV]#egg=PACKAGE',
    help='Install a package directly from a checkout. Source will be checked '
    'out into src/PACKAGE (lower-case) and installed in-place (using '
    'setup.py develop). This option may be provided multiple times.')
parser.add_option(
    '-r', '--requirement',
    dest='requirements',
    action='append',
    default=[],
    metavar='FILENAME',
    help='Install all the packages listed in the given requirements file. '
    'This option can be used multiple times.')

parser.add_option(
    '-f', '--find-links',
    dest='find_links',
    action='append',
    default=[],
    metavar='URL',
    help='URL to look for packages at')
parser.add_option(
    '-i', '--index-url',
    dest='index_url',
    metavar='URL',
    default=pypi_url,
    help='base URL of Python Package Index')
parser.add_option(
    '--extra-index-url',
    dest='extra_index_urls',
    metavar='URL',
    action='append',
    default=[],
    help='extra URLs of package indexes to use in addition to --index-url')

parser.add_option(
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='DIR',
    default=None,
    help='Unpack packages into DIR (default %s) and build from there' % base_prefix)
parser.add_option(
    '--src', '--source',
    dest='src_dir',
    metavar='DIR',
    default=None,
    help='Check out --editable packages into DIR (default %s)' % base_src_prefix)
parser.add_option(
    '--timeout',
    metavar='SECONDS',
    dest='timeout',
    type='float',
    default=default_timeout,
    help='Set the socket timeout (default %s seconds)' % default_timeout)

parser.add_option(
    '-U', '--upgrade',
    dest='upgrade',
    action='store_true',
    help='Upgrade all packages to the newest available version')
parser.add_option(
    '-I', '--ignore-installed',
    dest='ignore_installed',
    action='store_true',
    help='Ignore the installed packages (reinstalling instead)')
parser.add_option(
    '--no-install',
    dest='no_install',
    action='store_true',
    help="Download and unpack all packages, but don't actually install them")

parser.add_option(
    '--bundle',
    dest='bundle',
    metavar='BUNDLE_FILE',
    help="Collect all packages and create a .pybundle file.")
parser.add_option(
    '--freeze',
    dest='freeze',
    metavar='FREEZE_FILE',
    help="Create a file that can be used with --requirement to reproduce the "
    "installed packages. You can also give one --requirement file that will "
    "be used as the basis of the new file.")

parser.add_option(
    '-E', '--environment',
    dest='venv',
    metavar='DIR',
    help='virtualenv environment to run pyinstall in (either give the '
    'interpreter or the environment base directory)')

# -v and -q are counted; their difference selects the log level in main().
parser.add_option(
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output')
parser.add_option(
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help='Give less output')
parser.add_option(
    '--log',
    dest='log',
    metavar='FILENAME',
    help='Log file where a complete (maximum verbosity) record will be kept')

parser.add_option(
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form user:passwd@proxy.server:port. "
    "Note that the user:password@ is optional and required only if you "
    "are behind an authenticated proxy. If you provide "
    "user@proxy.server:port then you will be prompted for a password."
    )

# Note: no default=[] here, so options.install_options may be None;
# main() normalizes it with ``options.install_options or []``.
parser.add_option(
    '--install-option',
    dest='install_options',
    action='append',
    help="Extra arguments to be supplied to the setup.py install "
    "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
    "Use multiple --install-option options to pass multiple options to setup.py install"
    )
+
def get_proxy(proxystr=''):
    """Resolve the proxy specification given on the command line.

    Falls back to the ``HTTP_PROXY`` environment variable when *proxystr*
    is empty.  Returns a ``user:password@server:port`` style string (the
    password is prompted for interactively when only a user was supplied),
    the spec unchanged when it carries no credentials, or None when no
    proxy is configured at all.
    """
    proxystr = proxystr or os.environ.get('HTTP_PROXY', '')
    if not proxystr:
        return None
    if '@' not in proxystr:
        # No credentials embedded; pass the spec through untouched.
        return proxystr
    user_password, server_port = proxystr.split('@', 1)
    if ':' in user_password:
        user, password = user_password.split(':', 1)
    else:
        # Only a user name was given: ask for the password interactively.
        user = user_password
        import getpass
        prompt = 'Password for %s@%s: ' % (user, server_port)
        password = urllib.quote(getpass.getpass(prompt))
    return '%s:%s@%s' % (user, password, server_port)
+
def setup_proxy_handler(proxystr=''):
    """Install a global urllib2 opener that routes http/ftp through a proxy.

    The proxy comes from *proxystr* (or ``HTTP_PROXY`` when it is empty,
    via get_proxy()).  Does nothing when no proxy is configured.
    """
    proxy = get_proxy(proxystr)
    if not proxy:
        return
    handler = urllib2.ProxyHandler({"http": proxy, "ftp": proxy})
    opener = urllib2.build_opener(handler, urllib2.CacheFTPHandler)
    urllib2.install_opener(opener)
+
+
def main(initial_args=None):
    """Command-line entry point.

    Parses *initial_args* (defaults to sys.argv[1:]), sets up logging, the
    socket timeout and proxy, then performs one of: --freeze, --bundle,
    download-only (--no-install), or a full install.  Exits the process
    with 0 on success, 1 on InstallationError, 2 on any other exception.
    """
    global logger
    if initial_args is None:
        initial_args = sys.argv[1:]
    options, args = parser.parse_args(initial_args)

    # restart_in_venv() re-executes this script with the sentinel
    # '___VENV_RESTART___' appended so this invocation knows not to
    # restart again.
    if args and args[-1] == '___VENV_RESTART___':
        ## FIXME: We don't do anything with this value yet:
        venv_location = args[-2]
        args = args[:-2]
        options.venv = None
    # Log level: each -v raises verbosity, each -q lowers it.
    level = 1 # Notify
    level += options.verbose
    level -= options.quiet
    level = Logger.level_for_integer(4-level)
    complete_log = []
    # Logger is defined elsewhere in this file; the full-verbosity stream
    # is captured in complete_log so it can be dumped to a file on failure.
    logger = Logger([(level, sys.stdout),
                     (Logger.DEBUG, complete_log.append)])
    if options.venv:
        if options.verbose > 0:
            # The logger isn't setup yet
            print 'Running in environment %s' % options.venv
        restart_in_venv(options.venv, initial_args)
        # restart_in_venv should actually never return, but for clarity...
        return
    ## FIXME: not sure if this should come before or after venv restart
    if options.log:
        log_fp = open_logfile_append(options.log)
        logger.consumers.append((logger.DEBUG, log_fp))
    else:
        log_fp = None

    socket.setdefaulttimeout(options.timeout or None)

    setup_proxy_handler(options.proxy)

    if options.bundle:
        # Bundling uses throwaway build/src directories and must download
        # everything, even packages that are already installed.
        if not options.build_dir:
            options.build_dir = backup_dir(base_prefix, '-bundle')
        if not options.src_dir:
            options.src_dir = backup_dir(base_src_prefix, '-bundle')
        # We have to get everything when creating a bundle:
        options.ignore_installed = True
        logger.notify('Putting temporary build files in %s and source/develop files in %s'
                      % (display_path(options.build_dir), display_path(options.src_dir)))
    if not options.build_dir:
        options.build_dir = base_prefix
    if not options.src_dir:
        options.src_dir = base_src_prefix
    options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)
    # --install-option has no default, so normalize None to [].
    install_options = options.install_options or []
    try:
        if options.freeze:
            # --freeze takes at most one --requirement file as a template.
            if options.requirements:
                if len(options.requirements) > 1:
                    raise InstallationError(
                        "When using --freeze you can only provide one --requirement option")
                requirement = options.requirements[0]
            else:
                requirement = None
            write_freeze(
                options.freeze,
                requirement=requirement,
                find_links=options.find_links)
            return
        index_urls = [options.index_url] + options.extra_index_urls
        finder = PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls)
        requirement_set = RequirementSet(build_dir=options.build_dir,
                                         src_dir=options.src_dir,
                                         upgrade=options.upgrade,
                                         ignore_installed=options.ignore_installed)
        # Requirements come from three sources: positional args, -e options,
        # and -r requirements files.
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder):
                requirement_set.add_requirement(req)
        exit = 0
        requirement_set.install_files(finder)
        if not options.no_install and not options.bundle:
            requirement_set.install(install_options)
            logger.notify('Successfully installed %s' % requirement_set)
        elif options.bundle:
            requirement_set.create_bundle(options.bundle)
            logger.notify('Created bundle in %s' % options.bundle)
        else:
            logger.notify('Successfully downloaded %s' % requirement_set)
    except InstallationError, e:
        logger.fatal(str(e))
        logger.info('Exception information:\n%s' % format_exc())
        exit = 1
    except:
        logger.fatal('Exception:\n%s' % format_exc())
        exit = 2
    if log_fp is not None:
        log_fp.close()
    if exit:
        # On failure, dump the captured full-verbosity log for debugging.
        log_fn = './pyinstall-log.txt'
        text = '\n'.join(complete_log)
        logger.fatal('Storing complete log in %s' % log_fn)
        log_fp = open_logfile_append(log_fn)
        log_fp.write(text)
        log_fp.close()
    sys.exit(exit)
+
def format_exc(exc_info=None):
    """Render an exception as a full traceback string.

    Uses *exc_info* (a ``(type, value, traceback)`` triple) when given,
    otherwise the exception currently being handled via sys.exc_info().
    """
    if exc_info is None:
        exc_info = sys.exc_info()
    buf = StringIO()
    traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=buf)
    return buf.getvalue()
+
def restart_in_venv(venv, args):
    """
    Restart this script using the interpreter in the given virtual environment.

    If *venv* does not exist and the virtualenv package is importable, a new
    environment is created first.  Replaces the current process via
    os.execv(), so this function never returns on success; exits with
    status 3 when the environment is missing and cannot be created.
    """
    venv = os.path.abspath(venv)
    if not os.path.exists(venv):
        try:
            import virtualenv
        except ImportError:
            print 'The virtual environment does not exist: %s' % venv
            print 'and virtualenv is not installed, so a new environment cannot be created'
            sys.exit(3)
        print 'Creating new virtualenv environment in %s' % venv
        virtualenv.logger = logger
        logger.indent += 2
        ## FIXME: always have no_site_packages?
        virtualenv.create_environment(venv, site_packages=False)
    # The interpreter lives in Scripts/ on Windows, bin/ elsewhere.
    if sys.platform == 'win32':
        python = os.path.join(venv, 'Scripts', 'python')
    else:
        python = os.path.join(venv, 'bin', 'python')
    # The caller may also have passed the interpreter path itself.
    if not os.path.exists(python):
        python = venv
    if not os.path.exists(python):
        # NOTE(review): BadCommand is presumably defined elsewhere in this
        # file -- not visible in this chunk; confirm it exists.
        raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
    base = os.path.dirname(os.path.dirname(python))
    # The sentinel tells the re-executed main() not to restart again.
    os.execv(python, [python, __file__] + args + [base, '___VENV_RESTART___'])
+
+class PackageFinder(object):
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links
+ """
+
+ failure_limit = 3
+
    def __init__(self, find_links, index_urls):
        """Create a finder searching *index_urls* plus explicit *find_links* URLs."""
        self.find_links = find_links
        self.index_urls = index_urls
        # Links collected from packages' dependency_links metadata.
        self.dependency_links = []
        # PageCache is defined elsewhere in this file; caches fetched pages.
        self.cache = PageCache()
+
    def add_dependency_links(self, links):
        """Add URLs from a package's dependency_links metadata to the search list."""
        ## FIXME: this shouldn't be global list this, it should only
        ## apply to requirements of the package that specifies the
        ## dependency_links value
        ## FIXME: also, we should track comes_from (i.e., use Link)
        self.dependency_links.extend(links)
+
+ def find_requirement(self, req, upgrade):
+ url_name = req.url_name
+ # Check that we have the url_name correctly spelled:
+ main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
+ # This will also cache the page, so it's okay that we get it again later:
+ page = self._get_page(main_index_url, req)
+ if page is None:
+ url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req)
+ if url_name is not None:
+ locations = [
+ posixpath.join(url, url_name)
+ for url in self.index_urls] + self.find_links
+ else:
+ locations = list(self.find_links)
+ locations.extend(self.dependency_links)
+ for version in req.absolute_versions:
+ locations = [
+ posixpath.join(url, url_name, version)] + locations
+ locations = [Link(url) for url in locations]
+ logger.debug('URLs to search for versions for %s:' % req)
+ for location in locations:
+ logger.debug('* %s' % location)
+ found_versions = []
+ for page in self._get_pages(locations, req):
+ logger.debug('Analyzing links from page %s' % page.url)
+ logger.indent += 2
+ try:
+ found_versions.extend(self._package_versions(page.links, req.name.lower()))
+ finally:
+ logger.indent -= 2
+ dependency_versions = list(self._package_versions([Link(url) for url in self.dependency_links], req.name.lower()))
+ if dependency_versions:
+ logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
+ found_versions.extend(dependency_versions)
+ if not found_versions:
+ logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
+ raise DistributionNotFound('No distributions at all found for %s' % req)
+ if req.satisfied_by is not None:
+ found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
+ found_versions.sort(reverse=True)
+ applicable_versions = []
+ for (parsed_version, link, version) in found_versions:
+ if version not in req.req:
+ logger.info("Ignoring link %s, version %s doesn't match %s"
+ % (link, version, ','.join([''.join(s) for s in req.req.specs])))
+ continue
+ applicable_versions.append((link, version))
+ existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
+ if not upgrade and existing_applicable:
+ if applicable_versions[0][1] is Inf:
+ logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
+ % req.satisfied_by.version)
+ else:
+ logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
+ % (req.satisfied_by.version, application_versions[0][2]))
+ return None
+ if not applicable_versions:
+ logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
+ % (req, ', '.join([version for parsed_version, link, version in found_versions])))
+ raise DistributionNotFound('No distributions matching the version for %s' % req)
+ if applicable_versions[0][0] is Inf:
+ # We have an existing version, and its the best version
+ logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
+ % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
+ return None
+ if len(applicable_versions) > 1:
+ logger.info('Using version %s (newest of versions: %s)' %
+ (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
+ return applicable_versions[0][0]
+
    def _find_url_name(self, index_url, url_name, req):
        """Finds the true URL name of a package, when the given name isn't quite correct.
        This is usually used to implement case-insensitivity.

        Scans the links on the index's base page for one whose normalized
        basename matches the requirement's normalized name; returns that
        basename, or None when nothing matches.  Raises DistributionNotFound
        when the index base page itself cannot be fetched.
        """
        if not index_url.url.endswith('/'):
            # Vaguely part of the PyPI API... weird but true.
            ## FIXME: bad to modify this?
            index_url.url += '/'
        page = self._get_page(index_url, req)
        if page is None:
            logger.fatal('Cannot fetch index base URL %s' % index_url)
            raise DistributionNotFound('Cannot find requirement %s, nor fetch index URL %s' % (req, index_url))
        # normalize_name is defined elsewhere in this file.
        norm_name = normalize_name(req.url_name)
        for link in page.links:
            base = posixpath.basename(link.path.rstrip('/'))
            if norm_name == normalize_name(base):
                logger.notify('Real name of requirement %s is %s' % (url_name, base))
                return base
        return None
+
    def _get_pages(self, locations, req):
        """Fetch the given locations concurrently and return the pages.

        Returns a list of successfully fetched page objects (locations with
        fetch errors are skipped).  Worker threads also enqueue each page's
        rel links (see _get_queued_page), so the result can include pages
        beyond the initial *locations*.  Uses up to 10 daemon threads and
        joins them all before returning.
        """
        pending_queue = Queue()
        for location in locations:
            pending_queue.put(location)
        done = []
        seen = set()
        threads = []
        for i in range(min(10, len(locations))):
            t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
            t.setDaemon(True)
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
        return done

    # NOTE(review): presumably used to serialize log output from the worker
    # threads above -- its use is not visible in this chunk; confirm.
    _log_lock = threading.Lock()
+
    def _get_queued_page(self, req, pending_queue, done, seen):
        # Worker loop for _get_pages: drain the queue until empty, fetching
        # each not-yet-seen location and collecting pages in `done`.
        # NOTE(review): `seen` is checked and updated without a lock, so two
        # workers could fetch the same location -- presumably harmless (a
        # duplicate page in `done`), but confirm.
        while 1:
            try:
                location = pending_queue.get(False)
            except QueueEmpty:
                # Queue drained; this worker is finished.
                return
            if location in seen:
                continue
            seen.add(location)
            page = self._get_page(location, req)
            if page is None:
                # Fetch failed or was skipped; move on to the next location.
                continue
            done.append(page)
            # Queue download/homepage rel-links for any worker to pick up.
            for link in page.rel_links():
                pending_queue.put(link)
+
+ _egg_fragment_re = re.compile(r'#egg=([^&]*)')
+ _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
+ _py_version_re = re.compile(r'-py([123]\.[0-9])$')
+
+ def _package_versions(self, links, search_name):
+ seen_links = {}
+ for link in links:
+ if link.url in seen_links:
+ continue
+ seen_links[link.url] = None
+ if link.egg_fragment:
+ egg_info = link.egg_fragment
+ else:
+ path = link.path
+ egg_info, ext = link.splitext()
+ if not ext:
+ logger.debug('Skipping link %s; not a file' % link)
+ continue
+ if egg_info.endswith('.tar'):
+ # Special double-extension case:
+ egg_info = egg_info[:-4]
+ ext = '.tar' + ext
+ if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
+ logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
+ continue
+ version = self._egg_info_matches(egg_info, search_name, link)
+ if version is None:
+ logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
+ continue
+ match = self._py_version_re.search(version)
+ if match:
+ version = version[:match.start()]
+ py_version = match.group(1)
+ if py_version != sys.version[:3]:
+ logger.debug('Skipping %s because Python version is incorrect' % link)
+ continue
+ logger.debug('Found link %s, version: %s' % (link, version))
+ yield (pkg_resources.parse_version(version),
+ link,
+ version)
+
+ def _egg_info_matches(self, egg_info, search_name, link):
+ match = self._egg_info_re.search(egg_info)
+ if not match:
+ logger.debug('Could not parse version from link: %s' % link)
+ return None
+ name = match.group(0).lower()
+ # To match the "safe" name that pkg_resources creates:
+ name = name.replace('_', '-')
+ if name.startswith(search_name.lower()):
+ return match.group(0)[len(search_name):].lstrip('-')
+ else:
+ return None
+
    def _get_page(self, link, req):
        # Thin wrapper: fetch (or pull from the shared cache) the HTML page
        # behind `link`; returns None on failure (see HTMLPage.get_page).
        return HTMLPage.get_page(link, req, cache=self.cache)
+
+
+class InstallRequirement(object):
+
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 url=None, update=True):
        """One requirement to be installed.

        `req` may be a pkg_resources.Requirement or a requirement string
        (parsed here); `comes_from` records what caused this requirement
        (a parent InstallRequirement, a string, or None).
        """
        if isinstance(req, basestring):
            req = pkg_resources.Requirement.parse(req)
        self.req = req
        self.comes_from = comes_from
        self.source_dir = source_dir
        self.editable = editable
        self.url = url
        # Cached .egg-info directory location (see egg_info_path).
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # Temporary build dir used while the requirement has no name yet.
        self._temp_build_dir = None
        # Cached result of the is_bundle property.
        self._is_bundle = None
        # True if the editable should be updated:
        self.update = update
+
+ @classmethod
+ def from_editable(cls, editable_req, comes_from=None):
+ name, url = parse_editable(editable_req)
+ return cls(name, comes_from, editable=True, url=url)
+
+ @classmethod
+ def from_line(cls, name, comes_from=None):
+ """Creates an InstallRequirement from a name, which might be a
+ requirement, filename, or URL.
+ """
+ url = None
+ req = name
+ if is_url(name):
+ url = name
+ ## FIXME: I think getting the requirement here is a bad idea:
+ #req = get_requirement_from_url(url)
+ req = None
+ elif is_filename(name):
+ if not os.path.exists(name):
+ logger.warn('Requirement %r looks like a filename, but the file does not exist'
+ % name)
+ url = filename_to_url(name)
+ #req = get_requirement_from_url(url)
+ req = None
+ return cls(req, comes_from, url=url)
+
+ def __str__(self):
+ if self.req:
+ s = str(self.req)
+ if self.url:
+ s += ' from %s' % self.url
+ else:
+ s = self.url
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.editable:
+ if self.req:
+ s += ' checkout from %s' % self.url
+ if self.comes_from:
+ if isinstance(self.comes_from, basestring):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def from_path(self):
+ s = str(self.req)
+ if self.comes_from:
+ if isinstance(self.comes_from, basestring):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ s += '->' + comes_from
+ return s
+
+ def build_location(self, build_dir):
+ if self._temp_build_dir is not None:
+ return self._temp_build_dir
+ if self.req is None:
+ self._temp_build_dir = tempfile.mkdtemp('-build', 'pyinstall-')
+ return self._temp_build_dir
+ if self.editable:
+ name = self.name.lower()
+ else:
+ name = self.name
+ return os.path.join(build_dir, name)
+
+ @property
+ def name(self):
+ if self.req is None:
+ return None
+ return self.req.project_name
+
+ @property
+ def url_name(self):
+ if self.req is None:
+ return None
+ return urllib.quote(self.req.unsafe_name)
+
    @property
    def setup_py(self):
        # Path of the setup.py inside this requirement's source tree.
        return os.path.join(self.source_dir, 'setup.py')
+
    def run_egg_info(self):
        """Run ``setup.py egg_info`` in the source tree so the metadata
        files (PKG-INFO, requires.txt, ...) become available; afterwards
        fills in self.req for previously unnamed requirements."""
        assert self.source_dir
        if self.name:
            logger.notify('Running setup.py egg_info for package %s' % self.name)
        else:
            logger.notify('Running setup.py egg_info for package from %s' % self.url)
        logger.indent += 2
        try:
            # _run_setup_py (below) is a template script that patches
            # setuptools' egg_info command before running setup.py.
            script = self._run_setup_py
            script = script.replace('__SETUP_PY__', repr(self.setup_py))
            script = script.replace('__PKG_NAME__', repr(self.name))
            # We can't put the .egg-info files at the root, because then the source code will be mistaken
            # for an installed egg, causing problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.source_dir, 'pyinstall-egg-info')
                if not os.path.exists(egg_info_dir):
                    os.makedirs(egg_info_dir)
                egg_base_option = ['--egg-base', 'pyinstall-egg-info']
            call_subprocess(
                [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
                cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
                command_level=Logger.VERBOSE_DEBUG,
                command_desc='python setup.py egg_info')
        finally:
            logger.indent -= 2
        if not self.req:
            # Now that egg_info has run we can learn the project's real name.
            self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
+
+ ## FIXME: This is a lame hack, entirely for PasteScript which has
+ ## a self-provided entry point that causes this awkwardness
+ _run_setup_py = """
+__file__ = __SETUP_PY__
+from setuptools.command import egg_info
+def replacement_run(self):
+ self.mkpath(self.egg_info)
+ installer = self.distribution.fetch_build_egg
+ for ep in egg_info.iter_entry_points('egg_info.writers'):
+ # require=False is the change we're making:
+ writer = ep.load(require=False)
+ writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
+ self.find_sources()
+egg_info.egg_info.run = replacement_run
+execfile(__file__)
+"""
+
+ def egg_info_data(self, filename):
+ if self.satisfied_by is not None:
+ if not self.satisfied_by.has_metadata(filename):
+ return None
+ return self.satisfied_by.get_metadata(filename)
+ assert self.source_dir
+ filename = self.egg_info_path(filename)
+ if not os.path.exists(filename):
+ return None
+ fp = open(filename, 'r')
+ data = fp.read()
+ fp.close()
+ return data
+
+ def egg_info_path(self, filename):
+ if self._egg_info_path is None:
+ if self.editable:
+ base = self.source_dir
+ else:
+ base = os.path.join(self.source_dir, 'pyinstall-egg-info')
+ filenames = os.listdir(base)
+ if self.editable:
+ filenames = [f for f in filenames if f.endswith('.egg-info')]
+ assert len(filenames) == 1, "Unexpected files/directories in %s: %s" % (base, ' '.join(filenames))
+ self._egg_info_path = os.path.join(base, filenames[0])
+ return os.path.join(self._egg_info_path, filename)
+
+ def egg_info_lines(self, filename):
+ data = self.egg_info_data(filename)
+ if not data:
+ return []
+ result = []
+ for line in data.splitlines():
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ result.append(line)
+ return result
+
+ def pkg_info(self):
+ p = FeedParser()
+ data = self.egg_info_data('PKG-INFO')
+ if not data:
+ logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
+ p.feed(data or '')
+ return p.close()
+
    @property
    def dependency_links(self):
        # Extra index/download URLs declared by the package's setup.py
        # (setuptools writes them to dependency_links.txt).
        return self.egg_info_lines('dependency_links.txt')
+
+ _requirements_section_re = re.compile(r'\[(.*?)\]')
+
+ def requirements(self, extras=()):
+ in_extra = None
+ for line in self.egg_info_lines('requires.txt'):
+ match = self._requirements_section_re.match(line)
+ if match:
+ in_extra = match.group(1)
+ continue
+ if in_extra and in_extra not in extras:
+ # Skip requirement for an extra we aren't requiring
+ continue
+ yield line
+
+ @property
+ def absolute_versions(self):
+ for qualifier, version in self.req.specs:
+ if qualifier == '==':
+ yield version
+
    @property
    def installed_version(self):
        # Version as recorded in the source tree's PKG-INFO metadata.
        return self.pkg_info()['version']
+
    def assert_source_matches_version(self):
        """Verify the unpacked source's PKG-INFO version actually satisfies
        this requirement; raises InstallationError otherwise."""
        assert self.source_dir
        if self.comes_from == 'command line':
            # We don't check the versions of things explicitly installed.
            # This makes, e.g., "pyinstall Package==dev" possible
            return
        version = self.installed_version
        if version not in self.req:
            logger.fatal(
                'Source in %s has the version %s, which does not match the requirement %s'
                % (display_path(self.source_dir), version, self))
            raise InstallationError(
                'Source in %s has version %s that conflicts with %s'
                % (display_path(self.source_dir), version, self))
        else:
            logger.debug('Source in %s has version %s, which satisfies requirement %s'
                         % (display_path(self.source_dir), version, self))
+
    def update_editable(self):
        """Create or refresh the editable checkout in self.source_dir
        (no-op when self.update is false)."""
        if not self.url:
            logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
            return
        assert self.editable
        assert self.source_dir
        # Editable URLs look like "<vcs>+<real-url>"; only svn is supported.
        assert '+' in self.url, "bad url: %r" % self.url
        if not self.update:
            return
        vc_type, url = self.url.split('+', 1)
        vc_type = vc_type.lower()
        if vc_type == 'svn':
            self.checkout_svn()
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.url, vc_type))
+
+ def checkout_svn(self):
+ url = self.url.split('+', 1)[1]
+ url = url.split('#', 1)[0]
+ if '@' in url:
+ url, rev = url.split('@', 1)
+ else:
+ rev = None
+ if rev:
+ rev_options = ['-r', rev]
+ rev_display = ' (to revision %s)' % rev
+ else:
+ rev_options = []
+ rev_display = ''
+ dest = self.source_dir
+ checkout = True
+ if os.path.exists(os.path.join(self.source_dir, '.svn')):
+ existing_url = _get_svn_info(self.source_dir)[0]
+ checkout = False
+ if existing_url == url:
+ logger.info('Checkout in %s exists, and has correct URL (%s)'
+ % (display_path(self.source_dir), url))
+ logger.notify('Updating checkout %s%s' % (display_path(self.source_dir), rev_display))
+ call_subprocess(
+ ['svn', 'update'] + rev_options + [self.source_dir])
+ else:
+ logger.warn('svn checkout in %s exists with URL %s' % (display_path(self.source_dir), existing_url))
+ logger.warn('The plan is to install the svn repository %s' % url)
+ response = ask('What to do? (s)witch, (i)gnore, (w)ipe, (b)ackup', ('s', 'i', 'w', 'b'))
+ if response == 's':
+ logger.notify('Switching checkout %s to %s%s'
+ % (display_path(self.source_dir), url, rev_display))
+ call_subprocess(
+ ['svn', 'switch'] + rev_options + [url, self.source_dir])
+ elif response == 'i':
+ # do nothing
+ pass
+ elif response == 'w':
+ logger.warn('Deleting %s' % display_path(self.source_dir))
+ shutil.rmtree(self.source_dir)
+ checkout = True
+ elif response == 'b':
+ dest_dir = backup_dir(self.source_dir)
+ logger.warn('Backing up %s to %s' % display_path(self.source_dir, dest_dir))
+ shutil.move(self.source_dir, dest_dir)
+ checkout = True
+ if checkout:
+ logger.notify('Checking out %s%s to %s' % (url, rev_display, display_path(self.source_dir)))
+ call_subprocess(
+ ['svn', 'checkout', '-q'] + rev_options + [url, self.source_dir])
+
    def install(self, install_options):
        """Run ``setup.py install`` for this requirement (or ``develop``
        for editables), recording installed files and redirecting headers
        to a writable location."""
        if self.editable:
            self.install_editable()
            return
        ## FIXME: this is not a useful record:
        ## Also a bad location
        ## And not right on Windows
        install_location = os.path.join(sys.prefix, 'lib', 'python%s' % sys.version[:3])
        record_filename = os.path.join(install_location, 'install-record-%s.txt' % self.name)
        ## FIXME: I'm not sure if this is a reasonable location; probably not
        ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
        header_dir = os.path.join(os.path.dirname(os.path.dirname(self.source_dir)), 'lib', 'include')
        logger.notify('Running setup.py install for %s' % self.name)
        logger.indent += 2
        try:
            call_subprocess(
                [sys.executable, '-c',
                 "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
                 'install', '--single-version-externally-managed', '--record', record_filename,
                 '--install-headers', header_dir] + install_options,
                cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
        finally:
            logger.indent -= 2
+
    def remove_temporary_source(self):
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        # NOTE(review): delete_marker_filename asserts self.source_dir, so
        # this raises when source_dir is None and the tree isn't a bundle --
        # presumably callers always have a source_dir here; confirm.
        if self.is_bundle or os.path.exists(self.delete_marker_filename):
            logger.info('Removing source in %s' % self.source_dir)
            if self.source_dir:
                shutil.rmtree(self.source_dir)
            self.source_dir = None
            if self._temp_build_dir and os.path.exists(self._temp_build_dir):
                shutil.rmtree(self._temp_build_dir)
            self._temp_build_dir = None
+
    def install_editable(self):
        """Install this requirement in develop (editable) mode via
        ``setup.py develop --no-deps``."""
        logger.notify('Running setup.py develop for %s' % self.name)
        logger.indent += 2
        try:
            ## FIXME: should we do --install-headers here too?
            call_subprocess(
                [sys.executable, '-c',
                 "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
                 'develop', '--no-deps'], cwd=self.source_dir, filter_stdout=self._filter_install,
                show_stdout=False)
        finally:
            logger.indent -= 2
+
+ def _filter_install(self, line):
+ level = Logger.NOTIFY
+ for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
+ r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
+ r'^byte-compiling ',
+ # Not sure what this warning is, but it seems harmless:
+ r"^warning: manifest_maker: standard file '-c' not found$"]:
+ if re.search(regex, line.strip()):
+ level = Logger.INFO
+ break
+ return (level, line)
+
+ def check_if_exists(self):
+ """Checks if this requirement is satisfied by something already installed"""
+ if self.req is None:
+ return False
+ try:
+ dist = pkg_resources.get_distribution(self.req)
+ except pkg_resources.DistributionNotFound:
+ return False
+ self.satisfied_by = dist
+ return True
+
+ @property
+ def is_bundle(self):
+ if self._is_bundle is not None:
+ return self._is_bundle
+ base = self._temp_build_dir
+ if not base:
+ ## FIXME: this doesn't seem right:
+ return False
+ self._is_bundle = os.path.exists(os.path.join(base, 'pyinstall-manifest.txt'))
+ return self._is_bundle
+
    def bundle_requirements(self):
        """Yield InstallRequirements for every package shipped inside this
        bundle's src/ (editable checkouts) and build/ directories."""
        base = self._temp_build_dir
        assert base
        src_dir = os.path.join(base, 'src')
        build_dir = os.path.join(base, 'build')
        if os.path.exists(src_dir):
            for package in os.listdir(src_dir):
                ## FIXME: svnism:
                svn_checkout = os.path.join(src_dir, package, 'svn-checkout.txt')
                url = rev = None
                if os.path.exists(svn_checkout):
                    fp = open(svn_checkout)
                    content = fp.read()
                    fp.close()
                    url, rev = _parse_svn_checkout_text(content)
                    if url:
                        # Rebuild the editable URL in svn+URL@REV form.
                        url = 'svn+%s@%s' % (url, rev)
                    else:
                        url = None
                yield InstallRequirement(
                    package, self, editable=True, url=url,
                    update=False, source_dir=os.path.join(src_dir, package))
        if os.path.exists(build_dir):
            for package in os.listdir(build_dir):
                yield InstallRequirement(
                    package, self,
                    source_dir=os.path.join(build_dir, package))
+
    def move_bundle_files(self, dest_build_dir, dest_src_dir):
        """Move this bundle's build/ and src/ subdirectories into the shared
        build/src dirs, skipping packages already present at the destination."""
        base = self._temp_build_dir
        assert base
        src_dir = os.path.join(base, 'src')
        build_dir = os.path.join(base, 'build')
        for source_dir, dest_dir in [(src_dir, dest_src_dir),
                                     (build_dir, dest_build_dir)]:
            if os.path.exists(source_dir):
                for dirname in os.listdir(source_dir):
                    dest = os.path.join(dest_dir, dirname)
                    if os.path.exists(dest):
                        logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
                                    % (dest, dirname, self))
                        continue
                    # Create the destination parent lazily, only once we know
                    # something actually needs to move there.
                    if not os.path.exists(dest_dir):
                        logger.info('Creating directory %s' % dest_dir)
                        os.makedirs(dest_dir)
                    shutil.move(os.path.join(source_dir, dirname), dest)
+
+ @property
+ def delete_marker_filename(self):
+ assert self.source_dir
+ return os.path.join(self.source_dir, 'pyinstall-delete-this-directory.txt')
+
+DELETE_MARKER_MESSAGE = '''\
+This file is placed here by pyinstall to indicate the source was put
+here by pyinstall.
+
+Once this package is successfully installed this source code will be
+deleted (unless you remove this file).
+'''
+
+class RequirementSet(object):
+
    def __init__(self, build_dir, src_dir, upgrade=False, ignore_installed=False):
        """A collection of requirements to download, unpack and install.

        build_dir holds unpacked sdists; src_dir holds editable checkouts.
        """
        self.build_dir = build_dir
        self.src_dir = src_dir
        self.upgrade = upgrade
        self.ignore_installed = ignore_installed
        # Maps project name -> InstallRequirement.
        self.requirements = {}
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements (plain URLs/files) whose names aren't known yet.
        self.unnamed_requirements = []
+
+ def __str__(self):
+ reqs = [req for req in self.requirements.values()
+ if not req.comes_from]
+ reqs.sort(key=lambda req: req.name.lower())
+ return ' '.join([str(req.req) for req in reqs])
+
+ def add_requirement(self, install_req):
+ name = install_req.name
+ if not name:
+ self.unnamed_requirements.append(install_req)
+ else:
+ if self.has_requirement(name):
+ raise InstallationError(
+ 'Double requirement given: %s (aready in %s, name=%r)'
+ % (install_req, self.get_requirement(name), name))
+ self.requirements[name] = install_req
+ ## FIXME: what about other normalizations? E.g., _ vs. -?
+ if name.lower() != name:
+ self.requirement_aliases[name.lower()] = name
+
+ def has_requirement(self, project_name):
+ for name in project_name, project_name.lower():
+ if name in self.requirements or name in self.requirement_aliases:
+ return True
+ return False
+
+ def get_requirement(self, project_name):
+ for name in project_name, project_name.lower():
+ if name in self.requirements:
+ return self.requirements[name]
+ if name in self.requirement_aliases:
+ return self.requirements[self.requirement_aliases[name]]
+ raise KeyError("No project with the name %r" % project_name)
+
    def install_files(self, finder):
        """Download/unpack every requirement in the set, recursively
        collecting dependencies discovered via egg_info metadata.

        `finder` locates distributions on the index; nothing is installed
        here -- see RequirementSet.install for that step.
        """
        unnamed = list(self.unnamed_requirements)
        reqs = self.requirements.values()
        while reqs or unnamed:
            # Unnamed requirements go first so egg_info can fill in their
            # names before dependency resolution relies on them.
            if unnamed:
                req_to_install = unnamed.pop(0)
            else:
                req_to_install = reqs.pop(0)
            install = True
            if not self.ignore_installed and not req_to_install.editable:
                if req_to_install.check_if_exists():
                    if not self.upgrade:
                        # If we are upgrading, we still need to check the version
                        install = False
            if req_to_install.satisfied_by is not None:
                logger.notify('Requirement already satisfied: %s' % req_to_install)
            elif req_to_install.editable:
                logger.notify('Checking out %s' % req_to_install)
            else:
                if req_to_install.url and req_to_install.url.lower().startswith('file:'):
                    logger.notify('Unpacking %s' % display_path(url_to_filename(req_to_install.url)))
                else:
                    logger.notify('Downloading/unpacking %s' % req_to_install)
            logger.indent += 2
            is_bundle = False
            try:
                if req_to_install.editable:
                    location = req_to_install.build_location(self.src_dir)
                    req_to_install.source_dir = location
                    req_to_install.update_editable()
                    req_to_install.run_egg_info()
                elif install:
                    location = req_to_install.build_location(self.build_dir)
                    ## FIXME: is the existance of the checkout good enough to use it? I'm don't think so.
                    unpack = True
                    if not os.path.exists(os.path.join(location, 'setup.py')):
                        ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
                        if req_to_install.url is None:
                            url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
                        else:
                            ## FIXME: should req_to_install.url already be a link?
                            url = Link(req_to_install.url)
                            assert url
                        if url:
                            try:
                                self.unpack_url(url, location)
                            except urllib2.HTTPError, e:
                                logger.fatal('Could not install requirement %s because of error %s'
                                             % (req_to_install, e))
                                raise InstallationError(
                                    'Could not install requirement %s because of HTTP error %s for URL %s'
                                    % (req_to_install, e, url))
                    else:
                        unpack = False
                    if unpack:
                        is_bundle = req_to_install.is_bundle
                        if is_bundle:
                            # A bundle expands into many sub-requirements.
                            for subreq in req_to_install.bundle_requirements():
                                reqs.append(subreq)
                                self.add_requirement(subreq)
                            req_to_install.move_bundle_files(self.build_dir, self.src_dir)
                        else:
                            req_to_install.source_dir = location
                            req_to_install.run_egg_info()
                            req_to_install.assert_source_matches_version()
                            # Mark the unpacked tree for deletion after install.
                            f = open(req_to_install.delete_marker_filename, 'w')
                            f.write(DELETE_MARKER_MESSAGE)
                            f.close()
                if not is_bundle:
                    ## FIXME: shouldn't be globally added:
                    finder.add_dependency_links(req_to_install.dependency_links)
                    ## FIXME: add extras in here:
                    for req in req_to_install.requirements():
                        try:
                            name = pkg_resources.Requirement.parse(req).project_name
                        except ValueError, e:
                            ## FIXME: proper warning
                            logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
                            continue
                        if self.has_requirement(name):
                            ## FIXME: check for conflict
                            continue
                        subreq = InstallRequirement(req, req_to_install)
                        reqs.append(subreq)
                        self.add_requirement(subreq)
                    if req_to_install.name not in self.requirements:
                        self.requirements[req_to_install.name] = req_to_install
                else:
                    req_to_install.remove_temporary_source()
            finally:
                logger.indent -= 2
+
    def unpack_url(self, link, location):
        """Fetch `link` and unpack it into `location`.

        svn links are checked out; file: URLs are unpacked in place; HTTP
        downloads are MD5-checked (when the link carries a #md5= fragment)
        and optionally cached under $PYINSTALL_DOWNLOAD_CACHE.
        """
        if link.scheme == 'svn' or link.scheme == 'svn+ssh':
            self.svn_checkout(link, location)
            return
        # NOTE(review): this temp dir is never removed on the cache-hit
        # path and is unused for file: URLs -- confirm that is intended.
        dir = tempfile.mkdtemp()
        if link.url.lower().startswith('file:'):
            source = url_to_filename(link.url)
            content_type = mimetypes.guess_type(source)
            self.unpack_file(source, location, content_type, link)
            return
        md5_hash = link.md5_hash
        target_url = link.url.split('#', 1)[0]
        target_file = None
        if os.environ.get('PYINSTALL_DOWNLOAD_CACHE'):
            target_file = os.path.join(os.environ['PYINSTALL_DOWNLOAD_CACHE'],
                                       urllib.quote(target_url, ''))
        if (target_file and os.path.exists(target_file)
            and os.path.exists(target_file+'.content-type')):
            # Cache hit: reuse the previously downloaded archive.
            fp = open(target_file+'.content-type')
            content_type = fp.read().strip()
            fp.close()
            if md5_hash:
                # Re-hash the cached file so the check below still runs.
                download_hash = md5()
                fp = open(target_file, 'rb')
                while 1:
                    chunk = fp.read(4096)
                    if not chunk:
                        break
                    download_hash.update(chunk)
                fp.close()
            temp_location = target_file
            logger.notify('Using download cache from %s' % target_file)
        else:
            try:
                resp = urllib2.urlopen(target_url)
            except urllib2.HTTPError, e:
                logger.fatal("HTTP error %s while getting %s" % (e.code, link))
                raise
            except IOError, e:
                # Typically an FTP error
                logger.fatal("Error %s while getting %s" % (e, link))
                raise
            content_type = resp.info()['content-type']
            filename = link.filename
            # NOTE(review): splitext() appears to return a (base, ext) tuple
            # (see unpack_file), so `if not ext` is never true and the
            # guess_extension branch looks unreachable -- confirm.
            ext = splitext(filename)
            if not ext:
                ext = mimetypes.guess_extension(content_type)
                filename += ext
            temp_location = os.path.join(dir, filename)
            fp = open(temp_location, 'wb')
            if md5_hash:
                download_hash = md5()
            try:
                total_length = int(resp.info()['content-length'])
            except (ValueError, KeyError):
                total_length = 0
            downloaded = 0
            show_progress = total_length > 40*1000 or not total_length
            show_url = link.show_url
            try:
                if show_progress:
                    ## FIXME: the URL can get really long in this message:
                    if total_length:
                        logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
                    else:
                        logger.start_progress('Downloading %s (unknown size): ' % show_url)
                else:
                    logger.notify('Downloading %s' % show_url)
                logger.debug('Downloading from URL %s' % link)
                while 1:
                    chunk = resp.read(4096)
                    if not chunk:
                        break
                    downloaded += len(chunk)
                    if show_progress:
                        if not total_length:
                            logger.show_progress('%s' % format_size(downloaded))
                        else:
                            logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded)))
                    if md5_hash:
                        download_hash.update(chunk)
                    fp.write(chunk)
                fp.close()
            finally:
                if show_progress:
                    logger.end_progress('%s downloaded' % format_size(downloaded))
        if md5_hash:
            download_hash = download_hash.hexdigest()
            if download_hash != md5_hash:
                logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
                             % (link, download_hash, md5_hash))
                raise InstallationError('Bad MD5 hash for package %s' % link)
        self.unpack_file(temp_location, location, content_type, link)
        if target_file and target_file != temp_location:
            # Store the fresh download (and its content type) in the cache.
            logger.notify('Storing download in cache at %s' % display_path(target_file))
            shutil.copyfile(temp_location, target_file)
            fp = open(target_file+'.content-type', 'w')
            fp.write(content_type)
            fp.close()
            os.unlink(temp_location)
+
+ def unpack_file(self, filename, location, content_type, link):
+ if (content_type == 'application/zip'
+ or filename.endswith('.zip')
+ or filename.endswith('.pybundle')):
+ self.unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
+ elif (content_type == 'application/x-gzip'
+ or tarfile.is_tarfile(filename)
+ or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz')):
+ self.untar_file(filename, location)
+ elif (content_type.startswith('text/html')
+ and is_svn_page(file_contents(filename))):
+ # We don't really care about this
+ self.svn_checkout(link.url, location)
+ else:
+ ## FIXME: handle?
+ ## FIXME: magic signatures?
+ logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
+ % (filename, location, content_type))
+ raise InstallationError('Cannot determine archive format of %s' % location)
+
+ def unzip_file(self, filename, location, flatten=True):
+ """Unzip the file (zip file located at filename) to the destination
+ location"""
+ if not os.path.exists(location):
+ os.makedirs(location)
+ zipfp = open(filename, 'rb')
+ try:
+ zip = zipfile.ZipFile(zipfp)
+ leading = has_leading_dir(zip.namelist()) and flatten
+ for name in zip.namelist():
+ data = zip.read(name)
+ fn = name
+ if leading:
+ fn = split_leading_dir(name)[1]
+ fn = os.path.join(location, fn)
+ dir = os.path.dirname(fn)
+ if not os.path.exists(dir):
+ os.makedirs(dir)
+ if fn.endswith('/'):
+ # A directory
+ if not os.path.exists(fn):
+ os.makedirs(fn)
+ else:
+ fp = open(fn, 'wb')
+ try:
+ fp.write(data)
+ finally:
+ fp.close()
+ finally:
+ zipfp.close()
+
    def untar_file(self, filename, location):
        """Untar the file (tar file located at filename) to the destination location"""
        if not os.path.exists(location):
            os.makedirs(location)
        # Pick the decompression mode from the extension; fall back to
        # transparent detection ('r:*') with a warning.
        if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
            mode = 'r:gz'
        elif filename.lower().endswith('.bz2'):
            mode = 'r:bz2'
        elif filename.lower().endswith('.tar'):
            mode = 'r'
        else:
            logger.warn('Cannot determine compression type for file %s' % filename)
            mode = 'r:*'
        tar = tarfile.open(filename, mode)
        try:
            # Strip a single shared top-level directory, if present.
            leading = has_leading_dir([member.name for member in tar.getmembers()])
            for member in tar.getmembers():
                fn = member.name
                if leading:
                    fn = split_leading_dir(fn)[1]
                path = os.path.join(location, fn)
                if member.isdir():
                    if not os.path.exists(path):
                        os.makedirs(path)
                else:
                    # NOTE(review): extractfile() can return None for special
                    # members (links, devices); copyfileobj would then fail --
                    # confirm archives never contain such members.
                    fp = tar.extractfile(member)
                    if not os.path.exists(os.path.dirname(path)):
                        os.makedirs(os.path.dirname(path))
                    destfp = open(path, 'wb')
                    try:
                        shutil.copyfileobj(fp, destfp)
                    finally:
                        destfp.close()
                    fp.close()
        finally:
            tar.close()
+
+ def svn_checkout(self, url, location):
+ """Check out the svn repository at the url to the destination location"""
+ if '#' in url:
+ url = url.split('#', 1)[0]
+ logger.notify('Checking out svn repository %s to %s' % (url, location))
+ logger.indent += 2
+ try:
+ ## FIXME: not sure that --force is good, but it is needed
+ ## when installing directly (not via a requirement),
+ ## because the destination directory already exists.
+ call_subprocess(['svn', 'checkout', '--force', url, location],
+ filter_stdout=self._filter_svn, show_stdout=False)
+ finally:
+ logger.indent -= 2
+
    def _filter_svn(self, line):
        # Demote all svn output to INFO so it only shows at higher verbosity.
        return (Logger.INFO, line)
+
+ def install(self, install_options):
+ """Install everything in this set (after having downloaded and unpacked the packages)"""
+ requirements = sorted(self.requirements.values(), key=lambda p: p.name.lower())
+ logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in requirements])))
+ logger.indent += 2
+ try:
+ for requirement in self.requirements.values():
+ if requirement.satisfied_by is not None:
+ # Already installed
+ continue
+ requirement.install(install_options)
+ requirement.remove_temporary_source()
+ finally:
+ logger.indent -= 2
+
    def create_bundle(self, bundle_filename):
        """Zip the build and src dirs into a .pybundle archive, recording
        svn checkout info for editable packages, then delete the build/src
        directories."""
        ## FIXME: can't decide which is better; zip is easier to read
        ## random files from, but tar.bz2 is smaller and not as lame a
        ## format.

        ## FIXME: this file should really include a manifest of the
        ## packages, maybe some other metadata files. It would make
        ## it easier to detect as well.
        zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
        svn_dirs = []
        for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
            dir = os.path.normcase(os.path.abspath(dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                svn_url = svn_rev = None
                if '.svn' in dirnames:
                    for svn_dir in svn_dirs:
                        if dirpath.startswith(svn_dir):
                            # svn-checkout.txt already in parent directory
                            break
                    else:
                        svn_url, svn_rev = _get_svn_info(os.path.join(dir, dirpath))
                        svn_dirs.append(dirpath)
                    # Never archive the .svn metadata itself.
                    dirnames.remove('.svn')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zip.writestr(basename + '/' + name + '/', '')
                for filename in filenames:
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, basename + '/' + name)
                if svn_url:
                    # Record checkout URL/revision so the bundle can later be
                    # re-expanded into editable checkouts.
                    name = os.path.join(dirpath, 'svn-checkout.txt')
                    name = self._clean_zip_name(name, dir)
                    zip.writestr(basename + '/' + name, _svn_checkout_text(svn_url, svn_rev))
        zip.writestr('pyinstall-manifest.txt', self.bundle_requirements())
        zip.close()
        # Unlike installation, this will always delete the build directories
        logger.info('Removing temporary build dir %s and source dir %s'
                    % (self.build_dir, self.src_dir))
        for dir in self.build_dir, self.src_dir:
            if os.path.exists(dir):
                shutil.rmtree(dir)
+
+
+ BUNDLE_HEADER = '''\
+# This is a pyinstall bundle file, that contains many source packages
+# that can be installed as a group. You can install this like:
+# pyinstall this_file.zip
+# The rest of the file contains a list of all the packages included:
+'''
+
+ def bundle_requirements(self):
+ parts = [self.BUNDLE_HEADER]
+ for req in sorted(
+ [req for req in self.requirements.values()
+ if not req.comes_from],
+ key=lambda x: x.name):
+ parts.append('%s==%s\n' % (req.name, req.installed_version))
+ parts.append('# These packages were installed to satisfy the above requirements:\n')
+ for req in sorted(
+ [req for req in self.requirements.values()
+ if req.comes_from],
+ key=lambda x: x.name):
+ parts.append('%s==%s\n' % (req.name, req.installed_version))
+ ## FIXME: should we do something with self.unnamed_requirements?
+ return ''.join(parts)
+
+ def _clean_zip_name(self, name, prefix):
+ assert name.startswith(prefix+'/'), (
+ "name %r doesn't start with prefix %r" % (name, prefix))
+ name = name[len(prefix)+1:]
+ name = name.replace(os.path.sep, '/')
+ return name
+
+class HTMLPage(object):
+ """Represents one page, along with its URL"""
+
+ ## FIXME: these regexes are horrible hacks:
+ _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
+ _download_re = re.compile(r'<th>\s*download\s+url', re.I)
+ ## These aren't so aweful:
+ _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
+ _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
+
+ def __init__(self, content, url, headers=None):
+ self.content = content
+ self.url = url
+ self.headers = headers
+
+ def __str__(self):
+ return self.url
+
+ @classmethod
+ def get_page(cls, link, req, cache=None, skip_archives=True):
+ url = link.url
+ url = url.split('#', 1)[0]
+ if cache.too_many_failures(url):
+ return None
+ if url.lower().startswith('svn'):
+ logger.debug('Cannot look at svn URL %s' % link)
+ return None
+ if cache is not None:
+ inst = cache.get_page(url)
+ if inst is not None:
+ return inst
+ try:
+ if skip_archives:
+ if cache is not None:
+ if cache.is_archive(url):
+ return None
+ filename = link.filename
+ for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
+ if filename.endswith(bad_ext):
+ content_type = cls._get_content_type(url)
+ if content_type.lower().startswith('text/html'):
+ break
+ else:
+ logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
+ if cache is not None:
+ cache.set_is_archive(url)
+ return None
+ logger.debug('Getting page %s' % url)
+ resp = urllib2.urlopen(url)
+ real_url = resp.geturl()
+ headers = resp.info()
+ inst = cls(resp.read(), real_url, headers)
+ except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error), e:
+ desc = str(e)
+ if isinstance(e, socket.timeout):
+ log_meth = logger.warn
+ level =1
+ desc = 'timed out'
+ elif isinstance(e, urllib2.URLError):
+ log_meth = logger.warn
+ if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
+ desc = 'timed out'
+ level = 1
+ else:
+ level = 2
+ elif isinstance(e, urllib2.HTTPError) and e.code == 404:
+ ## FIXME: notify?
+ log_meth = logger.info
+ level = 2
+ else:
+ log_meth = logger.warn
+ level = 1
+ log_meth('Could not fetch URL %s: %s' % (link, desc))
+ log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
+ if cache is not None:
+ cache.add_page_failure(url, level)
+ return None
+ if cache is not None:
+ cache.add_page([url, real_url], inst)
+ return inst
+
+ @staticmethod
+ def _get_content_type(url):
+ """Get the Content-Type of the given url, using a HEAD request"""
+ scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
+ if scheme == 'http':
+ ConnClass = httplib.HTTPConnection
+ elif scheme == 'https':
+ ConnClass = httplib.HTTPSConnection
+ else:
+ ## FIXME: some warning or something?
+ ## assertion error?
+ return ''
+ if query:
+ path += '?' + query
+ conn = ConnClass(netloc)
+ try:
+ conn.request('HEAD', path, headers={'Host': netloc})
+ resp = conn.getresponse()
+ if resp.status != 200:
+ ## FIXME: doesn't handle redirects
+ return ''
+ return resp.getheader('Content-Type') or ''
+ finally:
+ conn.close()
+
+ @property
+ def links(self):
+ """Yields all links in the page"""
+ for match in self._href_re.finditer(self.content):
+ url = match.group(1) or match.group(2) or match.group(3)
+ yield Link(urlparse.urljoin(self.url, url), self)
+
+ def rel_links(self):
+ for url in self.explicit_rel_links():
+ yield url
+ for url in self.scraped_rel_links():
+ yield url
+
+ def explicit_rel_links(self, rels=('homepage', 'download')):
+ """Yields all links with the given relations"""
+ for match in self._rel_re.finditer(self.content):
+ found_rels = match.group(1).lower().split()
+ for rel in rels:
+ if rel in found_rels:
+ break
+ else:
+ continue
+ match = self._href_re.search(match.group(0))
+ if not match:
+ continue
+ url = match.group(1) or match.group(2) or match.group(3)
+ yield Link(urlparse.urljoin(self.url, url), self)
+
+ def scraped_rel_links(self):
+ for regex in (self._homepage_re, self._download_re):
+ match = regex.search(self.content)
+ if not match:
+ continue
+ href_match = self._href_re.search(self.content, pos=match.end())
+ if not href_match:
+ continue
+ url = match.group(1) or match.group(2) or match.group(3)
+ if not url:
+ continue
+ url = urlparse.urljoin(self.url, url)
+ yield Link(url, self)
+
class PageCache(object):
    """In-memory cache of fetched HTML pages, plus per-URL bookkeeping:
    a failure score (to stop retrying bad URLs) and an is-archive flag."""

    # A URL whose accumulated failure score reaches this value is skipped.
    failure_limit = 3

    def __init__(self):
        self._failures = {}
        self._pages = {}
        self._archives = {}

    def too_many_failures(self, url):
        """True once ``url`` has accumulated ``failure_limit`` points."""
        return self._failures.get(url, 0) >= self.failure_limit

    def get_page(self, url):
        """Return the cached page for ``url``, or None if not cached."""
        return self._pages.get(url)

    def is_archive(self, url):
        """True if ``url`` was recorded as pointing at an archive file."""
        return self._archives.get(url, False)

    def set_is_archive(self, url, value=True):
        """Record whether ``url`` points at an archive file."""
        self._archives[url] = value

    def add_page_failure(self, url, level):
        """Add ``level`` points to the failure score of ``url``."""
        self._failures[url] = level + self._failures.get(url, 0)

    def add_page(self, urls, page):
        """Cache ``page`` under every URL alias in ``urls``."""
        for alias in urls:
            self._pages[alias] = page
+
class Link(object):
    """A hyperlink, optionally remembering the page it was found on, with
    helpers for picking apart its URL."""

    def __init__(self, url, comes_from=None):
        self.url = url
        # The HTMLPage (or other object) this link was harvested from, if any.
        self.comes_from = comes_from

    def __str__(self):
        if not self.comes_from:
            return self.url
        return '%s (from %s)' % (self.url, self.comes_from)

    def __repr__(self):
        return '<Link %s>' % self

    @property
    def filename(self):
        """Base filename of the URL, with fragment and query stripped."""
        url = self.url.split('#', 1)[0].split('?', 1)[0].rstrip('/')
        name = posixpath.basename(url)
        assert name, (
            'URL %r produced no filename' % url)
        return name

    @property
    def scheme(self):
        """The URL scheme, e.g. 'http'."""
        return urlparse.urlsplit(self.url)[0]

    @property
    def path(self):
        """The path component of the URL."""
        return urlparse.urlsplit(self.url)[2]

    def splitext(self):
        """(base, extension) of the URL's trailing path component."""
        return splitext(posixpath.basename(self.path.rstrip('/')))

    _egg_fragment_re = re.compile(r'#egg=([^&]*)')

    @property
    def egg_fragment(self):
        """The value of a '#egg=...' fragment in the URL, or None."""
        match = self._egg_fragment_re.search(self.url)
        if match is None:
            return None
        return match.group(1)

    _md5_re = re.compile(r'md5=([a-f0-9]+)')

    @property
    def md5_hash(self):
        """The hex digest from an 'md5=...' marker in the URL, or None."""
        match = self._md5_re.search(self.url)
        if match is None:
            return None
        return match.group(1)

    @property
    def show_url(self):
        """Filename-like portion of the URL, for display purposes."""
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
+
+############################################################
+## Writing freeze files
+
+
def write_freeze(filename, requirement, find_links, find_tags=False):
    """Write a frozen requirements file listing every installed package.

    filename: output path, or '-' for stdout.
    requirement: optional existing requirements file; its comment/blank
        lines and ordering are preserved, its requirements rewritten with
        the installed versions, and anything else installed is appended.
    find_links: '-f' URLs to emit at the top; those with '#egg=' also
        serve as dependency links for resolving svn locations.
    find_tags: passed through to FrozenRequirement.from_dist.
    """
    if filename == '-':
        logger.move_stdout_to_stderr()
    dependency_links = []
    if filename == '-':
        f = sys.stdout
    else:
        ## FIXME: should be possible to overwrite requirement file
        logger.notify('Writing frozen requirements to %s' % filename)
        f = open(filename, 'w')
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        f.write('-f %s\n' % link)
    installations = {}
    for dist in pkg_resources.working_set:
        if dist.key in ('setuptools', 'pyinstall', 'python'):
            ## FIXME: also skip virtualenv?
            continue
        req = FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
        installations[req.name] = req
    if requirement:
        req_f = open(requirement)
        # try/finally so the requirements file is closed even if a line
        # fails to parse (the original leaked this file handle).
        try:
            for line in req_f:
                # Pass comment and blank lines straight through.  (Bug fix:
                # the original tested ``not line``, which is never true when
                # iterating a file -- every line keeps its newline -- so
                # blank lines fell through to InstallRequirement.from_line.)
                if not line.strip() or line.strip().startswith('#'):
                    f.write(line)
                    continue
                elif line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(line)
                elif (line.startswith('-r') or line.startswith('--requirement')
                      or line.startswith('-Z') or line.startswith('--always-unzip')):
                    # Nested requirement files and unzip flags are not
                    # reproduced in the frozen output.
                    logger.debug('Skipping line %r' % line.strip())
                    continue
                else:
                    line_req = InstallRequirement.from_line(line)
                if not line_req.name:
                    logger.notify("Skipping line because it's not clear what it would install: %s"
                                  % line.strip())
                    continue
                if line_req.name not in installations:
                    logger.warn("Requirement file contains %s, but that package is not installed"
                                % line.strip())
                    continue
                # Emit the frozen form in place of the original line, and
                # mark it as handled so it is not repeated below.
                f.write(str(installations[line_req.name]))
                del installations[line_req.name]
        finally:
            req_f.close()
    f.write('## The following requirements were added by pyinstall --freeze:\n')
    for installation in sorted(installations.values(), key=lambda x: x.name):
        f.write(str(installation))
    if filename != '-':
        logger.notify('Put requirements in %s' % filename)
        f.close()
+
class FrozenRequirement(object):
    """One entry of a frozen requirements file: a requirement specifier (or
    an editable svn URL), whether it should be installed editable, and any
    comment lines to emit above it."""

    def __init__(self, name, req, editable, comments=()):
        # name: project name; req: requirement string or pkg_resources
        # Requirement; editable: emit with '-e'; comments: lines written
        # verbatim above the requirement.
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    # Version suffixes that point at a specific svn state: '-r1234'
    # (revision) and '-20YYMMDD' (date-stamped snapshot).
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links, find_tags=False):
        """Build a FrozenRequirement from an installed distribution.

        An svn checkout (a .svn directory at dist.location) is frozen via
        get_src_requirement(); otherwise the dist's own '==' requirement is
        used, upgraded to an editable svn URL when its version carries an
        -rNNNN/-YYYYMMDD suffix and a matching URL exists in
        ``dependency_links``.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        if os.path.exists(os.path.join(location, '.svn')):
            editable = True
            req = get_src_requirement(dist, location, find_tags)
            if req is None:
                # Could not read the svn URL: fall back to a plain,
                # non-editable requirement with a warning comment.
                logger.warn('Could not determine svn location of %s' % location)
                comments.append('## !! Could not determine svn location')
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            # as_requirement() is expected to produce exactly one '=='
            # spec ('name==version'); anything else is a programming error.
            assert len(specs) == 1 and specs[0][0] == '=='
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                # The installed version names a specific svn revision or
                # snapshot date; try to reproduce it as an editable checkout.
                svn_location = get_svn_location(dist, dependency_links)
                if not svn_location:
                    logger.warn(
                        'Warning: cannot find svn location for %s' % req)
                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
                else:
                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        # presumably svn's '{date}' revision syntax -- confirm
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = 'svn+%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        """Return dist's egg name with any trailing '-pyX.Y' tag removed."""
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        # Render as: comment lines, then the requirement ('-e <req>' when
        # editable), terminated by a newline.
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments)+[str(req)])+'\n'
+
def get_svn_location(dist, dependency_links):
    """Return the svn URL for ``dist`` from ``dependency_links``, or None.

    A link matches when its '#egg=...' fragment names the same project as
    ``dist.key``; the returned URL has the fragment stripped.
    """
    # (Bug fix: the original also compiled an ``egg_fragment_re`` here that
    # was never used -- Link.egg_fragment already does that parsing.)
    for url in dependency_links:
        egg_fragment = Link(url).egg_fragment
        if not egg_fragment:
            continue
        if '-' in egg_fragment:
            ## FIXME: will this work when a package has - in the name?
            key = '-'.join(egg_fragment.split('-')[:-1]).lower()
        else:
            # Bug fix: lower-case this branch too -- dist.key is always
            # lower-cased by pkg_resources, so e.g. '#egg=Foo' never
            # matched before.
            key = egg_fragment.lower()
        if key == dist.key:
            return url.split('#', 1)[0]
    return None
+
def get_src_requirement(dist, location, find_tags):
    """Return a frozen requirement string ('svn+URL...#egg=...') for the
    svn checkout of ``dist`` at ``location``.

    Falls back to dist.as_requirement() when ``location`` is not an svn
    checkout, and returns None when the svn URL cannot be determined.
    When ``find_tags`` is true, a trunk checkout whose revision exactly
    matches a tag is frozen as that tag instead.
    """
    if not os.path.exists(os.path.join(location, '.svn')):
        logger.warn('cannot determine version of editable source in %s (is not svn checkout)' % location)
        return dist.as_requirement()
    repo = get_svn_url(location)
    if repo is None:
        return None
    parts = repo.split('/')
    ## FIXME: why not project name?
    egg_project_name = dist.egg_name().split('-', 1)[0]
    if parts[-2] in ('tags', 'tag'):
        # It's a tag, perfect!
        return 'svn+%s#egg=%s-%s' % (repo, egg_project_name, parts[-1])
    elif parts[-2] in ('branches', 'branch'):
        # It's a branch :(
        rev = get_svn_revision(location)
        # NOTE(review): this branch uses dist.egg_name() (which includes the
        # version) where other branches use egg_project_name, and there is
        # no separator between it and the branch name -- confirm the
        # intended egg format here.
        return 'svn+%s@%s#egg=%s%s-r%s' % (repo, rev, dist.egg_name(), parts[-1], rev)
    elif parts[-1] == 'trunk':
        # Trunk :-/
        rev = get_svn_revision(location)
        if find_tags:
            tag_url = '/'.join(parts[:-1]) + '/tags'
            tag_revs = get_tag_revs(tag_url)
            match = find_tag_match(rev, tag_revs)
            if match:
                # NOTE(review): the format string below takes two values but
                # ``match`` is passed alone, while the return line formats
                # ``match`` as a single value -- one of the two looks wrong;
                # verify what find_tag_match returns.
                logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
                return 'svn+%s/%s#egg=%s-%s' % (tag_url, match, egg_project_name, match)
        return 'svn+%s@%s#egg=%s-dev' % (repo, rev, dist.egg_name())
    else:
        # Don't know what it is
        logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
        rev = get_svn_revision(location)
        # NOTE(review): unlike every other return in this function, this one
        # lacks the 'svn+' scheme prefix -- probably should be
        # 'svn+%s@%s#egg=%s-dev'.
        return '%s@%s#egg=%s-dev' % (repo, rev, egg_project_name)
+
+_svn_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile('committed-rev="(\d+)"')
+
+def get_svn_revision(location):
+ """
+ Return the maximum revision for all files under a given location
+ """
+ # Note: taken from setuptools.command.egg_info
+ revision = 0
+
+ for base, dirs, files in os.walk(location):
+ if '.svn' not in dirs:
+ dirs[:] = []
+ continue # no sense walking uncontrolled subdirs
+ dirs.remove('.svn')
+ entries_fn = os.path.join(base, '.svn', 'entries')
+ if not os.path.exists(entries_fn):
+ ## FIXME: should we warn?
+ continue
+ f = open(entries_fn)
+ data = f.read()
+ f.close()
+
+ if data.startswith('8') or data.startswith('9'):
+ data = map(str.splitlines,data.split('\n\x0c\n'))
+ del data[0][0] # get rid of the '8'
+ dirurl = data[0][3]
+ revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
+ if revs:
+ localrev = max(revs)
+ else:
+ localrev = 0
+ elif data.startswith('<?xml'):
+ dirurl = _svn_url_re.search(data).group(1) # get repository URL
+ revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
+ if revs:
+ lo