Commit e80db16
Allow passing a --no-progress-bar to the install script to suppress progress bar
AvnerCohen committed Dec 22, 2016
1 parent 0999d91 commit e80db16
Showing 4 changed files with 28 additions and 10 deletions.
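
For orientation, a hedged usage sketch (not part of the commit): after this change the flag is simply passed on the pip install command line, e.g. pip install --no-progress-bar requests. The programmatic form below uses pip.main, which existed in the pip 9.x era but was removed in pip 10, so treat it as an illustration only.

    # Illustration only: invoking the new flag the way a user would.
    # Command-line form:
    #     pip install --no-progress-bar requests
    # Programmatic form for pip 9.x (pip.main is gone in pip >= 10):
    import pip

    if __name__ == "__main__":
        # "requests" is just an example package name.
        pip.main(["install", "--no-progress-bar", "requests"])
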
10 changes: 10 additions & 0 deletions pip/cmdoptions.py
@@ -119,6 +119,15 @@ def getname(n):
           ' levels).')
 )
 
+no_progress_bar = partial(
+    Option,
+    "--no-progress-bar",
+    dest="no_progress_bar",
+    action="store_true",
+    default=False,
+    help="Suppress progress bar.")
+
+
 log = partial(
     Option,
     "--log", "--log-file", "--local-log",
@@ -529,6 +538,7 @@ def only_binary():
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.")


# Deprecated, Remove later
always_unzip = partial(
Option,
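
A minimal, self-contained sketch of the pattern cmdoptions.py relies on: each option is declared once as a partial(Option, ...) factory, and every command that wants it calls the factory to get a fresh Option instance for its parser. The parser below is a plain optparse.OptionParser, not pip's own option machinery.

    from functools import partial
    from optparse import Option, OptionParser

    # Factory in the style of pip/cmdoptions.py: calling no_progress_bar()
    # yields a new Option each time a command registers it.
    no_progress_bar = partial(
        Option,
        "--no-progress-bar",
        dest="no_progress_bar",
        action="store_true",
        default=False,
        help="Suppress progress bar.",
    )

    parser = OptionParser()
    parser.add_option(no_progress_bar())

    opts, _ = parser.parse_args(["--no-progress-bar"])
    print(opts.no_progress_bar)  # True
    opts, _ = parser.parse_args([])
    print(opts.no_progress_bar)  # False (the default)
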
2 changes: 2 additions & 0 deletions pip/commands/install.py
@@ -181,6 +181,7 @@ def __init__(self, *args, **kw):
         cmd_opts.add_option(cmdoptions.pre())
         cmd_opts.add_option(cmdoptions.no_clean())
         cmd_opts.add_option(cmdoptions.require_hashes())
+        cmd_opts.add_option(cmdoptions.no_progress_bar())
 
         index_opts = cmdoptions.make_option_group(
             cmdoptions.index_group,
@@ -306,6 +307,7 @@ def run(self, options, args):
                     isolated=options.isolated_mode,
                     wheel_cache=wheel_cache,
                     require_hashes=options.require_hashes,
+                    progress_bar=(not options.no_progress_bar),
                 )
 
                 self.populate_requirement_set(
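
The only subtlety on the install command side is the sign flip: the user-facing switch is negative (--no-progress-bar) while the internal keyword stays positive (progress_bar). A small illustrative sketch of that inversion, using plain optparse rather than pip's option groups; the helper name and returned dict are hypothetical:

    from optparse import OptionParser

    def requirement_set_kwargs(argv):
        """Hypothetical stand-in for what install.py does when it builds
        RequirementSet: translate the negative CLI switch into the positive
        internal keyword."""
        parser = OptionParser()
        parser.add_option("--no-progress-bar", dest="no_progress_bar",
                          action="store_true", default=False)
        options, _ = parser.parse_args(argv)
        return {"progress_bar": (not options.no_progress_bar)}

    print(requirement_set_kwargs(["--no-progress-bar"]))  # {'progress_bar': False}
    print(requirement_set_kwargs([]))                     # {'progress_bar': True}
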
20 changes: 12 additions & 8 deletions pip/download.py
@@ -514,16 +514,17 @@ def _progress_indicator(iterable, *args, **kwargs):
     return iterable
 
 
-def _download_url(resp, link, content_file, hashes):
+def _download_url(resp, link, content_file, hashes, progress_bar):
     try:
         total_length = int(resp.headers['content-length'])
     except (ValueError, KeyError, TypeError):
         total_length = 0
 
     cached_resp = getattr(resp, "from_cache", False)
 
     if logger.getEffectiveLevel() > logging.INFO:
         show_progress = False
+    elif not progress_bar:
+        show_progress = False
     elif cached_resp:
         show_progress = False
     elif total_length > (40 * 1000):
@@ -638,7 +639,7 @@ def _copy_file(filename, location, link):


 def unpack_http_url(link, location, download_dir=None,
-                    session=None, hashes=None):
+                    session=None, hashes=None, progress_bar=True):
     if session is None:
         raise TypeError(
             "unpack_http_url() missing 1 required keyword argument: 'session'"
@@ -661,7 +662,8 @@ def unpack_http_url(link, location, download_dir=None,
         from_path, content_type = _download_http_url(link,
                                                       session,
                                                       temp_dir,
-                                                      hashes)
+                                                      hashes,
+                                                      progress_bar)
 
     # unpack the archive to the build dir location. even when only downloading
     # archives, they have to be unpacked to parse dependencies
@@ -790,7 +792,8 @@ def request(self, host, handler, request_body, verbose=False):


 def unpack_url(link, location, download_dir=None,
-               only_download=False, session=None, hashes=None):
+               only_download=False, session=None, hashes=None,
+               progress_bar=True):
     """Unpack link.
     If link is a VCS link:
       if only_download, export into download_dir and ignore location
@@ -823,13 +826,14 @@ def unpack_url(link, location, download_dir=None,
             location,
             download_dir,
             session,
-            hashes=hashes
+            hashes=hashes,
+            progress_bar=progress_bar
         )
     if only_download:
         write_delete_marker_file(location)
 
 
-def _download_http_url(link, session, temp_dir, hashes):
+def _download_http_url(link, session, temp_dir, hashes, progress_bar):
     """Download link url into temp_dir using provided session"""
     target_url = link.url.split('#', 1)[0]
     try:
@@ -884,7 +888,7 @@ def _download_http_url(link, session, temp_dir, hashes):
         filename += ext
     file_path = os.path.join(temp_dir, filename)
     with open(file_path, 'wb') as content_file:
-        _download_url(resp, link, content_file, hashes)
+        _download_url(resp, link, content_file, hashes, progress_bar)
     return file_path, content_type
 
 
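
The download-side change amounts to one extra branch in the decision of whether to show a progress indicator. Below is a self-contained sketch of that cascade, with the names and the 40 * 1000 threshold taken from the hunk above; the wrapper function and the final fallback are assumptions, since the diff does not show the remaining branches.

    import logging

    logging.basicConfig(level=logging.INFO)  # so the sketch is not silenced by default
    logger = logging.getLogger("sketch")

    def should_show_progress(total_length, cached_resp, progress_bar):
        """Mirror the cascade in _download_url after this commit."""
        if logger.getEffectiveLevel() > logging.INFO:
            return False          # quiet logging suppresses the bar
        elif not progress_bar:
            return False          # the new --no-progress-bar path
        elif cached_resp:
            return False          # responses served from the local cache
        elif total_length > (40 * 1000):
            return True           # large enough download to be worth a bar
        # The original continues with branches for small/unknown sizes that the
        # hunk above does not show; False here is only a placeholder.
        return False

    print(should_show_progress(200 * 1000, cached_resp=False, progress_bar=True))   # True
    print(should_show_progress(200 * 1000, cached_resp=False, progress_bar=False))  # False
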
6 changes: 4 additions & 2 deletions pip/req/req_set.py
@@ -147,7 +147,7 @@ def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                  force_reinstall=False, use_user_site=False, session=None,
                  pycompile=True, isolated=False, wheel_download_dir=None,
                  wheel_cache=None, require_hashes=False,
-                 ignore_requires_python=False):
+                 ignore_requires_python=False, progress_bar=True):
         """Create a RequirementSet.
 
         :param wheel_download_dir: Where still-packed .whl files should be
@@ -182,6 +182,7 @@ def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
         self.unnamed_requirements = []
         self.ignore_dependencies = ignore_dependencies
         self.ignore_requires_python = ignore_requires_python
+        self.progress_bar = progress_bar
         self.successfully_downloaded = []
         self.successfully_installed = []
         self.reqs_to_cleanup = []
@@ -618,7 +619,8 @@ def _prepare_file(self,
                     unpack_url(
                         req_to_install.link, req_to_install.source_dir,
                         download_dir, autodelete_unpacked,
-                        session=self.session, hashes=hashes)
+                        session=self.session, hashes=hashes,
+                        progress_bar=self.progress_bar)
                 except requests.HTTPError as exc:
                     logger.critical(
                         'Could not install requirement %s because '
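
The req_set.py half is the standard keyword-threading pattern: accept progress_bar in the constructor, stash it on the instance, and forward it at the unpack_url call site. A compressed, hypothetical miniature follows (unpack_url is a stub, and the class is not pip's real RequirementSet).

    def unpack_url(link, location, download_dir=None,
                   only_download=False, session=None, hashes=None,
                   progress_bar=True):
        # Stub standing in for pip.download.unpack_url.
        print("unpack_url called with progress_bar=%s" % progress_bar)

    class RequirementSetSketch(object):
        def __init__(self, session=None, progress_bar=True):
            self.session = session
            self.progress_bar = progress_bar

        def _prepare_file(self, link, source_dir, download_dir, hashes=None):
            # Forward the stored flag exactly as the diff does.
            unpack_url(link, source_dir, download_dir,
                       session=self.session, hashes=hashes,
                       progress_bar=self.progress_bar)

    RequirementSetSketch(progress_bar=False)._prepare_file(
        "https://example.com/pkg.tar.gz", "/tmp/src", "/tmp/dl")
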
