Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use the proxy option in the search command #932

Closed
wants to merge 7 commits into from
6 changes: 5 additions & 1 deletion pip/commands/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,11 @@ def run(self, options, args):
return NO_MATCHES_FOUND

def search(self, query, index_url):
    """Query the package index's XML-RPC interface for *query*.

    Matches against both the ``name`` and ``summary`` fields and returns
    the raw hit list from the server.
    """
    from pip.backwardcompat import urlparse

    # Reuse pip's shared proxy-aware transport so the --proxy option is
    # honored; flip its https flag to match the index URL's scheme
    # before handing it to the ServerProxy.
    transport = pip.download.xmlrpclib_transport
    transport.https = (urlparse.urlparse(index_url).scheme == 'https')
    server = xmlrpclib.ServerProxy(index_url, transport)
    return server.search({'name': query, 'summary': query}, 'or')

Expand Down
26 changes: 24 additions & 2 deletions pip/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

import pip

from pip.backwardcompat import (urllib, urllib2, httplib,
from pip.backwardcompat import (xmlrpclib, urllib, urllib2, httplib,
urlparse, string_types, get_http_message_param,
match_hostname, CertificateError)
from pip.exceptions import InstallationError, HashMismatch
Expand All @@ -24,12 +24,32 @@
from pip.log import logger
from pip.locations import default_cert_path

__all__ = ['get_file_content', 'urlopen',
__all__ = ['xmlrpclib_transport', 'get_file_content', 'urlopen',
'is_url', 'url_to_path', 'path_to_url', 'path_to_url2',
'geturl', 'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']


class Urllib2Transport(xmlrpclib.Transport):
    """An xmlrpclib transport that issues its HTTP(S) requests through a
    urllib2 opener, so proxy handlers and other urllib2 machinery apply
    to XML-RPC traffic as well."""

    def __init__(self, opener=None, https=False, use_datetime=0):
        xmlrpclib.Transport.__init__(self, use_datetime)
        # Fall back to a plain default opener when none is supplied.
        self.opener = opener or urllib2.build_opener()
        self.https = https

    def request(self, host, handler, request_body, verbose=0):
        # xmlrpclib hands us host and handler separately; rebuild the
        # full URL with the scheme chosen by the https flag.
        scheme = 'https' if self.https else 'http'
        url = '%s://%s%s' % (scheme, host, handler)
        req = urllib2.Request(url, request_body)
        req.add_header('User-agent', self.user_agent)
        req.add_header('Content-Type', 'text/xml')
        self.verbose = verbose
        return self.parse_response(self.opener.open(req))

class ProxiedTransport(Urllib2Transport):
    """A Urllib2Transport whose opener is built from a proxy handler.

    When *proxy_handler* is falsy, no opener is passed down and
    Urllib2Transport falls back to its default opener.
    """

    def __init__(self, proxy_handler):
        # The original used the `x and f(x) or None` trick, which is the
        # classic broken conditional idiom (it misfires whenever the
        # middle operand is falsy); an explicit conditional expression
        # says the same thing safely and readably.
        opener = urllib2.build_opener(proxy_handler) if proxy_handler else None
        Urllib2Transport.__init__(self, opener)


def build_user_agent():
"""Return a string representing the user agent."""
_implementation = platform.python_implementation()
Expand Down Expand Up @@ -313,6 +333,8 @@ def get_proxy(self, proxystr=''):

# Module-level singleton opener shared by the rest of pip's download code.
urlopen = URLOpener()

# Shared XML-RPC transport built on the singleton opener's proxy handler,
# so XML-RPC calls (e.g. `pip search`) go through the configured proxy.
xmlrpclib_transport = ProxiedTransport(urlopen.proxy_handler)


def is_url(name):
"""Returns true if the name looks like a URL"""
Expand Down