Upgrade requests to 2.5.1
jamesls committed Feb 5, 2015
1 parent 30bd64c commit 0ed96b7
Showing 77 changed files with 7,675 additions and 6,077 deletions.
12 changes: 6 additions & 6 deletions botocore/vendored/requests/__init__.py
@@ -13,7 +13,7 @@
 usage:

    >>> import requests
-   >>> r = requests.get('http://python.org')
+   >>> r = requests.get('https://www.python.org')
    >>> r.status_code
    200
    >>> 'Python is a programming language' in r.content
@@ -22,7 +22,7 @@
 ... or POST:

    >>> payload = dict(key1='value1', key2='value2')
-   >>> r = requests.post("http://httpbin.org/post", data=payload)
+   >>> r = requests.post('http://httpbin.org/post', data=payload)
    >>> print(r.text)
    {
      ...
@@ -36,17 +36,17 @@
 The other HTTP methods are supported - see `requests.api`. Full documentation
 is at <http://python-requests.org>.

-:copyright: (c) 2013 by Kenneth Reitz.
+:copyright: (c) 2014 by Kenneth Reitz.
 :license: Apache 2.0, see LICENSE for more details.

 """

 __title__ = 'requests'
-__version__ = '2.0.1'
-__build__ = 0x020001
+__version__ = '2.5.1'
+__build__ = 0x020501
 __author__ = 'Kenneth Reitz'
 __license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2013 Kenneth Reitz'
+__copyright__ = 'Copyright 2014 Kenneth Reitz'

 # Attempt to enable urllib3's SNI support, if possible
 try:
193 changes: 128 additions & 65 deletions botocore/vendored/requests/adapters.py
@@ -11,20 +11,25 @@
 import socket

 from .models import Response
+from .packages.urllib3 import Retry
 from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
 from .packages.urllib3.response import HTTPResponse
 from .packages.urllib3.util import Timeout as TimeoutSauce
-from .compat import urlparse, basestring, urldefrag, unquote
+from .compat import urlparse, basestring
 from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
-                    except_on_missing_scheme, get_auth_from_url)
+                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth)
 from .structures import CaseInsensitiveDict
-from .packages.urllib3.exceptions import MaxRetryError
-from .packages.urllib3.exceptions import TimeoutError
-from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ConnectTimeoutError
 from .packages.urllib3.exceptions import HTTPError as _HTTPError
+from .packages.urllib3.exceptions import MaxRetryError
 from .packages.urllib3.exceptions import ProxyError as _ProxyError
+from .packages.urllib3.exceptions import ProtocolError
+from .packages.urllib3.exceptions import ReadTimeoutError
+from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ResponseError
 from .cookies import extract_cookies_to_jar
-from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
+from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
+                         ProxyError, RetryError)
 from .auth import _basic_auth_str

 DEFAULT_POOLBLOCK = False
@@ -55,14 +60,20 @@ class HTTPAdapter(BaseAdapter):
     :param pool_connections: The number of urllib3 connection pools to cache.
     :param pool_maxsize: The maximum number of connections to save in the pool.
-    :param max_retries: The maximum number of retries each connection should attempt.
+    :param int max_retries: The maximum number of retries each connection
+        should attempt. Note, this applies only to failed DNS lookups, socket
+        connections and connection timeouts, never to requests where data has
+        made it to the server. By default, Requests does not retry failed
+        connections. If you need granular control over the conditions under
+        which we retry a request, import urllib3's ``Retry`` class and pass
+        that instead.
     :param pool_block: Whether the connection pool should block for connections.

     Usage::

       >>> import requests
       >>> s = requests.Session()
-      >>> a = requests.adapters.HTTPAdapter()
+      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
       >>> s.mount('http://', a)
     """
     __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
@@ -71,7 +82,10 @@ class HTTPAdapter(BaseAdapter):
     def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                  pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                  pool_block=DEFAULT_POOLBLOCK):
-        self.max_retries = max_retries
+        if max_retries == DEFAULT_RETRIES:
+            self.max_retries = Retry(0, read=False)
+        else:
+            self.max_retries = Retry.from_int(max_retries)
         self.config = {}
         self.proxy_manager = {}
@@ -88,28 +102,59 @@ def __getstate__(self):
                 self.__attrs__)

     def __setstate__(self, state):
         # Can't handle by adding 'proxy_manager' to self.__attrs__ because
         # because self.poolmanager uses a lambda function, which isn't pickleable.
         self.proxy_manager = {}
         self.config = {}

         for attr, value in state.items():
             setattr(self, attr, value)

         self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                               block=self._pool_block)

-    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
-        """Initializes a urllib3 PoolManager. This method should not be called
-        from user code, and is only exposed for use when subclassing the
+    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+        """Initializes a urllib3 PoolManager.
+
+        This method should not be called from user code, and is only
+        exposed for use when subclassing the
         :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

         :param connections: The number of urllib3 connection pools to cache.
         :param maxsize: The maximum number of connections to save in the pool.
         :param block: Block when no free connections are available.
+        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
         """
         # save these values for pickling
         self._pool_connections = connections
         self._pool_maxsize = maxsize
         self._pool_block = block

         self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
-                                       block=block)
+                                       block=block, strict=True, **pool_kwargs)
+
+    def proxy_manager_for(self, proxy, **proxy_kwargs):
+        """Return urllib3 ProxyManager for the given proxy.
+
+        This method should not be called from user code, and is only
+        exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param proxy: The proxy to return a urllib3 ProxyManager for.
+        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+        :returns: ProxyManager
+        """
+        if not proxy in self.proxy_manager:
+            proxy_headers = self.proxy_headers(proxy)
+            self.proxy_manager[proxy] = proxy_from_url(
+                proxy,
+                proxy_headers=proxy_headers,
+                num_pools=self._pool_connections,
+                maxsize=self._pool_maxsize,
+                block=self._pool_block,
+                **proxy_kwargs)
+
+        return self.proxy_manager[proxy]

     def cert_verify(self, conn, url, verify, cert):
         """Verify a SSL certificate. This method should not be called from user
@@ -196,17 +241,14 @@ def get_connection(self, url, proxies=None):
         proxy = proxies.get(urlparse(url.lower()).scheme)

         if proxy:
-            except_on_missing_scheme(proxy)
-            proxy_headers = self.proxy_headers(proxy)
-
-            if not proxy in self.proxy_manager:
-                self.proxy_manager[proxy] = proxy_from_url(
-                    proxy,
-                    proxy_headers=proxy_headers)
-
-            conn = self.proxy_manager[proxy].connection_from_url(url)
+            proxy = prepend_scheme_if_needed(proxy, 'http')
+            proxy_manager = self.proxy_manager_for(proxy)
+            conn = proxy_manager.connection_from_url(url)
         else:
-            conn = self.poolmanager.connection_from_url(url.lower())
+            # Only scheme should be lower case
+            parsed = urlparse(url)
+            url = parsed.geturl()
+            conn = self.poolmanager.connection_from_url(url)

         return conn
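
The "Only scheme should be lower case" change swaps `url.lower()` for a `urlparse()`/`geturl()` round trip, which normalizes just the scheme. A quick sketch of the difference (the URL is illustrative):

```python
from requests.compat import urlparse

url = 'HTTP://example.com/Case/Sensitive?Token=AbC'
parsed = urlparse(url)
# urlparse() lowercases only the scheme; the case-sensitive path
# and query survive untouched.
print(parsed.geturl())  # http://example.com/Case/Sensitive?Token=AbC
print(url.lower())      # http://example.com/case/sensitive?token=abc (old behaviour)
```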

@@ -232,11 +274,11 @@ def request_url(self, request, proxies):
         :param proxies: A dictionary of schemes to proxy URLs.
         """
         proxies = proxies or {}
-        scheme = urlparse(request.url).scheme.lower()
+        scheme = urlparse(request.url).scheme
         proxy = proxies.get(scheme)

         if proxy and scheme != 'https':
-            url, _ = urldefrag(request.url)
+            url = urldefragauth(request.url)
         else:
             url = request.path_url
@@ -273,10 +315,6 @@ def proxy_headers(self, proxy):
         username, password = get_auth_from_url(proxy)

         if username and password:
-            # Proxy auth usernames and passwords will be urlencoded, we need
-            # to decode them.
-            username = unquote(username)
-            password = unquote(password)
             headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                              password)
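
The `unquote()` calls could be dropped here because, in this release, `get_auth_from_url()` appears to percent-decode the credentials itself before returning them. A minimal sketch using the helpers imported above (the proxy URL is made up):

```python
from requests.utils import get_auth_from_url
from requests.auth import _basic_auth_str

# %40 in the password is decoded before the Basic credentials are built.
username, password = get_auth_from_url('http://user:p%40ss@proxy.local:3128')
print(username, password)                   # user p@ss
print(_basic_auth_str(username, password))  # Basic dXNlcjpwQHNz
```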

@@ -287,9 +325,12 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
         :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
         :param stream: (optional) Whether to stream the request content.
-        :param timeout: (optional) The timeout on the request.
+        :param timeout: (optional) How long to wait for the server to send
+            data before giving up, as a float, or a (`connect timeout, read
+            timeout <user/advanced.html#timeouts>`_) tuple.
+        :type timeout: float or tuple
         :param verify: (optional) Whether to verify SSL certificates.
-        :param vert: (optional) Any user-provided SSL certificate to be trusted.
+        :param cert: (optional) Any user-provided SSL certificate to be trusted.
         :param proxies: (optional) The proxies dictionary to apply to the request.
         """

@@ -301,8 +342,16 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
         chunked = not (request.body is None or 'Content-Length' in request.headers)

-        if stream:
-            timeout = TimeoutSauce(connect=timeout)
+        if isinstance(timeout, tuple):
+            try:
+                connect, read = timeout
+                timeout = TimeoutSauce(connect=connect, read=read)
+            except ValueError as e:
+                # this may raise a string formatting error.
+                err = ("Invalid timeout {0}. Pass a (connect, read) "
+                       "timeout tuple, or a single float to set "
+                       "both timeouts to the same value".format(timeout))
+                raise ValueError(err)
         else:
             timeout = TimeoutSauce(connect=timeout, read=timeout)
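
The tuple handling above is what makes the user-facing `(connect, read)` timeout form work; together with the new exception mapping later in this hunk, each phase can time out and be caught separately. A minimal usage sketch (the httpbin URL and values are illustrative):

```python
import requests
from requests.exceptions import ConnectTimeout, ReadTimeout

try:
    # 3.05s to establish the connection, then 1s for the server to respond.
    r = requests.get('http://httpbin.org/delay/2', timeout=(3.05, 1))
except ConnectTimeout:
    print('connect phase timed out')
except ReadTimeout:
    print('connected, but the read timed out')  # /delay/2 exceeds 1s
```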

Expand All @@ -327,48 +376,62 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
conn = conn.proxy_pool

low_conn = conn._get_conn(timeout=timeout)
low_conn.putrequest(request.method, url, skip_accept_encoding=True)

for header, value in request.headers.items():
low_conn.putheader(header, value)

low_conn.endheaders()

for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)

for header, value in request.headers.items():
low_conn.putheader(header, value)

low_conn.endheaders()

for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')

r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
else:
# All is well, return the connection to the pool.
conn._put_conn(low_conn)

except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)

r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
raise ConnectTimeout(e, request=request)

except socket.error as sockerr:
raise ConnectionError(sockerr)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)

except MaxRetryError as e:
raise ConnectionError(e)
raise ConnectionError(e, request=request)

except _ProxyError as e:
raise ProxyError(e)

except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
raise SSLError(e)
elif isinstance(e, TimeoutError):
raise Timeout(e)
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
else:
raise

r = self.build_response(request, resp)

if not stream:
r.content

return r
return self.build_response(request, resp)
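
The low-level chunked path above, now wrapped in try/except/else so a broken connection is closed rather than returned to the pool, is taken when the body's length cannot be computed, e.g. a generator. A minimal sketch of triggering it (the httpbin URL is illustrative):

```python
import requests

def body():
    # No Content-Length can be computed for a generator, so
    # HTTPAdapter.send() takes the chunked branch and frames each
    # yielded piece as one HTTP/1.1 chunk.
    yield b'first chunk'
    yield b'second chunk'

r = requests.post('http://httpbin.org/post', data=body())
print(r.status_code)
```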
