Permalink
Browse files

CHUNKED REQUESTS!

  • Loading branch information...
1 parent 4a5b5bc commit ef8563ab36c6b52834ee9c35f6f75a424cd9ceef @kennethreitz kennethreitz committed Jan 10, 2013
Showing with 123 additions and 39 deletions.
  1. +43 −13 requests/adapters.py
  2. +51 −26 requests/models.py
  3. +22 −0 requests/structures.py
  4. +7 −0 requests/utils.py
View
@@ -12,6 +12,7 @@
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
+from .packages.urllib3.response import HTTPResponse
from .hooks import dispatch_hook
from .compat import urlparse, basestring, urldefrag
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
@@ -154,23 +155,52 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
conn = self.get_connection(request.url, proxies)
self.cert_verify(conn, request.url, verify, cert)
-
url = self.request_url(request, proxies)
+ chunked = not (request.body is None or 'Content-Length' in request.headers)
+
try:
+ if not chunked:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout
+ )
+
# Send the request.
- resp = conn.urlopen(
- method=request.method,
- url=url,
- body=request.body,
- headers=request.headers,
- redirect=False,
- assert_same_host=False,
- preload_content=False,
- decode_content=False,
- retries=self.max_retries,
- timeout=timeout,
- )
+ else:
+ if hasattr(conn, 'proxy_pool'):
+ conn = conn.proxy_pool
+
+ low_conn = conn._get_conn(timeout=timeout)
+ low_conn.putrequest(request.method, url, skip_accept_encoding=True)
+
+ for header, value in request.headers.items():
+ low_conn.putheader(header, value)
+
+ low_conn.endheaders()
+
+ for i in request.body:
+ low_conn.send(hex(len(i))[2:].encode('utf-8'))
+ low_conn.send(b'\r\n')
+ low_conn.send(i)
+ low_conn.send(b'\r\n')
+ low_conn.send(b'0\r\n\r\n')
+
+ r = low_conn.getresponse()
+ resp = HTTPResponse.from_httplib(r,
+ pool=conn,
+ connection=low_conn,
+ preload_content=False,
+ decode_content=False
+ )
except socket.error as sockerr:
raise ConnectionError(sockerr)
View
@@ -22,7 +22,7 @@
from .utils import (
stream_untransfer, guess_filename, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
- iter_slices, guess_json_utf)
+ iter_slices, guess_json_utf, super_len)
from .compat import (
cookielib, urlparse, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring)
@@ -321,37 +321,62 @@ def prepare_headers(self, headers):
def prepare_body(self, data, files):
    """Prepares the given HTTP body data.

    ``data`` may be a dict or sequence of key/value pairs, a string, a
    file-like object, or an arbitrary iterable; ``files`` triggers a
    multipart/form-data encode. Iterable (streamed) bodies are sent with
    an explicit Content-Length when their size can be determined up
    front, otherwise with ``Transfer-Encoding: chunked``.

    Sets ``self.body`` and the Content-Length / Content-Type /
    Transfer-Encoding headers as appropriate.

    :raises NotImplementedError: if both a streamed body and ``files``
        are supplied — the two are mutually exclusive.
    """
    body = None
    content_type = None
    length = None

    # A "stream" is any iterable that is not a plain string or a dict
    # (strings and dicts go through the urlencode/multipart path below).
    is_stream = all([
        hasattr(data, '__iter__'),
        not isinstance(data, basestring),
        not isinstance(data, dict)
    ])

    try:
        length = super_len(data)
    except (TypeError, AttributeError):
        # No usable notion of length: treat the size as unknown.
        length = False

    if is_stream:
        body = data

        if files:
            raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

        if length:
            # BUG FIX: header values must be strings — this previously
            # stored a raw int, inconsistent with every other
            # Content-Length assignment in this method.
            self.headers['Content-Length'] = str(length)
        else:
            # Size unknown up front: fall back to chunked transfer.
            self.headers['Transfer-Encoding'] = 'chunked'
    else:
        # Multi-part file uploads.
        if files:
            (body, content_type) = self._encode_files(files, data)
        else:
            if data:
                body = self._encode_params(data)
                # Strings and file-like objects are passed through as-is;
                # only structured data gets the form-urlencoded type.
                if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'):
                    content_type = None
                else:
                    content_type = 'application/x-www-form-urlencoded'

        self.headers['Content-Length'] = '0'
        if hasattr(body, 'seek') and hasattr(body, 'tell'):
            # File-like body: measure by seeking to the end, then rewind.
            body.seek(0, 2)
            self.headers['Content-Length'] = str(body.tell())
            body.seek(0, 0)
        elif body is not None:
            self.headers['Content-Length'] = str(len(body))

        # Add content-type if it wasn't explicitly provided.
        if (content_type) and (not 'content-type' in self.headers):
            self.headers['Content-Type'] = content_type

    self.body = body
View
@@ -8,6 +8,28 @@
"""
+import os
+from itertools import islice
+
class IteratorProxy(object):
    """File-like wrapper around an iterator.

    ``__iter__`` hands back the wrapped iterator itself, ``read(n)``
    drains up to *n* items and joins them into a string, and
    ``__len__`` reports a best-effort size: sequence length, an
    explicit ``len`` attribute, or the on-disk size of a real file
    exposed through ``fileno()``. When none of those apply it falls
    through to ``None``, so ``len()`` on such a proxy raises TypeError
    — kept as-is, callers appear to rely on that failure mode.
    """

    def __init__(self, i):
        # The wrapped iterable; iteration and read() consume it in place.
        self.i = i
        # self.i = chain.from_iterable(i)

    def __iter__(self):
        # Expose the underlying iterator directly — no copy is made.
        return self.i

    def __len__(self):
        wrapped = self.i
        if hasattr(wrapped, '__len__'):
            return len(wrapped)
        if hasattr(wrapped, 'len'):
            return wrapped.len
        if hasattr(wrapped, 'fileno'):
            return os.fstat(wrapped.fileno()).st_size

    def read(self, n):
        # Consume at most n items from the iterator and join them as text.
        return "".join(islice(self.i, n))
class CaseInsensitiveDict(dict):
"""Case-insensitive Dictionary
View
@@ -40,6 +40,13 @@ def dict_to_sequence(d):
return d
def super_len(o):
    """Best-effort size of *o*.

    Checks, in order: the sequence protocol (``len()``), an explicit
    ``len`` attribute, and finally the on-disk size of a real file
    reached through ``fileno()``. Falls through to ``None`` when the
    object supports none of these.
    """
    sentinel = object()
    if getattr(o, '__len__', sentinel) is not sentinel:
        return len(o)
    explicit = getattr(o, 'len', sentinel)
    if explicit is not sentinel:
        return explicit
    fileno = getattr(o, 'fileno', sentinel)
    if fileno is not sentinel:
        return os.fstat(fileno()).st_size
def get_netrc_auth(url):
"""Returns the Requests tuple auth for a given url from netrc."""

0 comments on commit ef8563a

Please sign in to comment.