Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Working towards HTTP/2 - httpx approach. #973

Closed
wants to merge 5 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions httpie/cli/argparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,7 @@ def _parse_items(self):
self.args.data = request_items.data
self.args.files = request_items.files
self.args.params = request_items.params
self.args.removed_headers = request_items.removed_headers

if self.args.files and not self.args.form:
# `http url @/path/to/file`
Expand Down
9 changes: 0 additions & 9 deletions httpie/cli/dicts.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,5 @@
from collections import OrderedDict

from requests.structures import CaseInsensitiveDict


class RequestHeadersDict(CaseInsensitiveDict):
"""
Headers are case-insensitive and multiple values are currently not supported.

"""


class RequestJSONDataDict(OrderedDict):
    # Order-preserving mapping for request JSON data items.
    pass
Expand Down
12 changes: 9 additions & 3 deletions httpie/cli/requestitems.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@
SEPARATOR_FILE_UPLOAD_TYPE, SEPARATOR_HEADER, SEPARATOR_HEADER_EMPTY,
SEPARATOR_QUERY_PARAM,
)
import httpx
from httpie.cli.dicts import (
RequestDataDict, RequestFilesDict, RequestHeadersDict, RequestJSONDataDict,
RequestDataDict, RequestFilesDict, RequestJSONDataDict,
RequestQueryParamsDict,
)
from httpie.cli.exceptions import ParseError
Expand All @@ -20,10 +21,11 @@
class RequestItems:

def __init__(self, as_form=False):
    """Container for request items parsed from the CLI arguments.

    :param as_form: when True, data items are collected as form data
        (``RequestDataDict``) instead of JSON data.
    """
    self.headers = httpx.Headers()
    self.data = RequestDataDict() if as_form else RequestJSONDataDict()
    self.files = RequestFilesDict()
    self.params = RequestQueryParamsDict()
    # Header names the user explicitly unset (``Header:`` CLI items);
    # consumed by `build_httpx_session()` to drop httpx's defaults.
    self.removed_headers = {}

@classmethod
def from_args(
Expand Down Expand Up @@ -69,7 +71,11 @@ def from_args(

for arg in request_item_args:
processor_func, target_dict = rules[arg.sep]
target_dict[arg.key] = processor_func(arg)
value = processor_func(arg)
if arg.sep == SEPARATOR_HEADER and value is None:
instance.removed_headers[arg.key] = None
else:
target_dict[arg.key] = value

return instance

Expand Down
141 changes: 65 additions & 76 deletions httpie/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,16 @@
from typing import Iterable, Union
from urllib.parse import urlparse, urlunparse

import requests
import httpx
# noinspection PyPackageRequirements
import urllib3
from httpie import __version__
from httpie.cli.dicts import RequestHeadersDict
from httpie.plugins.registry import plugin_manager
from httpie.sessions import get_httpie_session
from httpie.ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieHTTPSAdapter
from httpie.uploads import get_multipart_data_and_content_type
from httpie.utils import get_expired_cookies, repr_dict


urllib3.disable_warnings()

FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded; charset=utf-8'
JSON_CONTENT_TYPE = 'application/json'
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
Expand All @@ -31,7 +27,7 @@
def collect_messages(
args: argparse.Namespace,
config_dir: Path,
) -> Iterable[Union[requests.PreparedRequest, requests.Response]]:
) -> Iterable[Union[httpx.Request, httpx.Response]]:
httpie_session = None
httpie_session_headers = None
if args.session or args.session_read_only:
Expand All @@ -49,15 +45,16 @@ def collect_messages(
)
send_kwargs = make_send_kwargs(args)
send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
requests_session = build_requests_session(
httpx_session = build_httpx_session(
ssl_version=args.ssl_version,
ciphers=args.ciphers,
verify=bool(send_kwargs_mergeable_from_env['verify'])
verify=bool(send_kwargs_mergeable_from_env['verify']),
removed_headers=args.removed_headers
)

if httpie_session:
httpie_session.update_headers(request_kwargs['headers'])
requests_session.cookies = httpie_session.cookies
httpx_session.cookies = httpie_session.cookies
if args.auth_plugin:
# Save auth from CLI to HTTPie session.
httpie_session.auth = {
Expand All @@ -72,42 +69,37 @@ def collect_messages(
# TODO: reflect the split between request and send kwargs.
dump_request(request_kwargs)

request = requests.Request(**request_kwargs)
prepared_request = requests_session.prepare_request(request)
request = httpx_session.build_request(**request_kwargs)
if args.path_as_is:
prepared_request.url = ensure_path_as_is(
request.url = ensure_path_as_is(
orig_url=args.url,
prepped_url=prepared_request.url,
prepped_url=request.url,
)
if args.compress and prepared_request.body:
compress_body(prepared_request, always=args.compress > 1)
request.read()
if args.compress and request.content:
request = compress_body(request, always=args.compress > 1)
response_count = 0
expired_cookies = []
while prepared_request:
yield prepared_request
while request:
yield request
if not args.offline:
send_kwargs_merged = requests_session.merge_environment_settings(
url=prepared_request.url,
**send_kwargs_mergeable_from_env,
)
with max_headers(args.max_headers):
response = requests_session.send(
request=prepared_request,
**send_kwargs_merged,
response = httpx_session.send(
request=request,
**send_kwargs,
)

# noinspection PyProtectedMember
expired_cookies += get_expired_cookies(
headers=response.raw._original_response.msg._headers
)
# expired_cookies += get_expired_cookies(
# headers=response.raw._original_response.msg._headers
# )

response_count += 1
if response.next:
if response.next_request:
if args.max_redirects and response_count == args.max_redirects:
raise requests.TooManyRedirects
raise httpx.TooManyRedirects("Too many redirects", request=request)
if args.follow:
prepared_request = response.next
request = response.next_request
if args.all:
yield response
continue
Expand All @@ -116,7 +108,7 @@ def collect_messages(

if httpie_session:
if httpie_session.is_new() or not args.session_read_only:
httpie_session.cookies = requests_session.cookies
httpie_session.cookies = httpx_session.cookies
httpie_session.remove_cookies(
# TODO: take path & domain into account?
cookie['name'] for cookie in expired_cookies
Expand All @@ -137,74 +129,71 @@ def max_headers(limit):
http.client._MAXHEADERS = orig


def compress_body(request: httpx.Request, always: bool) -> httpx.Request:
    """Deflate-compress the request body.

    Rebuilds the request with compressed content when compression
    actually shrinks the body, or unconditionally when ``always`` is
    truthy (the user passed ``--compress`` more than once).

    NOTE(review): the caller must have called ``request.read()`` first
    so that ``request.content`` is populated (see `collect_messages()`).
    """
    deflater = zlib.compressobj()
    deflated_data = deflater.compress(request.content)
    deflated_data += deflater.flush()
    is_economical = len(deflated_data) < len(request.content)
    if is_economical or always:
        # Build a fresh httpx.Request carrying the compressed payload,
        # then correct the content headers on it.
        request = httpx.Request(
            method=request.method,
            url=request.url,
            headers=request.headers,
            content=deflated_data,
        )
        request.headers['Content-Encoding'] = 'deflate'
        request.headers['Content-Length'] = str(len(deflated_data))
    return request


def build_requests_session(
def build_httpx_session(
verify: bool,
ssl_version: str = None,
ciphers: str = None,
) -> requests.Session:
requests_session = requests.Session()

# Install our adapter.
https_adapter = HTTPieHTTPSAdapter(
ciphers=ciphers,
verify=verify,
ssl_version=(
AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version]
if ssl_version else None
),
)
requests_session.mount('https://', https_adapter)

# Install adapters from plugins.
for plugin_cls in plugin_manager.get_transport_plugins():
transport_plugin = plugin_cls()
requests_session.mount(
prefix=transport_plugin.prefix,
adapter=transport_plugin.get_adapter(),
)

return requests_session
removed_headers: dict = None
) -> httpx.Client:
httpx_session = httpx.Client()

# # Install our adapter.
# https_adapter = HTTPieHTTPSAdapter(
# ciphers=ciphers,
# verify=verify,
# ssl_version=(
# AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version]
# if ssl_version else None
# ),
# )
# httpx_session.mount('https://', https_adapter)
#
# # Install adapters from plugins.
# for plugin_cls in plugin_manager.get_transport_plugins():
# transport_plugin = plugin_cls()
# httpx_session.mount(
# prefix=transport_plugin.prefix,
# adapter=transport_plugin.get_adapter(),
# )

if removed_headers is not None:
for header in removed_headers.keys():
httpx_session.headers.pop(header, None)

return httpx_session


def dump_request(kwargs: dict):
    """Write the would-be request kwargs to stderr (offline/debug aid)."""
    sys.stderr.write(
        f'\n>>> httpx.request(**{repr_dict(kwargs)})\n\n')


def finalize_headers(headers: httpx.Headers) -> httpx.Headers:
    """Return a copy of *headers* with values normalised for sending.

    Headers whose value is None (explicitly emptied) are skipped;
    remaining values are whitespace-stripped and str values encoded to
    UTF-8 bytes (<https://github.com/jakubroztocil/httpie/issues/212>).
    """
    final_headers = httpx.Headers()
    for name, value in headers.items():
        if value is not None:
            # “leading or trailing LWS MAY be removed without changing
            # the semantics of the field value”
            # <https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html>
            value = value.strip()
            if isinstance(value, str):
                value = value.encode('utf8')
            final_headers[name] = value
    return final_headers


def make_default_headers(args: argparse.Namespace) -> RequestHeadersDict:
default_headers = RequestHeadersDict({
def make_default_headers(args: argparse.Namespace) -> httpx.Headers:
default_headers = httpx.Headers({
'User-Agent': DEFAULT_UA
})

Expand All @@ -225,6 +214,7 @@ def make_send_kwargs(args: argparse.Namespace) -> dict:
kwargs = {
'timeout': args.timeout or None,
'allow_redirects': False,
'auth': args.auth,
}
return kwargs

Expand All @@ -251,10 +241,10 @@ def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:

def make_request_kwargs(
args: argparse.Namespace,
base_headers: RequestHeadersDict = None
base_headers: httpx.Headers = None
) -> dict:
"""
Translate our `args` into `requests.Request` keyword arguments.
Translate our `args` into `httpx.Request` keyword arguments.

"""
files = args.files
Expand Down Expand Up @@ -290,7 +280,6 @@ def make_request_kwargs(
'url': args.url,
'headers': headers,
'data': data,
'auth': args.auth,
'params': args.params,
'files': files,
}
Expand Down
12 changes: 6 additions & 6 deletions httpie/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
import sys
from typing import List, Union

import requests
import httpx
from pygments import __version__ as pygments_version
from requests import __version__ as requests_version
from httpx import __version__ as httpx_version

from httpie import __version__ as httpie_version
from httpie.client import collect_messages
Expand Down Expand Up @@ -84,10 +84,10 @@ def main(
if include_traceback:
raise
exit_status = ExitStatus.ERROR
except requests.Timeout:
except httpx.TimeoutException:
exit_status = ExitStatus.ERROR_TIMEOUT
env.log_error(f'Request timed out ({parsed_args.timeout}s).')
except requests.TooManyRedirects:
except httpx.TooManyRedirects:
exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
env.log_error(
f'Too many redirects'
Expand Down Expand Up @@ -141,7 +141,7 @@ def program(
env=env,
args=args,
)
if isinstance(message, requests.PreparedRequest):
if isinstance(message, httpx.Request):
if not initial_request:
initial_request = message
else:
Expand Down Expand Up @@ -191,7 +191,7 @@ def program(
def print_debug_info(env: Environment):
env.stderr.writelines([
f'HTTPie {httpie_version}\n',
f'Requests {requests_version}\n',
f'HTTPX {httpx_version}\n',
f'Pygments {pygments_version}\n',
f'Python {sys.version}\n{sys.executable}\n',
f'{platform.system()} {platform.release()}',
Expand Down
6 changes: 3 additions & 3 deletions httpie/downloads.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from typing import IO, Optional, Tuple
from urllib.parse import urlsplit

import requests
import httpx

from httpie.models import HTTPResponse
from httpie.output.streams import RawStream
Expand Down Expand Up @@ -232,7 +232,7 @@ def pre_request(self, request_headers: dict):
def start(
self,
initial_url: str,
final_response: requests.Response
final_response: httpx.Response
) -> Tuple[RawStream, IO]:
"""
Initiate and return a stream for `response` body with progress
Expand Down Expand Up @@ -328,7 +328,7 @@ def chunk_downloaded(self, chunk: bytes):
@staticmethod
def _get_output_file_from_response(
initial_url: str,
final_response: requests.Response,
final_response: httpx.Response,
) -> IO:
# Output file not specified. Pick a name that doesn't exist yet.
filename = None
Expand Down
Loading