
Commit

fix: swap to httpx (#13)
aiohttp appears to have issues related to Akamai Global Host, and its developers
do not seem interested in resolving them as a bug.
aio-libs/aiohttp#5643

BREAKING CHANGE: API has changed due to use of httpx.
Modifiers, test_url, and other items that access aiohttp ClientResponse
will need to be fixed.
alandtse committed Apr 27, 2021
1 parent d7b8f69 commit 311e998
Showing 12 changed files with 863 additions and 597 deletions.
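To make the BREAKING CHANGE note above concrete, here is a hedged, illustrative sketch of how a test_url tester might have to change; the callback bodies, function names, and the "/success" path are made up for illustration and are not taken from this diff.

# Hypothetical migration sketch for a test_url callback; not taken from the
# repository, only illustrating the ClientResponse -> httpx.Response change.
from typing import Any, Dict, Optional, Text

import httpx
from yarl import URL


def test_url_old(resp, data: Dict[Text, Any], query: Dict[Text, Any]) -> Optional[Text]:
    # aiohttp.ClientResponse: integer status in .status, yarl.URL in .url
    if resp.status == 200 and resp.url.path == "/success":
        return "Logged in."
    return None


def test_url_new(resp: httpx.Response, data: Dict[Text, Any], query: Dict[Text, Any]) -> Optional[Text]:
    # httpx.Response: .status_code instead of .status, and .url is an
    # httpx.URL; wrap it in yarl.URL if yarl-style query access is still wanted
    if resp.status_code == 200 and URL(str(resp.url)).path == "/success":
        return "Logged in."
    return None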
2 changes: 1 addition & 1 deletion .github/workflows/checks.yml
@@ -10,7 +10,7 @@ on:
jobs:
markdown-link-check:
name: Check Markdown links
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: gaurav-nelson/github-action-markdown-link-check@v1
12 changes: 5 additions & 7 deletions .github/workflows/pull-test.yml
@@ -31,13 +31,11 @@ jobs:
- name: Test with tox
run: |
tox -v
- name: Update coveralls
run: |
pip install coveralls
coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
# - name: Coveralls
# uses: coverallsapp/github-action@master
# with:
# github-token: ${{ secrets.GITHUB_TOKEN }}
# path-to-lcov: cov.xml
- uses: codecov/codecov-action@v1
with:
verbose: true # optional (default = false)
3 changes: 1 addition & 2 deletions .github/workflows/push-main.yml
@@ -56,13 +56,12 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install python-semantic-release
- name: Setup Git
run: |
git config --global user.name "semantic-release"
git config --global user.email "semantic-release@GitHub"
- name: Python Semantic Release
uses: relekang/python-semantic-release@v7.14.0
uses: relekang/python-semantic-release@master
with:
github_token: ${{ secrets.GH_TOKEN }}
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
174 changes: 95 additions & 79 deletions authcaptureproxy/auth_capture_proxy.py

Large diffs are not rendered by default.

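The auth_capture_proxy.py changes themselves are not rendered above. Purely as a hedged, hypothetical sketch of the kind of request call-site change an aiohttp-to-httpx swap implies (neither function below is from that file):

# Hypothetical before/after of a request call site; not the actual
# auth_capture_proxy.py code, which is omitted from this diff view.
import aiohttp
import httpx


async def fetch_old(url: str) -> str:
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.text()  # aiohttp: text() is an awaitable method


async def fetch_new(url: str) -> str:
    async with httpx.AsyncClient() as client:
        resp = await client.get(url)
        return resp.text  # httpx: text is a plain property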
8 changes: 4 additions & 4 deletions authcaptureproxy/cli.py
@@ -12,8 +12,8 @@
from functools import partial, wraps
from typing import Any, Dict, Text

import httpx
import typer
from aiohttp import ClientResponse
from yarl import URL

from authcaptureproxy import AuthCaptureProxy, __copyright__, __title__, __version__, metadata
@@ -85,11 +85,11 @@ async def proxy_example(
callback_url = URL(callback)
proxy_obj: AuthCaptureProxy = AuthCaptureProxy(proxy_url=proxy_url, host_url=host_url)

def test_url(resp: ClientResponse, data: Dict[Text, Any], query: Dict[Text, Any]):
def test_url(resp: httpx.Response, data: Dict[Text, Any], query: Dict[Text, Any]):
"""Test for a successful Amazon URL.
Args:
resp (ClientResponse): The aiohttp response.
resp (httpx.Response): The httpx response.
data (Dict[Text, Any]): Dictionary of all post data captured through proxy with overwrites for duplicate keys.
query (Dict[Text, Any]): Dictionary of all query data with overwrites for duplicate keys.
@@ -104,7 +104,7 @@ def test_url(resp: ClientResponse, data: Dict[Text, Any], query: Dict[Text, Any]
asyncio.create_task(proxy_obj.stop_proxy(3)) # stop proxy in 3 seconds
if callback_url:
return URL(callback_url) # 302 redirect
return f"Successfully logged in {data.get('email')} and {data.get('password')}. Please close the window.<br /><b>Post data</b><br />{json.dumps(data)}<br /><b>Query Data:</b><br />{json.dumps(query)}<br /><b>Cookies:</b></br>{proxy_obj.session.cookie_jar.filter_cookies(proxy_obj._host_url.with_path('/'))}"
return f"Successfully logged in {data.get('email')} and {data.get('password')}. Please close the window.<br /><b>Post data</b><br />{json.dumps(data)}<br /><b>Query Data:</b><br />{json.dumps(query)}<br /><b>Cookies:</b></br>{json.dumps(list(proxy_obj.session.cookies.items()))}"

await proxy_obj.start_proxy()
# add tests and modifiers after the proxy has started so that port data is available for self.access_url()
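The success-message change in the hunk above reflects that an httpx client exposes cookies as a dict-like Cookies object rather than aiohttp's CookieJar. A minimal standalone illustration follows; the client and cookie values are made up and are not the proxy's own session.

# Minimal illustration of the httpx cookie access used in the new return
# string; the client and cookie are created here only for the example.
import json

import httpx

client = httpx.Client()
client.cookies.set("session-id", "abc123", domain="example.com")

# httpx.Cookies is a mapping of name -> value, so it can be serialized
# directly instead of going through CookieJar.filter_cookies().
print(json.dumps(list(client.cookies.items())))  # [["session-id", "abc123"]]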
10 changes: 6 additions & 4 deletions authcaptureproxy/examples/testers.py
@@ -7,27 +7,29 @@

from typing import Any, Dict, Optional, Text, Union

from aiohttp.client_reqrep import ClientResponse
import httpx
from yarl import URL


def test_amazon(
self, resp: ClientResponse, data: Dict[Text, Any], query: Dict[Text, Any]
self, resp: httpx.Response, data: Dict[Text, Any], query: Dict[Text, Any]
) -> Optional[Union[URL, Text]]:
"""Test Amazon login example.
This is a simplified example based on alexapy. https://gitlab.com/keatontaylor/alexapy/-/blob/dev/alexapy/alexaproxy.py
Args:
resp (ClientResponse): The aiohttp response.
resp (httpx.Response): The httpx response.
data (Dict[Text, Any]): Dictionary of all post data captured through proxy with overwrites for duplicate keys.
query (Dict[Text, Any]): Dictionary of all query data with overwrites for duplicate keys.
Returns:
Optional[Union[URL, Text]]: URL for a http 302 redirect or Text to display on success. None indicates test did not pass.
"""
if not resp.url:
return None
if resp.url.path in ["/ap/maplanding", "/spa/index.html"]:
access_token = resp.url.query.get("openid.oa2.access_token")
access_token = URL(str(resp.url)).query.get("openid.oa2.access_token")
config_flow_id = self.init_query.get("config_flow_id")
callback_url = self.init_query.get("callback_url")
if callback_url:
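The access-token lookup above wraps the response URL in yarl.URL because httpx.Response.url is an httpx.URL rather than a yarl.URL. A small standalone sketch of that pattern follows; the URL and token value are made up.

# Standalone sketch of the yarl-wrapping pattern used in the updated tester;
# the URL and token value are invented for illustration.
import httpx
from yarl import URL

resp_url = httpx.URL(
    "https://www.example.com/ap/maplanding?openid.oa2.access_token=Atza%7Cexample"
)
access_token = URL(str(resp_url)).query.get("openid.oa2.access_token")
print(access_token)  # Atza|example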
71 changes: 56 additions & 15 deletions authcaptureproxy/helper.py
@@ -11,39 +11,37 @@
from http.cookies import SimpleCookie
from typing import Any, Callable, Dict, List, Mapping, Text, Union

from aiohttp import ClientResponse
from multidict import MultiDict
import httpx
from multidict import MultiDict, MultiDictProxy
from yarl import URL

_LOGGER = logging.getLogger(__name__)


def print_resp(resp: ClientResponse) -> None:
def print_resp(resp: httpx.Response) -> None:
"""Print response info.
Args:
resp (ClientResponse): The client response to show
resp (httpx.Response): The client response to show
Returns:
None
"""
if resp.history:
for item in resp.history:
_LOGGER.debug("%s: redirected from\n%s", item.method, item.url)
url = resp.request_info.url
method = resp.request_info.method
status = resp.status
reason = resp.reason
headers = ast.literal_eval(
str(resp.request_info.headers).replace("<CIMultiDictProxy(", "{").replace(")>", "}")
)
_LOGGER.debug("%s: redirected from\n%s", item.request.method, item.url)
url = resp.request.url
method = resp.request.method
status = resp.status_code
reason = resp.reason_phrase
headers = ast.literal_eval(str(resp.request.headers)[8:-1])
cookies = {}
if headers.get("Cookie"):
if headers.get("cookie"):
cookie: SimpleCookie = SimpleCookie()
cookie.load(headers.get("Cookie"))
cookie.load(headers.get("cookie"))
for key, morsel in cookie.items():
cookies[key] = morsel.value
headers["Cookie"] = cookies
headers["cookie"] = cookies
_LOGGER.debug(
"%s: \n%s with\n%s\nreturned %s:%s with response %s",
method,
@@ -195,3 +193,46 @@ def get_nested_dict_keys(
else:
result.append(key)
return result


def get_content_type(resp: httpx.Response) -> str:
"""Get content_type from httpx Response.
Args:
resp (httpx.Response): Response from httpx request
Returns:
str: The content_type
"""
content_type = ""
content_type_string = resp.headers.get("content-type")
if content_type_string and ";" in content_type_string:
content_type = content_type_string.split(";")[0].strip()
elif content_type_string:
content_type = content_type_string
return content_type


def convert_multidict_to_dict(multidict: Union[MultiDict, MultiDictProxy]) -> dict:
"""Convert a multdict to a dict for httpx.
https://www.python-httpx.org/quickstart/#sending-form-encoded-data
Args:
multidict (MultiDict | MultiDictProxy): The multidict to convert
Returns:
dict: A dictionary where duplicate keys will be added as a list
"""
result: dict = {}
for k, v in multidict.items():
old_value = result.get(k)
if old_value:
list_value = []
if not isinstance(old_value, list):
list_value.append(old_value)
list_value.append(v)
result[k] = list_value
else:
result[k] = v
return result
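A brief usage sketch for the two helpers added above; the response headers and form data are made up.

# Example use of the new helpers; the response and the form data are invented.
import httpx
from multidict import MultiDict

from authcaptureproxy.helper import convert_multidict_to_dict, get_content_type

resp = httpx.Response(200, headers={"content-type": "text/html; charset=utf-8"})
print(get_content_type(resp))  # text/html

form = MultiDict([("a", "1"), ("a", "2"), ("b", "3")])
print(convert_multidict_to_dict(form))  # {'a': ['1', '2'], 'b': '3'}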

