Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 7 additions & 6 deletions .github/workflows/ci-cd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -237,23 +237,24 @@ jobs:
PIP_USER: 1
run: >-
PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}"
pytest --junitxml=junit.xml -m 'not dev_mode and not autobahn'
pytest --junitxml=junit.xml --numprocesses=auto --cov=aiohttp/ --cov=tests/
-m 'not dev_mode and not autobahn'
shell: bash
- name: Re-run the failing tests with maximum verbosity
if: failure()
env:
COLOR: yes
AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
run: >- # `exit 1` makes sure that the job remains red with flaky runs
pytest --no-cov --numprocesses=0 -vvvvv --lf && exit 1
pytest --no-cov -vvvvv --lf && exit 1
shell: bash
- name: Run dev_mode tests
env:
COLOR: yes
AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
PIP_USER: 1
PYTHONDEVMODE: 1
run: pytest -m dev_mode --cov-append --numprocesses=0
run: pytest -m dev_mode --cov=aiohttp/ --cov=tests/ --cov-append
shell: bash
- name: Turn coverage into xml
env:
Expand Down Expand Up @@ -345,7 +346,7 @@ jobs:
PIP_USER: 1
run: >-
PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}"
pytest --junitxml=junit.xml --numprocesses=0 -m autobahn
pytest --junitxml=junit.xml --cov=aiohttp/ --cov=tests/ -m autobahn
shell: bash
- name: Turn coverage into xml
env:
Expand Down Expand Up @@ -413,7 +414,7 @@ jobs:
uses: CodSpeedHQ/action@v4
with:
mode: instrumentation
run: python -Im pytest --no-cov --numprocesses=0 -vvvvv --codspeed
run: python -Im pytest --no-cov -vvvvv --codspeed


cython-coverage:
Expand Down Expand Up @@ -462,7 +463,7 @@ jobs:
PIP_USER: 1
run: >-
pytest tests/test_client_functional.py tests/test_http_parser.py tests/test_http_writer.py tests/test_web_functional.py tests/test_web_response.py tests/test_websocket_parser.py
--cov-config=.coveragerc-cython.toml
--cov-config=.coveragerc-cython.toml --cov=aiohttp/ --cov=tests/ --numprocesses=auto
-m 'not dev_mode and not autobahn'
shell: bash
- name: Turn coverage into xml
Expand Down
2 changes: 2 additions & 0 deletions CHANGES/10600.bugfix.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
Fixed the HTTP parser not rejecting HTTP/1.1 requests that do not have a valid ``Host`` header
-- by :user:`Cycloctane`.
1 change: 1 addition & 0 deletions CHANGES/12364.contrib.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Disabled ``coverage`` and ``xdist`` by default to ease local development -- by :user:`Dreamsorcerer`.
7 changes: 5 additions & 2 deletions aiohttp/_http_parser.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -457,6 +457,7 @@ cdef class HttpParser:
cdef _on_headers_complete(self):
self._process_header()

http_version = self.http_version()
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
upgrade = self._cparser.upgrade
chunked = self._cparser.flags & cparser.F_CHUNKED
Expand All @@ -465,6 +466,8 @@ cdef class HttpParser:
headers = CIMultiDictProxy(CIMultiDict(self._headers))

if self._cparser.type == cparser.HTTP_REQUEST:
if http_version == HttpVersion11 and hdrs.HOST not in headers:
raise BadHttpMessage("Missing 'Host' header in request.")
h_upg = headers.get("upgrade", "")
allowed = upgrade and h_upg.isascii() and h_upg.lower() in ALLOWED_UPGRADES
if allowed or self._cparser.method == cparser.HTTP_CONNECT:
Expand All @@ -488,11 +491,11 @@ cdef class HttpParser:
method = http_method_str(self._cparser.method)
msg = _new_request_message(
method, self._path,
self.http_version(), headers, raw_headers,
http_version, headers, raw_headers,
should_close, encoding, upgrade, chunked, self._url)
else:
msg = _new_response_message(
self.http_version(), self._cparser.status_code, self._reason,
http_version, self._cparser.status_code, self._reason,
headers, raw_headers, should_close, encoding,
upgrade, chunked)

Expand Down
5 changes: 2 additions & 3 deletions aiohttp/_websocket/writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from ..base_protocol import BaseProtocol
from ..client_exceptions import ClientConnectionResetError
from ..compression_utils import ZLibBackend, ZLibCompressor
from ..helpers import DEFAULT_CHUNK_SIZE
from .helpers import (
MASK_LEN,
MSG_SIZE,
Expand All @@ -21,8 +22,6 @@
)
from .models import WS_DEFLATE_TRAILING, WSMsgType

DEFAULT_LIMIT: Final[int] = 2**18

# WebSocket opcode boundary: opcodes 0-7 are data frames, 8-15 are control frames
# Control frames (ping, pong, close) are never compressed
WS_CONTROL_FRAME_OPCODE: Final[int] = 8
Expand Down Expand Up @@ -52,7 +51,7 @@ def __init__(
transport: asyncio.Transport,
*,
use_mask: bool = False,
limit: int = DEFAULT_LIMIT,
limit: int = DEFAULT_CHUNK_SIZE,
random: random.Random = random.Random(),
compress: int = 0,
notakeover: bool = False,
Expand Down
5 changes: 3 additions & 2 deletions aiohttp/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@
from .cookiejar import CookieJar
from .helpers import (
_SENTINEL,
DEFAULT_CHUNK_SIZE,
EMPTY_BODY_METHODS,
BasicAuth,
TimeoutHandle,
Expand Down Expand Up @@ -331,7 +332,7 @@ def __init__(
trust_env: bool = False,
requote_redirect_url: bool = True,
trace_configs: list[TraceConfig[object]] | None = None,
read_bufsize: int = 2**18,
read_bufsize: int = DEFAULT_CHUNK_SIZE,
max_line_size: int = 8190,
max_field_size: int = 8190,
max_headers: int = 128,
Expand Down Expand Up @@ -1226,7 +1227,7 @@ async def _ws_connect(

transport = conn.transport
assert transport is not None
reader = WebSocketDataQueue(conn_proto, 2**18, loop=self._loop)
reader = WebSocketDataQueue(conn_proto, DEFAULT_CHUNK_SIZE, loop=self._loop)
writer = WebSocketWriter(
conn_proto,
transport,
Expand Down
3 changes: 2 additions & 1 deletion aiohttp/client_proto.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
)
from .helpers import (
_EXC_SENTINEL,
DEFAULT_CHUNK_SIZE,
EMPTY_BODY_STATUS_CODES,
BaseTimerContext,
ErrorableProtocol,
Expand Down Expand Up @@ -231,7 +232,7 @@ def set_response_params(
read_until_eof: bool = False,
auto_decompress: bool = True,
read_timeout: float | None = None,
read_bufsize: int = 2**18,
read_bufsize: int = DEFAULT_CHUNK_SIZE,
timeout_ceil_threshold: float = 5,
max_line_size: int = 8190,
max_field_size: int = 8190,
Expand Down
3 changes: 0 additions & 3 deletions aiohttp/compression_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,6 @@


MAX_SYNC_CHUNK_SIZE = 4096
# Matches the max size we receive from sockets:
# https://github.com/python/cpython/blob/1857a40807daeae3a1bf5efb682de9c9ae6df845/Lib/asyncio/selector_events.py#L766
DEFAULT_MAX_DECOMPRESS_SIZE = 256 * 1024

# Unlimited decompression constants - different libraries use different conventions
ZLIB_MAX_LENGTH_UNLIMITED = 0 # zlib uses 0 to mean unlimited
Expand Down
4 changes: 4 additions & 0 deletions aiohttp/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,10 @@

__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "frozen_dataclass_decorator", "reify")

# This is the default size/limit for several operations.
# Matches the max size we receive from sockets:
# https://github.com/python/cpython/blob/1857a40807daeae3a1bf5efb682de9c9ae6df845/Lib/asyncio/selector_events.py#L766
DEFAULT_CHUNK_SIZE = 2**18 # 256 KiB
COOKIE_MAX_LENGTH = 4096

_T = TypeVar("_T")
Expand Down
11 changes: 7 additions & 4 deletions aiohttp/http_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
from . import hdrs
from .base_protocol import BaseProtocol
from .compression_utils import (
DEFAULT_MAX_DECOMPRESS_SIZE,
HAS_BROTLI,
HAS_ZSTD,
BrotliDecompressor,
Expand All @@ -32,6 +31,7 @@
from .helpers import (
_EXC_SENTINEL,
DEBUG,
DEFAULT_CHUNK_SIZE,
EMPTY_BODY_METHODS,
EMPTY_BODY_STATUS_CODES,
NO_EXTENSIONS,
Expand All @@ -49,7 +49,7 @@
LineTooLong,
TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .http_writer import HttpVersion, HttpVersion10, HttpVersion11
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders

Expand Down Expand Up @@ -672,6 +672,9 @@ def parse_message(self, lines: list[bytes]) -> RawRequestMessage:
chunked,
) = self.parse_headers(lines[1:])

if version_o == HttpVersion11 and hdrs.HOST not in headers:
raise BadHttpMessage("Missing 'Host' header in request.")

if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
close = True
Expand Down Expand Up @@ -810,7 +813,7 @@ def __init__(
max_line_size: int = 8190,
max_field_size: int = 8190,
max_trailers: int = 128,
limit: int = DEFAULT_MAX_DECOMPRESS_SIZE,
limit: int = DEFAULT_CHUNK_SIZE,
) -> None:
self._length = 0
self._paused = False
Expand Down Expand Up @@ -1061,7 +1064,7 @@ def __init__(
self,
out: StreamReader,
encoding: str | None,
max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
max_decompress_size: int = DEFAULT_CHUNK_SIZE,
) -> None:
self.out = out
self.size = 0
Expand Down
12 changes: 4 additions & 8 deletions aiohttp/multipart.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,19 +14,15 @@
from multidict import CIMultiDict, CIMultiDictProxy

from .abc import AbstractStreamWriter
from .compression_utils import (
DEFAULT_MAX_DECOMPRESS_SIZE,
ZLibCompressor,
ZLibDecompressor,
)
from .compression_utils import ZLibCompressor, ZLibDecompressor
from .hdrs import (
CONTENT_DISPOSITION,
CONTENT_ENCODING,
CONTENT_LENGTH,
CONTENT_TRANSFER_ENCODING,
CONTENT_TYPE,
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .helpers import CHAR, DEFAULT_CHUNK_SIZE, TOKEN, parse_mimetype, reify
from .http import HeadersParser
from .http_exceptions import BadHttpMessage
from .log import internal_logger
Expand Down Expand Up @@ -267,7 +263,7 @@ def __init__(
*,
subtype: str = "mixed",
default_charset: str | None = None,
max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
max_decompress_size: int = DEFAULT_CHUNK_SIZE,
client_max_size: int = sys.maxsize,
max_size_error_cls: type[Exception] = ValueError,
) -> None:
Expand Down Expand Up @@ -641,7 +637,7 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt

async def write(self, writer: AbstractStreamWriter) -> None:
field = self._value
while chunk := await field.read_chunk(size=2**18):
while chunk := await field.read_chunk(size=DEFAULT_CHUNK_SIZE):
async for d in field.decode_iter(chunk):
await writer.write(d)

Expand Down
32 changes: 20 additions & 12 deletions aiohttp/payload.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from .abc import AbstractStreamWriter
from .helpers import (
_SENTINEL,
DEFAULT_CHUNK_SIZE,
content_disposition_header,
guess_filename,
parse_mimetype,
Expand All @@ -43,7 +44,6 @@
)

TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
READ_SIZE: Final[int] = 2**18 # 256 KiB
_CLOSE_FUTURES: set[asyncio.Future[None]] = set()


Expand Down Expand Up @@ -489,7 +489,7 @@ def _read_and_available_len(

Args:
remaining_content_len: Optional limit on how many bytes to read in this operation.
If None, READ_SIZE will be used as the default chunk size.
If None, DEFAULT_CHUNK_SIZE will be used as the default chunk size.

Returns:
A tuple containing:
Expand All @@ -504,7 +504,11 @@ def _read_and_available_len(
self._set_or_restore_start_position()
size = self.size # Call size only once since it does I/O
return size, self._value.read(
min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE)
min(
DEFAULT_CHUNK_SIZE,
size or DEFAULT_CHUNK_SIZE,
remaining_content_len or DEFAULT_CHUNK_SIZE,
)
)

def _read(self, remaining_content_len: int | None) -> bytes:
Expand All @@ -513,7 +517,7 @@ def _read(self, remaining_content_len: int | None) -> bytes:

Args:
remaining_content_len: Optional maximum number of bytes to read.
If None, READ_SIZE will be used as the default chunk size.
If None, DEFAULT_CHUNK_SIZE will be used as the default chunk size.

Returns:
A chunk of bytes read from the file object, respecting the
Expand All @@ -523,7 +527,7 @@ def _read(self, remaining_content_len: int | None) -> bytes:
the initial _read_and_available_len call has been made.

"""
return self._value.read(remaining_content_len or READ_SIZE) # type: ignore[no-any-return]
return self._value.read(remaining_content_len or DEFAULT_CHUNK_SIZE) # type: ignore[no-any-return]

@property
def size(self) -> int | None:
Expand Down Expand Up @@ -626,9 +630,9 @@ async def write_with_length(
None,
self._read,
(
min(READ_SIZE, remaining_content_len)
min(DEFAULT_CHUNK_SIZE, remaining_content_len)
if remaining_content_len is not None
else READ_SIZE
else DEFAULT_CHUNK_SIZE
),
)

Expand Down Expand Up @@ -753,7 +757,7 @@ def _read_and_available_len(

Args:
remaining_content_len: Optional limit on how many bytes to read in this operation.
If None, READ_SIZE will be used as the default chunk size.
If None, DEFAULT_CHUNK_SIZE will be used as the default chunk size.

Returns:
A tuple containing:
Expand All @@ -772,7 +776,11 @@ def _read_and_available_len(
self._set_or_restore_start_position()
size = self.size
chunk = self._value.read(
min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE)
min(
DEFAULT_CHUNK_SIZE,
size or DEFAULT_CHUNK_SIZE,
remaining_content_len or DEFAULT_CHUNK_SIZE,
)
)
return size, chunk.encode(self._encoding) if self._encoding else chunk.encode()

Expand All @@ -782,7 +790,7 @@ def _read(self, remaining_content_len: int | None) -> bytes:

Args:
remaining_content_len: Optional maximum number of bytes to read.
If None, READ_SIZE will be used as the default chunk size.
If None, DEFAULT_CHUNK_SIZE will be used as the default chunk size.

Returns:
A chunk of bytes read from the file object and encoded using the payload's
Expand All @@ -794,7 +802,7 @@ def _read(self, remaining_content_len: int | None) -> bytes:
the specified encoding (or UTF-8 if none was provided).

"""
chunk = self._value.read(remaining_content_len or READ_SIZE)
chunk = self._value.read(remaining_content_len or DEFAULT_CHUNK_SIZE)
return chunk.encode(self._encoding) if self._encoding else chunk.encode()

def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
Expand Down Expand Up @@ -878,7 +886,7 @@ async def write_with_length(
self._set_or_restore_start_position()
loop_count = 0
remaining_bytes = content_length
while chunk := self._value.read(READ_SIZE):
while chunk := self._value.read(DEFAULT_CHUNK_SIZE):
if loop_count > 0:
# Avoid blocking the event loop
# if they pass a large BytesIO object
Expand Down
Loading
Loading