3 changes: 2 additions & 1 deletion README.md
@@ -2284,7 +2284,7 @@ usage: -m [-h] [--tunnel-hostname TUNNEL_HOSTNAME] [--tunnel-port TUNNEL_PORT]
[--ca-cert-file CA_CERT_FILE] [--ca-file CA_FILE]
[--ca-signing-key-file CA_SIGNING_KEY_FILE]
[--auth-plugin AUTH_PLUGIN] [--cache-dir CACHE_DIR]
[--proxy-pool PROXY_POOL] [--enable-web-server]
[--cache-requests] [--proxy-pool PROXY_POOL] [--enable-web-server]
[--enable-static-server] [--static-server-dir STATIC_SERVER_DIR]
[--min-compression-length MIN_COMPRESSION_LENGTH]
[--enable-reverse-proxy] [--pac-file PAC_FILE]
@@ -2425,6 +2425,7 @@ options:
Default: /Users/abhinavsingh/.proxy/cache. Flag only
applicable when cache plugin is used with on-disk
storage.
--cache-requests Default: False. Whether to also cache request packets.
--proxy-pool PROXY_POOL
List of upstream proxies to use in the pool
--enable-web-server Default: False. Whether to enable
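For context, a hedged embedded-mode sketch of the new flag in use; it assumes proxy.main() accepts CLI-style arguments and that CacheResponsesPlugin is loadable via --plugins, as documented elsewhere in the README:

# Hedged sketch, not part of this change: enable the cache plugin together
# with the new --cache-requests flag from embedded mode.
import proxy

if __name__ == '__main__':
    proxy.main([
        '--plugins', 'proxy.plugin.CacheResponsesPlugin',
        '--cache-dir', '/tmp/proxy-cache',
        # New flag added by this change; without it only response
        # packets are written to the on-disk cache file.
        '--cache-requests',
    ])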
1 change: 1 addition & 0 deletions proxy/common/constants.py
@@ -148,6 +148,7 @@ def _env_threadless_compliant() -> bool:
DEFAULT_CACHE_DIRECTORY_PATH = os.path.join(
DEFAULT_DATA_DIRECTORY_PATH, 'cache',
)
DEFAULT_CACHE_REQUESTS = False

# Core plugins enabled by default or via flags
DEFAULT_ABC_PLUGINS = [
13 changes: 5 additions & 8 deletions proxy/http/handler.py
@@ -163,21 +163,18 @@ def handle_data(self, data: memoryview) -> Optional[bool]:
self.work.closed = True
return True
try:
# Don't parse incoming data any further after 1st request has completed.
#
# This specially does happen for pipeline requests.
# We don't parse incoming data any further after 1st HTTP request packet.
#
# Plugins can utilize on_client_data for such cases and
# apply custom logic to handle request data sent after 1st
# valid request.
if self.request.state != httpParserStates.COMPLETE:
if self._parse_first_request(data):
return True
else:
# HttpProtocolHandlerPlugin.on_client_data
# Can raise HttpProtocolException to tear down the connection
if self.plugin:
data = self.plugin.on_client_data(data) or data
# HttpProtocolHandlerPlugin.on_client_data
# Can raise HttpProtocolException to tear down the connection
elif self.plugin:
self.plugin.on_client_data(data)
except HttpProtocolException as e:
logger.info('HttpProtocolException: %s' % e)
response: Optional[memoryview] = e.response(self.request)
4 changes: 2 additions & 2 deletions proxy/http/plugin.py
@@ -71,9 +71,9 @@ def protocols() -> List[int]:
raise NotImplementedError()

@abstractmethod
def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
def on_client_data(self, raw: memoryview) -> None:
"""Called only after original request has been completely received."""
return raw # pragma: no cover
pass # pragma: no cover

@abstractmethod
def on_request_complete(self) -> Union[socket.socket, bool]:
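For plugin authors, a hedged sketch of the revised hook; the class name is illustrative and the remaining abstract methods of HttpProtocolHandlerPlugin are omitted:

# Hedged sketch of the new contract: on_client_data() now returns None,
# so plugins can no longer rewrite post-request data at this layer.
# Other abstract methods of HttpProtocolHandlerPlugin are omitted here.
import logging

from proxy.http.plugin import HttpProtocolHandlerPlugin

logger = logging.getLogger(__name__)


class LogPostRequestDataPlugin(HttpProtocolHandlerPlugin):  # illustrative name

    def on_client_data(self, raw: memoryview) -> None:
        # Called only for data received after the 1st request has completed,
        # e.g. pipelined requests or upgraded (WebSocket) traffic.
        logger.debug('received %d post-request bytes', len(raw))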
1 change: 0 additions & 1 deletion proxy/http/proxy/plugin.py
@@ -121,7 +121,6 @@ def handle_client_request(
Return None to drop the request data, e.g. in case a response has already been queued.
Raise HttpRequestRejected or HttpProtocolException directly to
tear down the connection with client.

"""
return request # pragma: no cover

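Because handle_client_request's return-None contract also drives the pipelined path changed in server.py below, here is a hedged sketch of a proxy plugin relying on it; the class name and host filter are illustrative:

# Hedged sketch: drop request data for a hypothetical blocked host by
# returning None, per the docstring above; returning the request forwards it.
from typing import Optional

from proxy.http.parser import HttpParser
from proxy.http.proxy import HttpProxyBasePlugin


class DropBlockedHostPlugin(HttpProxyBasePlugin):  # illustrative plugin

    def handle_client_request(self, request: HttpParser) -> Optional[HttpParser]:
        if request.host and request.host.endswith(b'blocked.example'):
            return None  # drop the request data
        return request  # forward unchanged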
15 changes: 9 additions & 6 deletions proxy/http/proxy/server.py
@@ -398,7 +398,7 @@ def on_response_chunk(self, chunk: List[memoryview]) -> List[memoryview]:
return chunk

# Can return None to tear down connection
def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
def on_client_data(self, raw: memoryview) -> None:
# For scenarios when an upstream connection was never established,
# let plugin do whatever they wish to. These are special scenarios
# where plugins are trying to do something magical. Within the core
@@ -413,7 +413,7 @@ def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
for plugin in self.plugins.values():
o = plugin.handle_client_data(raw)
if o is None:
return None
return
raw = o
elif self.upstream and not self.upstream.closed:
# For http proxy requests, handle pipeline case.
@@ -429,12 +429,17 @@ def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
# upgrade request. Incoming client data now
# must be treated as WebSocket protocol packets.
self.upstream.queue(raw)
return None
return

if self.pipeline_request is None:
# For pipeline requests, we never
# want to use --enable-proxy-protocol flag
# as proxy protocol header will not be present
#
# TODO: HTTP parser must be smart about detecting
# HA proxy protocol or we must always explicitly pass
# the flag when we are expecting HA proxy protocol
# request line before HTTP request lines.
self.pipeline_request = HttpParser(
httpParserTypes.REQUEST_PARSER,
)
@@ -447,7 +452,7 @@ def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
assert self.pipeline_request is not None
r = plugin.handle_client_request(self.pipeline_request)
if r is None:
return None
return
self.pipeline_request = r
assert self.pipeline_request is not None
# TODO(abhinavsingh): Remove memoryview wrapping here after
@@ -463,8 +468,6 @@ def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
# simply queue for upstream server.
else:
self.upstream.queue(raw)
return None
return raw

def on_request_complete(self) -> Union[socket.socket, bool]:
self.emit_request_complete()
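For reference, a hedged sketch of the parser primitives used for the pipelined path above; it assumes HttpParser.parse() accepts raw bytes:

# Hedged sketch, not part of this change: parse one pipelined request with
# the same primitives used above. Assumes parse() accepts raw bytes.
from proxy.http.parser import HttpParser, httpParserTypes

pipeline_request = HttpParser(httpParserTypes.REQUEST_PARSER)
pipeline_request.parse(b'GET /next HTTP/1.1\r\nHost: example.com\r\n\r\n')
# For a body-less GET the parser is expected to reach the complete state,
# which is the condition the handler checks via request.is_complete.
print(pipeline_request.is_complete)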
5 changes: 2 additions & 3 deletions proxy/http/server/web.py
@@ -194,7 +194,7 @@ async def read_from_descriptors(self, r: Readables) -> bool:
return True
return False

def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
def on_client_data(self, raw: memoryview) -> None:
if self.switched_protocol == httpProtocolTypes.WEBSOCKET:
# TODO(abhinavsingh): Remove .tobytes after websocket frame parser
# is memoryview compliant
@@ -211,7 +211,7 @@ def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
assert self.route
self.route.on_websocket_message(frame)
frame.reset()
return None
return
# If 1st valid request was completed and it's a HTTP/1.1 keep-alive
# And only if we have a route, parse pipeline requests
if self.request.is_complete and \
@@ -231,7 +231,6 @@ def on_client_data(self, raw: memoryview) -> Optional[memoryview]:
'Pipelined request is not keep-alive, will tear down request...',
)
self.pipeline_request = None
return raw

def on_response_chunk(self, chunk: List[memoryview]) -> List[memoryview]:
return chunk
1 change: 1 addition & 0 deletions proxy/plugin/cache/cache_responses.py
@@ -30,6 +30,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
self.flags.cache_dir,
'responses',
),
cache_requests=self.flags.cache_requests,
)
self.set_store(self.disk_store)

17 changes: 15 additions & 2 deletions proxy/plugin/cache/store/disk.py
@@ -16,7 +16,9 @@
from ....common.flag import flags
from ....http.parser import HttpParser
from ....common.utils import text_
from ....common.constants import DEFAULT_CACHE_DIRECTORY_PATH
from ....common.constants import (
DEFAULT_CACHE_REQUESTS, DEFAULT_CACHE_DIRECTORY_PATH,
)


logger = logging.getLogger(__name__)
@@ -30,12 +32,21 @@
'Flag only applicable when cache plugin is used with on-disk storage.',
)

flags.add_argument(
'--cache-requests',
action='store_true',
default=DEFAULT_CACHE_REQUESTS,
help='Default: False. ' +
'Whether to also cache request packets.',
)


class OnDiskCacheStore(CacheStore):

def __init__(self, uid: str, cache_dir: str) -> None:
def __init__(self, uid: str, cache_dir: str, cache_requests: bool) -> None:
super().__init__(uid)
self.cache_dir = cache_dir
self.cache_requests = cache_requests
self.cache_file_path: Optional[str] = None
self.cache_file: Optional[BinaryIO] = None

@@ -47,6 +58,8 @@ def open(self, request: HttpParser) -> None:
self.cache_file = open(self.cache_file_path, "wb")

def cache_request(self, request: HttpParser) -> Optional[HttpParser]:
if self.cache_file and self.cache_requests:
self.cache_file.write(request.build())
return request

def cache_response_chunk(self, chunk: memoryview) -> memoryview:
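Finally, a hedged sketch of the extended constructor; the uid and cache_dir values are illustrative, and in practice CacheResponsesPlugin builds this store and forwards the --cache-requests flag as shown in cache_responses.py above:

# Hedged sketch: construct the store directly. With cache_requests=True,
# cache_request() now also writes request.build() into the cache file
# that open() creates for the response.
import tempfile

from proxy.plugin.cache.store.disk import OnDiskCacheStore

store = OnDiskCacheStore(
    uid='0',                          # illustrative uid
    cache_dir=tempfile.gettempdir(),  # illustrative on-disk cache directory
    cache_requests=True,              # new keyword added by this change
)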