ProcessPoolExecutor in background causes freeze after keep-alive timeout #1883
Long story short
If we run a heavy task in a ProcessPoolExecutor in the background, aiohttp stops handling requests after a certain timeout. It happens after ~75 seconds, which matches the default keepalive_timeout value at https://github.com/aio-libs/aiohttp/blob/2.0.7/aiohttp/web_protocol.py#L82
If we change the keepalive_timeout value, the freeze happens after that new timeout instead.
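For reference, the keep-alive timeout can be overridden per application via handler_args; this is how the reproduction below shortens it to 1 second. A minimal sketch:

```python
from aiohttp.web import Application

# handler_args is forwarded to the request handler factory;
# 'keepalive_timeout' controls how long an idle connection stays open
# (the default is ~75 seconds in aiohttp 2.0.7).
app = Application(handler_args={'keepalive_timeout': 1})
```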
Expected behaviour
Handle all requests.
Actual behaviour
The server freezes on the first request made after the keep-alive timeout has expired.
Steps to reproduce
We set the keep-alive timeout to 1 second to reproduce the bug faster.
- Run the server:
```python
import asyncio
import logging
from concurrent.futures import ProcessPoolExecutor
from multiprocessing import Manager

from aiohttp.web import Application, Response, run_app

logging.basicConfig(level=logging.DEBUG)


async def index(request):
    body = '''<!DOCTYPE html>
<html>
<body>
<script>
    function request(path) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', path, false);
        xhr.onload = function (e) {
            if (xhr.readyState == 4 && xhr.status == 200) {
                console.log(xhr.responseText);
            }
        }
        xhr.send(null);
    }
    function start() {
        return request('/start');
    }
    function stop() {
        return request('/stop');
    }
</script>
<button style="width: 200px; height: 100px;" onclick="start()"> Start </button>
<button style="width: 200px; height: 100px;" onclick="stop()"> Stop </button>
</body>
</html>
'''
    return Response(text=body, content_type='text/html')


async def start(request):
    # Queue a job for the background worker.
    await request.app['worker'].queue.put('data')
    return Response(text='Start')


async def stop(request):
    # Signal the worker process to stop.
    request.app['worker'].stop_event.set()
    return Response(text='Stop')


def hard_work(stop_event):
    # Busy-loop in the worker process until the stop event is set.
    while True:
        if stop_event.is_set():
            break


class Worker:
    async def start(self, loop):
        self.queue = asyncio.Queue(loop=loop)
        while True:
            data = await self.queue.get()
            executor = ProcessPoolExecutor()
            manager = Manager()
            stop_event = manager.Event()
            self.stop_event = stop_event
            hard_future = loop.run_in_executor(executor, hard_work, stop_event)
            try:
                await asyncio.wait_for(hard_future, timeout=999999, loop=loop)
            except asyncio.TimeoutError:
                print('Time out')
                stop_event.set()
            else:
                print('Done')


def get_app():
    app = Application(handler_args={'keepalive_timeout': 1})
    app.router.add_get('/', index)
    app.router.add_get('/start', start)
    app.router.add_get('/stop', stop)

    async def start_worker(app):
        app.loop.set_debug(True)
        worker = Worker()
        app['worker'] = worker
        app.loop.create_task(worker.start(app.loop))

    app.on_startup.append(start_worker)
    return app


def main():
    app = get_app()
    run_app(app, host='0.0.0.0', port=5000)


if __name__ == '__main__':
    main()
```

- Make the first `/start` request. Wait for the keep-alive timeout to expire. Then make a second `/stop` request: aiohttp will not handle the second `/stop` request.
The following client script performs the same sequence:

```python
import asyncio

import aiohttp


async def do_requests():
    session = aiohttp.ClientSession()

    print('GET /start')
    response = await session.get('http://127.0.0.1:5000/start')
    print('Response:', await response.text())

    wait = 3
    print('Sleep for {} seconds'.format(wait))
    await asyncio.sleep(wait)

    print('GET /stop')
    response = await session.get('http://127.0.0.1:5000/stop')
    print('Response:', await response.text())


def main():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(do_requests())


if __name__ == '__main__':
    main()
```

Your environment
Debian 8 Jessie
Python 3.5.3
aiohttp == 2.0.7