Long story short
While using aiohttp I saw a lot of ServerDisconnectedError messages, while a simple curl fetched the same target with no problems. After a bit of research I put together some scripts to reproduce the behaviour, and it looks like the session can't finish a GET request with compress=True.
Expected behaviour
The session's get() method finishes properly. Is there a way I can use compression under these conditions?
Actual behaviour
Using compress=True in a GET request produces ServerDisconnectedError.
....
connector = TCPConnector(force_close=False)
async with ClientSession(connector=connector) as session:
    while not tq.empty():
        num = tq.get()
        try:
            async with session.get(url=url, compress=True, allow_redirects=True) as response:
                txt = await response.read()
.....
I tried forcing the connection closed with force_close=True on the connector, but that only caused new hanging connections to pile up (I tracked them with watch -n 0.1 'netstat -n | grep :8080 | wc').
When using compress=False, everything works fine.
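In other words, toggling that one flag is the only difference between the failing and the working runs. Below is a condensed sketch of what the full client does with a single worker (this trimmed snippet is just an illustration; I ran the complete scripts further down, not this exact file):

import asyncio
from aiohttp import ClientSession

URL = "http://localhost:8080/hello"  # the demo server from the steps below


async def main(compress_flag):
    async with ClientSession() as session:  # default connector, i.e. force_close=False
        for i in range(10):
            # compress=True is the case that fails for me, compress=False works
            async with session.get(URL, compress=compress_flag, allow_redirects=True) as response:
                print(i, response.status, await response.read())


loop = asyncio.get_event_loop()
loop.run_until_complete(main(True))     # the failing case
# loop.run_until_complete(main(False))  # the working case
loop.close()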
Steps to reproduce
server.py
import asyncio
from datetime import datetime
from aiohttp import web
import random

random.seed(1)
html_body = b'Hello world!!!'


async def hello(request):
    n = datetime.now().isoformat()
    delay = random.randint(0, 0)
    await asyncio.sleep(delay)
    headers = {"content_type": "text/html", "delay": str(delay), 'request_catched': str(n)}
    response = web.Response(body=html_body, headers=headers)
    return response


app = web.Application()
app.router.add_route("GET", "/{name}", hello)
web.run_app(app)
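As mentioned above, a plain curl fetch of the same target had no problems; for this demo server, a quick stdlib sanity check (just an illustration, not part of the repro) would be something like:

# blocking fetch without aiohttp's client, roughly what I verified with curl
from urllib.request import urlopen

with urlopen("http://localhost:8080/hello") as resp:
    print(resp.status, resp.read())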
client.py
import sys
import traceback
import asyncio
from aiohttp import ClientSession, TCPConnector
import queue
import datetime

max_tasks = 10
err_count = 0
loop = asyncio.get_event_loop()
tq = queue.Queue()
rq = queue.Queue()


async def fetch(worker_id):
    global err_count
    url = "http://localhost:8080/hello"
    connector = TCPConnector(force_close=False)
    async with ClientSession(connector=connector) as session:
        while not tq.empty():
            num = tq.get()
            try:
                async with session.get(url=url, compress=True, allow_redirects=True) as response:
                    txt = await response.read()
                    sys.stdout.write("[{}]: {}: OK: {}: {}. result: {}\n".format(
                        worker_id, datetime.datetime.now(), num, response.url, txt))
                    rq.put(num)
                    sys.stdout.write("{}\n{}\n".format(response.cookies, response.raw_headers))
                    sys.stdout.write("{}\n".format('-' * 10))
            except Exception as e0:
                sys.stdout.write('[{}]: {}: Exception: {}\n'.format(worker_id, datetime.datetime.now(), e0))
                traceback.print_exc()
                tq.put(num)
                err_count += 1


async def control():
    global err_count
    dif = 0
    prev_size = 0
    qsize = rq.qsize()
    while qsize < max_tasks:
        await asyncio.sleep(1)
        qsize = rq.qsize()
        sys.stdout.write('{}: qsize: {}; dif: {}; err_count: {}\n'.format(
            datetime.datetime.now(), qsize, qsize - prev_size, err_count
        ))
        err_count = 0
        prev_size = qsize
    loop.stop()


for t in range(max_tasks):
    tq.put(t)

asyncio.ensure_future(control())

workers_count = 1
for i in range(workers_count):
    asyncio.ensure_future(fetch(i))

loop.run_forever()
loop.close()
Start server.py, then run client.py. I got logs like this from the client:
[0]: 2018-11-29 14:36:04.136142: OK: 0: http://localhost:8080/hello. result: b'Hello world!!!'
....
then no more results, and after about a minute:
....
    async with session.get(url=url, compress=True, allow_redirects=True) as response:
  File "/usr/local/lib/python3.6/site-packages/aiohttp/client.py", line 855, in __aenter__
    self._resp = await self._coro
  File "/usr/local/lib/python3.6/site-packages/aiohttp/client.py", line 391, in _request
    await resp.start(conn)
  File "/usr/local/lib/python3.6/site-packages/aiohttp/client_reqrep.py", line 757, in start
    message, payload = await self._protocol.read()
  File "/usr/local/lib/python3.6/site-packages/aiohttp/streams.py", line 543, in read
    await self._waiter
aiohttp.client_exceptions.ServerDisconnectedError: None
After this message there is another OK result, and then the same error again.
Your environment
Ubuntu 14.04
Python 3.6.1
aiohttp 3.4.4