Skip to content

Commit 5c17b64

Browse files
Benchmark tests for decompression optimisations (#12358) (#12381)
(cherry picked from commit adf7799)
1 parent ceca3a1 commit 5c17b64

5 files changed

Lines changed: 75 additions & 30 deletions

File tree

CHANGES/12358.misc.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Changed ``zlib_executor_size`` default so compressed payloads are async by default -- by :user:`Dreamsorcerer`.

aiohttp/web_response.py

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616

1717
from . import hdrs, payload
1818
from .abc import AbstractStreamWriter
19-
from .compression_utils import ZLibCompressor
19+
from .compression_utils import MAX_SYNC_CHUNK_SIZE, ZLibCompressor
2020
from .helpers import (
2121
ETAG_ANY,
2222
QUOTED_ETAG_RE,
@@ -35,7 +35,6 @@
3535
from .typedefs import JSONBytesEncoder, JSONEncoder, LooseHeaders
3636

3737
REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
38-
LARGE_BODY_SIZE = 1024**2
3938

4039
__all__ = (
4140
"ContentCoding",
@@ -665,7 +664,7 @@ def __init__(
665664
headers: LooseHeaders | None = None,
666665
content_type: str | None = None,
667666
charset: str | None = None,
668-
zlib_executor_size: int | None = None,
667+
zlib_executor_size: int = MAX_SYNC_CHUNK_SIZE,
669668
zlib_executor: Executor | None = None,
670669
) -> None:
671670
if body is not None and text is not None:
@@ -846,13 +845,6 @@ async def _do_start_compression(self, coding: ContentCoding) -> None:
846845
executor=self._zlib_executor,
847846
)
848847
assert self._body is not None
849-
if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
850-
warnings.warn(
851-
"Synchronous compression of large response bodies "
852-
f"({len(self._body)} bytes) might block the async event loop. "
853-
"Consider providing a custom value to zlib_executor_size/"
854-
"zlib_executor response properties or disabling compression on it."
855-
)
856848
self._compressed_body = (
857849
await compressor.compress(self._body) + compressor.flush()
858850
)

tests/test_benchmarks_client.py

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -504,6 +504,36 @@ def _run() -> None:
504504
loop.run_until_complete(run_client_benchmark())
505505

506506

507+
@pytest.mark.usefixtures("parametrize_zlib_backend")
def test_ten_compressed_responses_iter_chunked_1mb(
    loop: asyncio.AbstractEventLoop,
    aiohttp_client: AiohttpClient,
    benchmark: BenchmarkFixture,
) -> None:
    """Benchmark compressed GET request read via large iter_chunked."""
    chunk_size = 2**20  # 1 MiB read granularity
    payload = b"x" * (10 * chunk_size)

    async def handler(request: web.Request) -> web.Response:
        # Serve the fixed body with on-the-fly compression enabled.
        response = web.Response(body=payload)
        response.enable_compression()
        return response

    app = web.Application()
    app.router.add_route("GET", "/", handler)

    async def exercise_client() -> None:
        client = await aiohttp_client(app)
        resp = await client.get("/")
        # Drain the decompressed stream in 1 MiB chunks; the data itself
        # is discarded — only the read path is being measured.
        async for _ in resp.content.iter_chunked(chunk_size):
            pass
        await client.close()

    @benchmark
    def _run() -> None:
        loop.run_until_complete(exercise_client())
535+
536+
507537
def test_ten_streamed_responses_iter_chunks(
508538
loop: asyncio.AbstractEventLoop,
509539
aiohttp_client: AiohttpClient,
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
"""codspeed benchmarks for web request reading."""
2+
3+
import asyncio
4+
import zlib
5+
6+
import pytest
7+
from pytest_codspeed import BenchmarkFixture
8+
9+
from aiohttp import web
10+
from aiohttp.pytest_plugin import AiohttpClient
11+
12+
13+
@pytest.mark.usefixtures("parametrize_zlib_backend")
def test_read_compressed_post_body(
    loop: asyncio.AbstractEventLoop,
    aiohttp_client: AiohttpClient,
    benchmark: BenchmarkFixture,
) -> None:
    """Benchmark server Request.read() with a compressed POST body."""
    MiB = 2**20
    plain = b"B" * (5 * MiB)
    deflated = zlib.compress(plain)

    async def handler(request: web.Request) -> web.Response:
        # Echo back the decompressed length so the client can verify
        # the server inflated the body correctly.
        received = await request.read()
        return web.Response(text=str(len(received)))

    # Raise the size cap so the 5 MiB inflated body is accepted.
    app = web.Application(client_max_size=10 * MiB)
    app.router.add_post("/", handler)

    async def post_and_verify() -> None:
        client = await aiohttp_client(app)
        resp = await client.post(
            "/",
            data=deflated,
            headers={"Content-Encoding": "deflate"},
        )
        assert int(await resp.read()) == len(plain)
        await client.close()

    @benchmark
    def _run() -> None:
        loop.run_until_complete(post_and_verify())

tests/test_web_response.py

Lines changed: 0 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -584,26 +584,6 @@ async def test_force_compression_deflate() -> None:
584584
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
585585

586586

587-
@pytest.mark.usefixtures("parametrize_zlib_backend")
588-
async def test_force_compression_deflate_large_payload() -> None:
589-
"""Make sure a warning is thrown for large payloads compressed in the event loop."""
590-
req = make_request(
591-
"GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
592-
)
593-
resp = Response(body=b"large")
594-
595-
resp.enable_compression(ContentCoding.deflate)
596-
assert resp.compression
597-
598-
with (
599-
pytest.warns(Warning, match="Synchronous compression of large response bodies"),
600-
mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2),
601-
):
602-
msg = await resp.prepare(req)
603-
assert msg is not None
604-
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
605-
606-
607587
@pytest.mark.usefixtures("parametrize_zlib_backend")
608588
async def test_force_compression_no_accept_deflate() -> None:
609589
req = make_request("GET", "/")

0 commit comments

Comments
 (0)