11import asyncio
22import collections
3+ import sys
34import warnings
45from collections .abc import Awaitable , Callable
56from typing import Final , Generic , TypeVar
@@ -67,31 +68,7 @@ async def __anext__(self) -> tuple[bytes, bool]:
6768 return rv
6869
6970
class AsyncStreamReaderMixin:
    """Mixin providing async-iteration helpers over a stream reader.

    The concrete class is expected to supply ``readline``, ``read`` and
    ``readany`` coroutines (hence the ``attr-defined`` ignores below).
    """

    __slots__ = ()

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        # Default async iteration goes line by line.
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n."""
        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore[attr-defined]

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
92-
93-
94- class StreamReader (AsyncStreamReaderMixin ):
71+ class StreamReader :
9572 """An enhancement of asyncio.StreamReader.
9673
9774 Supports asynchronous iteration by line, chunk or as available::
@@ -176,9 +153,35 @@ def __repr__(self) -> str:
176153 info .append ("e=%r" % self ._exception )
177154 return "<%s>" % " " .join (info )
178155
156+ def __aiter__ (self ) -> AsyncStreamIterator [bytes ]:
157+ return AsyncStreamIterator (self .readline )
158+
159+ def iter_chunked (self , n : int ) -> AsyncStreamIterator [bytes ]:
160+ """Returns an asynchronous iterator that yields chunks of size n."""
161+ self .set_read_chunk_size (n )
162+ return AsyncStreamIterator (lambda : self .read (n ))
163+
164+ def iter_any (self ) -> AsyncStreamIterator [bytes ]:
165+ """Yield all available data as soon as it is received."""
166+ return AsyncStreamIterator (self .readany )
167+
168+ def iter_chunks (self ) -> ChunkTupleAsyncStreamIterator :
169+ """Yield chunks of data as they are received by the server.
170+
171+ The yielded objects are tuples
172+ of (bytes, bool) as returned by the StreamReader.readchunk method.
173+ """
174+ return ChunkTupleAsyncStreamIterator (self )
175+
179176 def get_read_buffer_limits (self ) -> tuple [int , int ]:
180177 return (self ._low_water , self ._high_water )
181178
179+ def set_read_chunk_size (self , n : int ) -> None :
180+ """Raise buffer limits to match the consumer's chunk size."""
181+ if n > self ._low_water :
182+ self ._low_water = n
183+ self ._high_water = n * 2
184+
182185 def exception (self ) -> BaseException | None :
183186 return self ._exception
184187
@@ -427,10 +430,8 @@ async def read(self, n: int = -1) -> bytes:
427430 return b""
428431
429432 if n < 0 :
430- # This used to just loop creating a new waiter hoping to
431- # collect everything in self._buffer, but that would
432- # deadlock if the subprocess sends more than self.limit
433- # bytes. So just call self.readany() until EOF.
433+ # Reading everything — remove decompression chunk limit.
434+ self .set_read_chunk_size (sys .maxsize )
434435 blocks = []
435436 while True :
436437 block = await self .readany ()
@@ -439,6 +440,7 @@ async def read(self, n: int = -1) -> bytes:
439440 blocks .append (block )
440441 return b"" .join (blocks )
441442
443+ self .set_read_chunk_size (n )
442444 # TODO: should be `if` instead of `while`
443445 # because waiter maybe triggered on chunk end,
444446 # without feeding any data
@@ -612,6 +614,9 @@ async def wait_eof(self) -> None:
612614 def feed_data (self , data : bytes , n : int = 0 ) -> bool :
613615 return False
614616
617+ def set_read_chunk_size (self , n : int ) -> None :
618+ return
619+
615620 async def readline (self , * , max_line_length : int | None = None ) -> bytes :
616621 return b""
617622
0 commit comments