gh-117722: Fix Stream.readuntil with non-bytes buffer objects (#117723)

gh-16429 introduced support for an iterable of separators in
Stream.readuntil. Since bytes-like types are themselves iterable, this
can introduce ambiguities in deciding whether the argument is an
iterator of separators or a singleton separator. In gh-16429, only 'bytes'
was considered a singleton, but this will break code that passes other
buffer object types.

Fix it by only supporting tuples rather than arbitrary iterables.

Closes gh-117722.
This commit is contained in:
Bruce Merry 2024-04-11 16:41:55 +02:00 committed by GitHub
parent 898f6de63f
commit 01a51f9494
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 27 additions and 14 deletions

View file

@@ -260,7 +260,7 @@ StreamReader
buffer is reset. The :attr:`IncompleteReadError.partial` attribute
may contain a portion of the separator.
The *separator* may also be an :term:`iterable` of separators. In this
The *separator* may also be a tuple of separators. In this
case the return value will be the shortest possible that has any
separator as the suffix. For the purposes of :exc:`LimitOverrunError`,
the shortest possible separator is considered to be the one that
@@ -270,7 +270,7 @@ StreamReader
.. versionchanged:: 3.13
The *separator* parameter may now be an :term:`iterable` of
The *separator* parameter may now be a :class:`tuple` of
separators.
.. method:: at_eof()

View file

@@ -324,6 +324,10 @@ asyncio
:exc:`asyncio.QueueShutDown`) for queue termination.
(Contributed by Laurie Opperman and Yves Duprat in :gh:`104228`.)
* Accept a tuple of separators in :meth:`asyncio.StreamReader.readuntil`,
stopping when one of them is encountered.
(Contributed by Bruce Merry in :gh:`81322`.)
base64
------

View file

@@ -591,17 +591,17 @@ class StreamReader:
LimitOverrunError exception will be raised, and the data
will be left in the internal buffer, so it can be read again.
The ``separator`` may also be an iterable of separators. In this
The ``separator`` may also be a tuple of separators. In this
case the return value will be the shortest possible that has any
separator as the suffix. For the purposes of LimitOverrunError,
the shortest possible separator is considered to be the one that
matched.
"""
if isinstance(separator, bytes):
separator = [separator]
else:
# Makes sure shortest matches wins, and supports arbitrary iterables
if isinstance(separator, tuple):
# Makes sure shortest matches wins
separator = sorted(separator, key=len)
else:
separator = [separator]
if not separator:
raise ValueError('Separator should contain at least one element')
min_seplen = len(separator[0])

View file

@@ -384,9 +384,9 @@ class StreamTests(test_utils.TestCase):
with self.assertRaisesRegex(ValueError, 'Separator should be'):
self.loop.run_until_complete(stream.readuntil(separator=b''))
with self.assertRaisesRegex(ValueError, 'Separator should be'):
self.loop.run_until_complete(stream.readuntil(separator=[b'']))
self.loop.run_until_complete(stream.readuntil(separator=(b'',)))
with self.assertRaisesRegex(ValueError, 'Separator should contain'):
self.loop.run_until_complete(stream.readuntil(separator=[]))
self.loop.run_until_complete(stream.readuntil(separator=()))
def test_readuntil_multi_chunks(self):
stream = asyncio.StreamReader(loop=self.loop)
@@ -475,15 +475,15 @@ class StreamTests(test_utils.TestCase):
# Simple case
stream.feed_data(b'line 1\nline 2\r')
data = self.loop.run_until_complete(stream.readuntil([b'\r', b'\n']))
data = self.loop.run_until_complete(stream.readuntil((b'\r', b'\n')))
self.assertEqual(b'line 1\n', data)
data = self.loop.run_until_complete(stream.readuntil([b'\r', b'\n']))
data = self.loop.run_until_complete(stream.readuntil((b'\r', b'\n')))
self.assertEqual(b'line 2\r', data)
self.assertEqual(b'', stream._buffer)
# First end position matches, even if that's a longer match
stream.feed_data(b'ABCDEFG')
data = self.loop.run_until_complete(stream.readuntil([b'DEF', b'BCDE']))
data = self.loop.run_until_complete(stream.readuntil((b'DEF', b'BCDE')))
self.assertEqual(b'ABCDE', data)
self.assertEqual(b'FG', stream._buffer)
@@ -493,7 +493,7 @@ class StreamTests(test_utils.TestCase):
with self.assertRaisesRegex(asyncio.LimitOverrunError,
'is found') as cm:
self.loop.run_until_complete(stream.readuntil([b'A', b'ome dataA']))
self.loop.run_until_complete(stream.readuntil((b'A', b'ome dataA')))
self.assertEqual(b'some dataA', stream._buffer)
@@ -504,7 +504,7 @@ class StreamTests(test_utils.TestCase):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'data')
readuntil_task = self.loop.create_task(stream.readuntil([b'A', b'long sep']))
readuntil_task = self.loop.create_task(stream.readuntil((b'A', b'long sep')))
self.loop.call_soon(stream.feed_data, b'Z')
self.loop.call_soon(stream.feed_data, b'Aaaa')
@@ -512,6 +512,13 @@ class StreamTests(test_utils.TestCase):
self.assertEqual(b'dataZA', data)
self.assertEqual(b'aaa', stream._buffer)
def test_readuntil_bytearray(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'some data\r\n')
data = self.loop.run_until_complete(stream.readuntil(bytearray(b'\r\n')))
self.assertEqual(b'some data\r\n', data)
self.assertEqual(b'', stream._buffer)
def test_readexactly_zero_or_less(self):
# Read exact number of bytes (zero or less).
stream = asyncio.StreamReader(loop=self.loop)

View file

@@ -0,0 +1,2 @@
Change the new multi-separator support in :meth:`asyncio.StreamReader.readuntil`
to only accept tuples of separators rather than arbitrary iterables.