20 changes: 20 additions & 0 deletions Lib/test/test_io/test_bufferedio.py
@@ -10,6 +10,7 @@
from collections import deque, UserList
from itertools import cycle, count
from test import support
from test.support import check_sanitizer
from test.support import os_helper, threading_helper
from .utils import byteslike, CTestCase, PyTestCase

@@ -623,6 +624,25 @@ def test_bad_readinto_type(self):
bufio.readline()
self.assertIsInstance(cm.exception.__cause__, TypeError)

@unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
@unittest.skipIf(check_sanitizer(thread=True),
                 'ThreadSanitizer aborts on huge allocations (exit code 66).')

Member: Will other sanitizers work?

Contributor Author: Only the non-free-threaded TSan build failed; the UBSan and free-threaded TSan builds are both OK. Please refer to https://github.com/python/cpython/actions/runs/20896527108/job/60035968975?pr=143690
def test_read1_error_does_not_cause_reentrant_failure(self):
Member: Does it need @bigmemtest decorator?

Contributor Author: Thank you. I don’t think @bigmemtest is needed here, since the goal is only to trigger the “request too large → failure” error path, not to actually allocate a large amount of memory.
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with self.open(os_helper.TESTFN, "wb") as f:
f.write(b"hello")

with self.open(os_helper.TESTFN, "rb", buffering=0) as raw:
bufio = self.tp(raw, buffer_size=8)
# Request a size far too large to ever be satisfied, so that the
# internal buffer allocation reliably fails with MemoryError.
huge = sys.maxsize // 2 + 1
with self.assertRaises(MemoryError):
bufio.read1(huge)

# Used to crash before gh-143689:
self.assertEqual(bufio.read1(1), b"h")


class PyBufferedReaderTest(BufferedReaderTest, PyTestCase):
tp = pyio.BufferedReader
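
For context, here is a minimal standalone sketch of the failure mode this test guards against. It assumes a 64-bit build; the pre-fix behaviour (the second read1 failing rather than returning data) is inferred from the test name and the lock pairing discussed below, and the temporary-file handling is illustrative:

    import io
    import sys
    import tempfile

    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b"hello")
        path = f.name

    raw = open(path, "rb", buffering=0)
    bufio = io.BufferedReader(raw, buffer_size=8)
    try:
        # Allocating a buffer of ~sys.maxsize/2 bytes fails up front.
        bufio.read1(sys.maxsize // 2 + 1)
    except MemoryError:
        pass
    # Before the fix, the buffer lock was never released on that error
    # path, so this call failed instead of returning the first byte.
    print(bufio.read1(1))   # b'h' with the fix applied
    bufio.close()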
@@ -0,0 +1 @@
Fix :meth:`io.BufferedReader.read1` state cleanup on buffer allocation failure.
1 change: 1 addition & 0 deletions Modules/_io/bufferedio.c
@@ -1073,6 +1073,7 @@ _io__Buffered_read1_impl(buffered *self, Py_ssize_t n)

PyBytesWriter *writer = PyBytesWriter_Create(n);
if (writer == NULL) {
LEAVE_BUFFERED(self)
return NULL;
}
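
The one-line fix restores the ENTER_BUFFERED/LEAVE_BUFFERED pairing on this early-return path. A simplified sketch of the locking discipline, with the macro internals elided (an illustration of the pattern, not the exact CPython source):

    /* ENTER_BUFFERED takes the per-object lock and records the owning
     * thread; every exit path must LEAVE_BUFFERED, otherwise the next
     * buffered call from the same thread trips the reentrancy check. */
    if (!ENTER_BUFFERED(self))
        return NULL;

    PyBytesWriter *writer = PyBytesWriter_Create(n);
    if (writer == NULL) {
        LEAVE_BUFFERED(self)   /* the fix: unlock before bailing out */
        return NULL;
    }
    /* ... fill the writer from the raw stream ... */
    LEAVE_BUFFERED(self)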
