gh-121464: Make concurrent iteration over enumerate safe under free-threading (#125734)

Pieter Eendebak 2025-03-13 19:44:05 +01:00 committed by GitHub
parent 7ea6e88eb4
commit ec46a55d63
3 changed files with 77 additions and 20 deletions


@@ -0,0 +1,38 @@
import unittest
import sys

from threading import Thread, Barrier

from test.support import threading_helper

threading_helper.requires_working_threading(module=True)


class EnumerateThreading(unittest.TestCase):

    @threading_helper.reap_threads
    def test_threading(self):
        number_of_threads = 10
        number_of_iterations = 8
        n = 100
        start = sys.maxsize - 40
        barrier = Barrier(number_of_threads)

        def work(enum):
            barrier.wait()
            while True:
                try:
                    _ = next(enum)
                except StopIteration:
                    break

        for it in range(number_of_iterations):
            enum = enumerate(tuple(range(start, start + n)))
            worker_threads = []
            for ii in range(number_of_threads):
                worker_threads.append(
                    Thread(target=work, args=[enum]))
            with threading_helper.start_threads(worker_threads):
                pass

            barrier.reset()

if __name__ == "__main__":
    unittest.main()


@@ -0,0 +1 @@
Make concurrent iterations over the same :func:`enumerate` iterator safe under free-threading. See `Strategy for Iterators in Free Threading <https://github.com/python/cpython/issues/124397>`_.
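As a rough illustration of what this entry covers (a sketch, not code from the commit), the snippet below shares a single enumerate iterator between several threads. On the free-threaded build this must not crash or corrupt the iterator; how the (index, value) pairs are distributed across threads, and in what order, remains unspecified. The thread count and input string are arbitrary choices for the example.

from threading import Thread

def drain(shared, out):
    # Keep pulling (index, value) pairs from the shared iterator until it is exhausted.
    for pair in shared:
        out.append(pair)

shared = enumerate("abcdefghijklmnop")
buckets = [[] for _ in range(4)]
threads = [Thread(target=drain, args=(shared, out)) for out in buckets]
for t in threads:
    t.start()
for t in threads:
    t.join()

# Each thread received some subset of the pairs; not crashing is the guarantee here.
print([len(out) for out in buckets])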


@@ -171,32 +171,45 @@ enum_traverse(PyObject *op, visitproc visit, void *arg)
     return 0;
 }
 
+// increment en_longindex with lock held, return the next index to be used
+// or NULL on error
+static inline PyObject *
+increment_longindex_lock_held(enumobject *en)
+{
+    PyObject *next_index = en->en_longindex;
+    if (next_index == NULL) {
+        next_index = PyLong_FromSsize_t(PY_SSIZE_T_MAX);
+        if (next_index == NULL) {
+            return NULL;
+        }
+    }
+    assert(next_index != NULL);
+    PyObject *stepped_up = PyNumber_Add(next_index, en->one);
+    if (stepped_up == NULL) {
+        return NULL;
+    }
+    en->en_longindex = stepped_up;
+    return next_index;
+}
+
 static PyObject *
 enum_next_long(enumobject *en, PyObject* next_item)
 {
     PyObject *result = en->en_result;
     PyObject *next_index;
-    PyObject *stepped_up;
     PyObject *old_index;
     PyObject *old_item;
 
-    if (en->en_longindex == NULL) {
-        en->en_longindex = PyLong_FromSsize_t(PY_SSIZE_T_MAX);
-        if (en->en_longindex == NULL) {
-            Py_DECREF(next_item);
-            return NULL;
-        }
-    }
-    next_index = en->en_longindex;
-    assert(next_index != NULL);
-    stepped_up = PyNumber_Add(next_index, en->one);
-    if (stepped_up == NULL) {
+    Py_BEGIN_CRITICAL_SECTION(en);
+    next_index = increment_longindex_lock_held(en);
+    Py_END_CRITICAL_SECTION();
+    if (next_index == NULL) {
         Py_DECREF(next_item);
         return NULL;
     }
-    en->en_longindex = stepped_up;
 
-    if (Py_REFCNT(result) == 1) {
+    if (_PyObject_IsUniquelyReferenced(result)) {
         Py_INCREF(result);
         old_index = PyTuple_GET_ITEM(result, 0);
         old_item = PyTuple_GET_ITEM(result, 1);
@@ -237,17 +250,18 @@ enum_next(PyObject *op)
     if (next_item == NULL)
         return NULL;
 
-    if (en->en_index == PY_SSIZE_T_MAX)
+    Py_ssize_t en_index = FT_ATOMIC_LOAD_SSIZE_RELAXED(en->en_index);
+    if (en_index == PY_SSIZE_T_MAX)
         return enum_next_long(en, next_item);
 
-    next_index = PyLong_FromSsize_t(en->en_index);
+    next_index = PyLong_FromSsize_t(en_index);
     if (next_index == NULL) {
         Py_DECREF(next_item);
         return NULL;
     }
-    en->en_index++;
+    FT_ATOMIC_STORE_SSIZE_RELAXED(en->en_index, en_index + 1);
 
-    if (Py_REFCNT(result) == 1) {
+    if (_PyObject_IsUniquelyReferenced(result)) {
         Py_INCREF(result);
         old_index = PyTuple_GET_ITEM(result, 0);
         old_item = PyTuple_GET_ITEM(result, 1);
@@ -277,10 +291,14 @@ static PyObject *
 enum_reduce(PyObject *op, PyObject *Py_UNUSED(ignored))
 {
     enumobject *en = _enumobject_CAST(op);
+    PyObject *result;
+    Py_BEGIN_CRITICAL_SECTION(en);
     if (en->en_longindex != NULL)
-        return Py_BuildValue("O(OO)", Py_TYPE(en), en->en_sit, en->en_longindex);
+        result = Py_BuildValue("O(OO)", Py_TYPE(en), en->en_sit, en->en_longindex);
     else
-        return Py_BuildValue("O(On)", Py_TYPE(en), en->en_sit, en->en_index);
+        result = Py_BuildValue("O(On)", Py_TYPE(en), en->en_sit, en->en_index);
+    Py_END_CRITICAL_SECTION();
+    return result;
 }
 
 PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
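For context on the enum_reduce hunk above: __reduce__ is the hook that pickle and copy use to snapshot the iterator's position, which is presumably why the index read now happens under the critical section as well. A small sketch of the behavior it backs (ordinary pickling of an enumerate object, not code from this commit):

import pickle

e = enumerate("abc")
next(e)                                # consumes (0, 'a'); the saved index is now 1
clone = pickle.loads(pickle.dumps(e))  # round-trips through enumerate.__reduce__
print(list(clone))                     # [(1, 'b'), (2, 'c')]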