Mirror of https://github.com/python/cpython.git, synced 2025-07-08 03:45:36 +00:00.
GH-132554: "Virtual" iterators (GH-132555)
* FOR_ITER now pushes either the iterator and NULL or leaves the iterable and pushes tagged zero.
* NEXT_ITER uses the tagged int as the index into the sequence or, if TOS is NULL, iterates as before.
Parent: 9300a596d3
Commit: f6f4e8a662
25 changed files with 713 additions and 618 deletions
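To make the new stack discipline easier to picture, here is a rough conceptual model in plain Python. It is illustrative only, not the interpreter code, and the function name is invented for the sketch: for exact lists and tuples, GET_ITER now leaves the sequence itself on the stack next to a tagged index and FOR_ITER walks it by position; for every other iterable the index slot holds NULL and the ordinary iterator protocol is used, as before.

def virtual_for_iter(iterable):
    """Illustrative model only -- not the C implementation."""
    if type(iterable) in (list, tuple):
        # "Virtual" iterator: keep the sequence plus a tagged index starting at 0.
        index = 0
        while index < len(iterable):
            yield iterable[index]
            index += 1  # cf. PyStackRef_IncrementTaggedIntNoOverflow in the diff below
    else:
        # Index slot is NULL: fall back to the normal iterator protocol.
        for item in iter(iterable):
            yield item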
@@ -341,7 +341,7 @@ dummy_func(
}

pure inst(POP_TOP, (value --)) {
PyStackRef_CLOSE(value);
PyStackRef_XCLOSE(value);
}

tier2 op(_POP_TWO, (nos, tos --)) {
@@ -362,9 +362,14 @@ dummy_func(
PyStackRef_CLOSE(value);
}

macro(POP_ITER) = POP_TOP;

no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) {
inst(POP_ITER, (iter, index_or_null -- )) {
(void)index_or_null;
DEAD(index_or_null);
PyStackRef_CLOSE(iter);
}

no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, index_or_null, value -- receiver, index_or_null)) {
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
if (PyStackRef_GenCheck(receiver)) {
@@ -376,7 +381,9 @@ dummy_func(
PyStackRef_CLOSE(value);
}

tier1 inst(INSTRUMENTED_POP_ITER, (iter -- )) {
tier1 inst(INSTRUMENTED_POP_ITER, (iter, index_or_null -- )) {
(void)index_or_null;
DEAD(index_or_null);
INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT);
PyStackRef_CLOSE(iter);
}
@@ -3041,15 +3048,24 @@ dummy_func(
values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o);
}

inst(GET_ITER, (iterable -- iter)) {
inst(GET_ITER, (iterable -- iter, index_or_null)) {
#ifdef Py_STATS
_Py_GatherStats_GetIter(iterable);
#endif
/* before: [obj]; after [getiter(obj)] */
PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
PyStackRef_CLOSE(iterable);
ERROR_IF(iter_o == NULL);
iter = PyStackRef_FromPyObjectSteal(iter_o);
PyTypeObject *tp = PyStackRef_TYPE(iterable);
if (tp == &PyTuple_Type || tp == &PyList_Type) {
iter = iterable;
DEAD(iterable);
index_or_null = PyStackRef_TagInt(0);
}
else {
PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
PyStackRef_CLOSE(iterable);
ERROR_IF(iter_o == NULL);
iter = PyStackRef_FromPyObjectSteal(iter_o);
index_or_null = PyStackRef_NULL;
}
}

inst(GET_YIELD_FROM_ITER, (iterable -- iter)) {
@@ -3096,11 +3112,11 @@ dummy_func(
FOR_ITER_GEN,
};

specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter -- iter)) {
specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter, null_or_index -- iter, null_or_index)) {
#if ENABLE_SPECIALIZATION_FT
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
next_instr = this_instr;
_Py_Specialize_ForIter(iter, next_instr, oparg);
_Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
OPCODE_DEFERRED_INC(FOR_ITER);
@@ -3108,33 +3124,44 @@ dummy_func(
#endif /* ENABLE_SPECIALIZATION_FT */
}

replaced op(_FOR_ITER, (iter -- iter, next)) {
replaced op(_FOR_ITER, (iter, null_or_index -- iter, null_or_index, next)) {
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
if (next_o == NULL) {
if (_PyErr_Occurred(tstate)) {
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
if (!matches) {
ERROR_NO_POP();
}
_PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
if (PyStackRef_IsTaggedInt(null_or_index)) {
next = _PyForIter_NextWithIndex(iter_o, null_or_index);
if (PyStackRef_IsNull(next)) {
null_or_index = PyStackRef_TagInt(-1);
JUMPBY(oparg + 1);
DISPATCH();
}
/* iterator ended normally */
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
/* Jump forward oparg, then skip following END_FOR */
JUMPBY(oparg + 1);
DISPATCH();
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}
else {
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
if (next_o == NULL) {
if (_PyErr_Occurred(tstate)) {
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
if (!matches) {
ERROR_NO_POP();
}
_PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
/* Jump forward oparg, then skip following END_FOR */
JUMPBY(oparg + 1);
DISPATCH();
}
next = PyStackRef_FromPyObjectSteal(next_o);
}
next = PyStackRef_FromPyObjectSteal(next_o);
// Common case: no jump, leave it to the code generator
}

op(_FOR_ITER_TIER_TWO, (iter -- iter, next)) {
op(_FOR_ITER_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) {
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
EXIT_IF(!PyStackRef_IsNull(null_or_index));
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
if (next_o == NULL) {
if (_PyErr_Occurred(tstate)) {
@@ -3156,63 +3183,63 @@ dummy_func(
macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER;

inst(INSTRUMENTED_FOR_ITER, (unused/1, iter -- iter, next)) {
inst(INSTRUMENTED_FOR_ITER, (unused/1, iter, null_or_index -- iter, null_or_index, next)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
if (next_o != NULL) {
next = PyStackRef_FromPyObjectSteal(next_o);
if (PyStackRef_IsTaggedInt(null_or_index)) {
next = _PyForIter_NextWithIndex(iter_o, null_or_index);
if (PyStackRef_IsNull(next)) {
JUMPBY(oparg + 1);
DISPATCH();
}
INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
}
else {
if (_PyErr_Occurred(tstate)) {
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
if (!matches) {
ERROR_NO_POP();
}
_PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
if (next_o != NULL) {
next = PyStackRef_FromPyObjectSteal(next_o);
INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
}
else {
if (_PyErr_Occurred(tstate)) {
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
if (!matches) {
ERROR_NO_POP();
}
_PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
/* Skip END_FOR */
JUMPBY(oparg + 1);
DISPATCH();
}
/* iterator ended normally */
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
/* Skip END_FOR */
JUMPBY(oparg + 1);
DISPATCH();
}
}

op(_ITER_CHECK_LIST, (iter -- iter)) {
op(_ITER_CHECK_LIST, (iter, null_or_index -- iter, null_or_index)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
EXIT_IF(Py_TYPE(iter_o) != &PyListIter_Type);
EXIT_IF(Py_TYPE(iter_o) != &PyList_Type);
assert(PyStackRef_IsTaggedInt(null_or_index));
#ifdef Py_GIL_DISABLED
EXIT_IF(!_PyObject_IsUniquelyReferenced(iter_o));
_PyListIterObject *it = (_PyListIterObject *)iter_o;
EXIT_IF(!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) ||
!_PyObject_GC_IS_SHARED(it->it_seq));
EXIT_IF(!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o));
#endif
}

replaced op(_ITER_JUMP_LIST, (iter -- iter)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(iter_o) == &PyListIter_Type);
// For free-threaded Python, the loop exit can happen at any point during
// item retrieval, so it doesn't make much sense to check and jump
// separately before item retrieval. Any length check we do here can be
// invalid by the time we actually try to fetch the item.
replaced op(_ITER_JUMP_LIST, (iter, null_or_index -- iter, null_or_index)) {
#ifdef Py_GIL_DISABLED
assert(_PyObject_IsUniquelyReferenced(iter_o));
(void)iter_o;
// For free-threaded Python, the loop exit can happen at any point during
// item retrieval, so it doesn't make much sense to check and jump
// separately before item retrieval. Any length check we do here can be
// invalid by the time we actually try to fetch the item.
#else
_PyListIterObject *it = (_PyListIterObject *)iter_o;
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(list_o) == &PyList_Type);
STAT_INC(FOR_ITER, hit);
PyListObject *seq = it->it_seq;
if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
it->it_index = -1;
if (seq != NULL) {
it->it_seq = NULL;
Py_DECREF(seq);
}
if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) {
null_or_index = PyStackRef_TagInt(-1);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
@@ -3221,73 +3248,54 @@ dummy_func(
}

// Only used by Tier 2
op(_GUARD_NOT_EXHAUSTED_LIST, (iter -- iter)) {
op(_GUARD_NOT_EXHAUSTED_LIST, (iter, null_or_index -- iter, null_or_index)) {
#ifndef Py_GIL_DISABLED
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
_PyListIterObject *it = (_PyListIterObject *)iter_o;
assert(Py_TYPE(iter_o) == &PyListIter_Type);
PyListObject *seq = it->it_seq;
EXIT_IF(seq == NULL);
if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
it->it_index = -1;
EXIT_IF(1);
}
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(list_o) == &PyList_Type);
EXIT_IF((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o));
#endif
}

replaced op(_ITER_NEXT_LIST, (iter -- iter, next)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
_PyListIterObject *it = (_PyListIterObject *)iter_o;
assert(Py_TYPE(iter_o) == &PyListIter_Type);
PyListObject *seq = it->it_seq;
assert(seq);
replaced op(_ITER_NEXT_LIST, (iter, null_or_index -- iter, null_or_index, next)) {
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
assert(PyList_CheckExact(list_o));
#ifdef Py_GIL_DISABLED
assert(_PyObject_IsUniquelyReferenced(iter_o));
assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
_PyObject_GC_IS_SHARED(seq));
assert(_Py_IsOwnedByCurrentThread(list_o) ||
_PyObject_GC_IS_SHARED(list_o));
STAT_INC(FOR_ITER, hit);
int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
// A negative result means we lost a race with another thread
// and we need to take the slow path.
DEOPT_IF(result < 0);
if (result == 0) {
it->it_index = -1;
null_or_index = PyStackRef_TagInt(-1);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
}
it->it_index++;
#else
assert(it->it_index < PyList_GET_SIZE(seq));
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
#endif
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}

// Only used by Tier 2
op(_ITER_NEXT_LIST_TIER_TWO, (iter -- iter, next)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
_PyListIterObject *it = (_PyListIterObject *)iter_o;
assert(Py_TYPE(iter_o) == &PyListIter_Type);
PyListObject *seq = it->it_seq;
assert(seq);
op(_ITER_NEXT_LIST_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) {
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
assert(PyList_CheckExact(list_o));
#ifdef Py_GIL_DISABLED
assert(_PyObject_IsUniquelyReferenced(iter_o));
assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
_PyObject_GC_IS_SHARED(seq));
assert(_Py_IsOwnedByCurrentThread((PyObject *)list_o) ||
_PyObject_GC_IS_SHARED(list_o));
STAT_INC(FOR_ITER, hit);
int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
// A negative result means we lost a race with another thread
// and we need to take the slow path.
EXIT_IF(result < 0);
if (result == 0) {
it->it_index = -1;
EXIT_IF(1);
}
it->it_index++;
DEOPT_IF(result <= 0);
#else
assert(it->it_index < PyList_GET_SIZE(seq));
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
assert(PyStackRef_UntagInt(null_or_index) < PyList_GET_SIZE(list_o));
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
#endif
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}

macro(FOR_ITER_LIST) =
@@ -3296,31 +3304,19 @@ dummy_func(
_ITER_JUMP_LIST +
_ITER_NEXT_LIST;

op(_ITER_CHECK_TUPLE, (iter -- iter)) {
op(_ITER_CHECK_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
EXIT_IF(Py_TYPE(iter_o) != &PyTupleIter_Type);
#ifdef Py_GIL_DISABLED
EXIT_IF(!_PyObject_IsUniquelyReferenced(iter_o));
#endif
EXIT_IF(Py_TYPE(iter_o) != &PyTuple_Type);
assert(PyStackRef_IsTaggedInt(null_or_index));
}

replaced op(_ITER_JUMP_TUPLE, (iter -- iter)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
(void)iter_o;
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
#ifdef Py_GIL_DISABLED
assert(_PyObject_IsUniquelyReferenced(iter_o));
#endif
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
replaced op(_ITER_JUMP_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
(void)tuple_o;
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
STAT_INC(FOR_ITER, hit);
PyTupleObject *seq = it->it_seq;
if (seq == NULL || (size_t)it->it_index >= (size_t)PyTuple_GET_SIZE(seq)) {
#ifndef Py_GIL_DISABLED
if (seq != NULL) {
it->it_seq = NULL;
Py_DECREF(seq);
}
#endif
if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) {
null_or_index = PyStackRef_TagInt(-1);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
@@ -3328,29 +3324,19 @@ dummy_func(
}

// Only used by Tier 2
op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter -- iter)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
#ifdef Py_GIL_DISABLED
assert(_PyObject_IsUniquelyReferenced(iter_o));
#endif
PyTupleObject *seq = it->it_seq;
EXIT_IF(seq == NULL);
EXIT_IF(it->it_index >= PyTuple_GET_SIZE(seq));
op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
EXIT_IF((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o));
}

op(_ITER_NEXT_TUPLE, (iter -- iter, next)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
PyTupleObject *seq = it->it_seq;
#ifdef Py_GIL_DISABLED
assert(_PyObject_IsUniquelyReferenced(iter_o));
#endif
assert(seq);
assert(it->it_index < PyTuple_GET_SIZE(seq));
next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
op(_ITER_NEXT_TUPLE, (iter, null_or_index -- iter, null_or_index, next)) {
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
uintptr_t i = PyStackRef_UntagInt(null_or_index);
assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o));
next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i));
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}

macro(FOR_ITER_TUPLE) =
@@ -3359,7 +3345,7 @@ dummy_func(
_ITER_JUMP_TUPLE +
_ITER_NEXT_TUPLE;

op(_ITER_CHECK_RANGE, (iter -- iter)) {
op(_ITER_CHECK_RANGE, (iter, null_or_index -- iter, null_or_index)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
EXIT_IF(Py_TYPE(r) != &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED

@@ -3367,7 +3353,7 @@ dummy_func(
#endif
}

replaced op(_ITER_JUMP_RANGE, (iter -- iter)) {
replaced op(_ITER_JUMP_RANGE, (iter, null_or_index -- iter, null_or_index)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED

@@ -3382,13 +3368,13 @@ dummy_func(
}

// Only used by Tier 2
op(_GUARD_NOT_EXHAUSTED_RANGE, (iter -- iter)) {
op(_GUARD_NOT_EXHAUSTED_RANGE, (iter, null_or_index -- iter, null_or_index)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
EXIT_IF(r->len <= 0);
}

op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
op(_ITER_NEXT_RANGE, (iter, null_or_index -- iter, null_or_index, next)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED

@@ -3409,7 +3395,7 @@ dummy_func(
_ITER_JUMP_RANGE +
_ITER_NEXT_RANGE;

op(_FOR_ITER_GEN_FRAME, (iter -- iter, gen_frame: _PyInterpreterFrame*)) {
op(_FOR_ITER_GEN_FRAME, (iter, null -- iter, null, gen_frame: _PyInterpreterFrame*)) {
PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type);
#ifdef Py_GIL_DISABLED
@@ -3425,6 +3425,26 @@ _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *na
return value;
}

_PyStackRef
_PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index)
{
assert(PyStackRef_IsTaggedInt(index));
assert(PyTuple_CheckExact(seq) || PyList_CheckExact(seq));
intptr_t i = PyStackRef_UntagInt(index);
if (PyTuple_CheckExact(seq)) {
size_t size = PyTuple_GET_SIZE(seq);
if ((size_t)i >= size) {
return PyStackRef_NULL;
}
return PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, i));
}
PyObject *item = _PyList_GetItemRef((PyListObject *)seq, i);
if (item == NULL) {
return PyStackRef_NULL;
}
return PyStackRef_FromPyObjectSteal(item);
}

/* Check if a 'cls' provides the given special method. */
static inline int
type_has_special_method(PyTypeObject *cls, PyObject *name)
|
|
@ -525,6 +525,15 @@ codegen_unwind_fblock(compiler *c, location *ploc,
|
|||
return SUCCESS;
|
||||
|
||||
case COMPILE_FBLOCK_FOR_LOOP:
|
||||
/* Pop the iterator */
|
||||
if (preserve_tos) {
|
||||
ADDOP_I(c, *ploc, SWAP, 3);
|
||||
}
|
||||
ADDOP(c, *ploc, POP_TOP);
|
||||
ADDOP(c, *ploc, POP_TOP);
|
||||
return SUCCESS;
|
||||
|
||||
case COMPILE_FBLOCK_ASYNC_FOR_LOOP:
|
||||
/* Pop the iterator */
|
||||
if (preserve_tos) {
|
||||
ADDOP_I(c, *ploc, SWAP, 2);
|
||||
|
@ -629,7 +638,8 @@ codegen_unwind_fblock_stack(compiler *c, location *ploc,
|
|||
c, *ploc, "'break', 'continue' and 'return' cannot appear in an except* block");
|
||||
}
|
||||
if (loop != NULL && (top->fb_type == COMPILE_FBLOCK_WHILE_LOOP ||
|
||||
top->fb_type == COMPILE_FBLOCK_FOR_LOOP)) {
|
||||
top->fb_type == COMPILE_FBLOCK_FOR_LOOP ||
|
||||
top->fb_type == COMPILE_FBLOCK_ASYNC_FOR_LOOP)) {
|
||||
*loop = top;
|
||||
return SUCCESS;
|
||||
}
|
||||
|
@ -2125,7 +2135,7 @@ codegen_async_for(compiler *c, stmt_ty s)
|
|||
ADDOP(c, LOC(s->v.AsyncFor.iter), GET_AITER);
|
||||
|
||||
USE_LABEL(c, start);
|
||||
RETURN_IF_ERROR(_PyCompile_PushFBlock(c, loc, COMPILE_FBLOCK_FOR_LOOP, start, end, NULL));
|
||||
RETURN_IF_ERROR(_PyCompile_PushFBlock(c, loc, COMPILE_FBLOCK_ASYNC_FOR_LOOP, start, end, NULL));
|
||||
|
||||
/* SETUP_FINALLY to guard the __anext__ call */
|
||||
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
|
||||
|
@ -2142,7 +2152,7 @@ codegen_async_for(compiler *c, stmt_ty s)
|
|||
/* Mark jump as artificial */
|
||||
ADDOP_JUMP(c, NO_LOCATION, JUMP, start);
|
||||
|
||||
_PyCompile_PopFBlock(c, COMPILE_FBLOCK_FOR_LOOP, start);
|
||||
_PyCompile_PopFBlock(c, COMPILE_FBLOCK_ASYNC_FOR_LOOP, start);
|
||||
|
||||
/* Except block for __anext__ */
|
||||
USE_LABEL(c, except);
|
||||
|
@ -3895,10 +3905,11 @@ maybe_optimize_function_call(compiler *c, expr_ty e, jump_target_label end)
|
|||
NEW_JUMP_TARGET_LABEL(c, loop);
|
||||
NEW_JUMP_TARGET_LABEL(c, cleanup);
|
||||
|
||||
ADDOP(c, loc, PUSH_NULL); // Push NULL index for loop
|
||||
USE_LABEL(c, loop);
|
||||
ADDOP_JUMP(c, loc, FOR_ITER, cleanup);
|
||||
if (const_oparg == CONSTANT_BUILTIN_TUPLE) {
|
||||
ADDOP_I(c, loc, LIST_APPEND, 2);
|
||||
ADDOP_I(c, loc, LIST_APPEND, 3);
|
||||
ADDOP_JUMP(c, loc, JUMP, loop);
|
||||
}
|
||||
else {
|
||||
|
@ -4442,13 +4453,12 @@ codegen_sync_comprehension_generator(compiler *c, location loc,
|
|||
}
|
||||
if (IS_JUMP_TARGET_LABEL(start)) {
|
||||
VISIT(c, expr, gen->iter);
|
||||
ADDOP(c, LOC(gen->iter), GET_ITER);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (IS_JUMP_TARGET_LABEL(start)) {
|
||||
depth++;
|
||||
depth += 2;
|
||||
ADDOP(c, LOC(gen->iter), GET_ITER);
|
||||
USE_LABEL(c, start);
|
||||
ADDOP_JUMP(c, LOC(gen->iter), FOR_ITER, anchor);
|
||||
|
@ -4543,9 +4553,9 @@ codegen_async_comprehension_generator(compiler *c, location loc,
|
|||
else {
|
||||
/* Sub-iter - calculate on the fly */
|
||||
VISIT(c, expr, gen->iter);
|
||||
ADDOP(c, LOC(gen->iter), GET_AITER);
|
||||
}
|
||||
}
|
||||
ADDOP(c, LOC(gen->iter), GET_AITER);
|
||||
|
||||
USE_LABEL(c, start);
|
||||
/* Runtime will push a block here, so we need to account for that */
|
||||
|
@ -4757,19 +4767,6 @@ pop_inlined_comprehension_state(compiler *c, location loc,
|
|||
return SUCCESS;
|
||||
}
|
||||
|
||||
static inline int
|
||||
codegen_comprehension_iter(compiler *c, comprehension_ty comp)
|
||||
{
|
||||
VISIT(c, expr, comp->iter);
|
||||
if (comp->is_async) {
|
||||
ADDOP(c, LOC(comp->iter), GET_AITER);
|
||||
}
|
||||
else {
|
||||
ADDOP(c, LOC(comp->iter), GET_ITER);
|
||||
}
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
static int
|
||||
codegen_comprehension(compiler *c, expr_ty e, int type,
|
||||
identifier name, asdl_comprehension_seq *generators, expr_ty elt,
|
||||
|
@ -4789,9 +4786,7 @@ codegen_comprehension(compiler *c, expr_ty e, int type,
|
|||
|
||||
outermost = (comprehension_ty) asdl_seq_GET(generators, 0);
|
||||
if (is_inlined) {
|
||||
if (codegen_comprehension_iter(c, outermost)) {
|
||||
goto error;
|
||||
}
|
||||
VISIT(c, expr, outermost->iter);
|
||||
if (push_inlined_comprehension_state(c, loc, entry, &inline_state)) {
|
||||
goto error;
|
||||
}
|
||||
|
|
Python/executor_cases.c.h (generated): 189 lines changed
|
@ -534,7 +534,7 @@
|
|||
stack_pointer += -1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(value);
|
||||
PyStackRef_XCLOSE(value);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
break;
|
||||
}
|
||||
|
@ -577,6 +577,20 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case _POP_ITER: {
|
||||
_PyStackRef index_or_null;
|
||||
_PyStackRef iter;
|
||||
index_or_null = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
(void)index_or_null;
|
||||
stack_pointer += -2;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(iter);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
break;
|
||||
}
|
||||
|
||||
case _END_SEND: {
|
||||
_PyStackRef value;
|
||||
_PyStackRef receiver;
|
||||
|
@ -4172,25 +4186,37 @@
|
|||
case _GET_ITER: {
|
||||
_PyStackRef iterable;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef index_or_null;
|
||||
iterable = stack_pointer[-1];
|
||||
#ifdef Py_STATS
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_Py_GatherStats_GetIter(iterable);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
#endif
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
stack_pointer += -1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(iterable);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (iter_o == NULL) {
|
||||
JUMP_TO_ERROR();
|
||||
|
||||
PyTypeObject *tp = PyStackRef_TYPE(iterable);
|
||||
if (tp == &PyTuple_Type || tp == &PyList_Type) {
|
||||
iter = iterable;
|
||||
index_or_null = PyStackRef_TagInt(0);
|
||||
}
|
||||
iter = PyStackRef_FromPyObjectSteal(iter_o);
|
||||
stack_pointer[0] = iter;
|
||||
else {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
stack_pointer += -1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(iterable);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (iter_o == NULL) {
|
||||
JUMP_TO_ERROR();
|
||||
}
|
||||
iter = PyStackRef_FromPyObjectSteal(iter_o);
|
||||
index_or_null = PyStackRef_NULL;
|
||||
stack_pointer += 1;
|
||||
}
|
||||
stack_pointer[-1] = iter;
|
||||
stack_pointer[0] = index_or_null;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
break;
|
||||
|
@ -4237,10 +4263,16 @@
|
|||
/* _FOR_ITER is not a viable micro-op for tier 2 because it is replaced */
|
||||
|
||||
case _FOR_ITER_TIER_TWO: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef next;
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (!PyStackRef_IsNull(null_or_index)) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
|
@ -4272,21 +4304,18 @@
|
|||
/* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 because it is instrumented */
|
||||
|
||||
case _ITER_CHECK_LIST: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(iter_o) != &PyListIter_Type) {
|
||||
if (Py_TYPE(iter_o) != &PyList_Type) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
assert(PyStackRef_IsTaggedInt(null_or_index));
|
||||
#ifdef Py_GIL_DISABLED
|
||||
if (!_PyObject_IsUniquelyReferenced(iter_o)) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) ||
|
||||
!_PyObject_GC_IS_SHARED(it->it_seq)) {
|
||||
if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
|
@ -4297,24 +4326,17 @@
|
|||
/* _ITER_JUMP_LIST is not a viable micro-op for tier 2 because it is replaced */
|
||||
|
||||
case _GUARD_NOT_EXHAUSTED_LIST: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
#ifndef Py_GIL_DISABLED
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyListIter_Type);
|
||||
PyListObject *seq = it->it_seq;
|
||||
if (seq == NULL) {
|
||||
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(list_o) == &PyList_Type);
|
||||
if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
|
||||
it->it_index = -1;
|
||||
if (1) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
break;
|
||||
}
|
||||
|
@ -4322,38 +4344,30 @@
|
|||
/* _ITER_NEXT_LIST is not a viable micro-op for tier 2 because it is replaced */
|
||||
|
||||
case _ITER_NEXT_LIST_TIER_TWO: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef next;
|
||||
iter = stack_pointer[-1];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyListIter_Type);
|
||||
PyListObject *seq = it->it_seq;
|
||||
assert(seq);
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(PyList_CheckExact(list_o));
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
|
||||
_PyObject_GC_IS_SHARED(seq));
|
||||
assert(_Py_IsOwnedByCurrentThread((PyObject *)list_o) ||
|
||||
_PyObject_GC_IS_SHARED(list_o));
|
||||
STAT_INC(FOR_ITER, hit);
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
|
||||
int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (result < 0) {
|
||||
if (result <= 0) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
if (result == 0) {
|
||||
it->it_index = -1;
|
||||
if (1) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
}
|
||||
it->it_index++;
|
||||
#else
|
||||
assert(it->it_index < PyList_GET_SIZE(seq));
|
||||
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
|
||||
assert(PyStackRef_UntagInt(null_or_index) < PyList_GET_SIZE(list_o));
|
||||
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
|
||||
#endif
|
||||
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
|
||||
stack_pointer[-1] = null_or_index;
|
||||
stack_pointer[0] = next;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
|
@ -4361,39 +4375,29 @@
|
|||
}
|
||||
|
||||
case _ITER_CHECK_TUPLE: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(iter_o) != &PyTupleIter_Type) {
|
||||
if (Py_TYPE(iter_o) != &PyTuple_Type) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
#ifdef Py_GIL_DISABLED
|
||||
if (!_PyObject_IsUniquelyReferenced(iter_o)) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
#endif
|
||||
assert(PyStackRef_IsTaggedInt(null_or_index));
|
||||
break;
|
||||
}
|
||||
|
||||
/* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 because it is replaced */
|
||||
|
||||
case _GUARD_NOT_EXHAUSTED_TUPLE: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
#endif
|
||||
PyTupleObject *seq = it->it_seq;
|
||||
if (seq == NULL) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
if (it->it_index >= PyTuple_GET_SIZE(seq)) {
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
|
||||
if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
JUMP_TO_JUMP_TARGET();
|
||||
}
|
||||
|
@ -4401,19 +4405,18 @@
|
|||
}
|
||||
|
||||
case _ITER_NEXT_TUPLE: {
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef next;
|
||||
iter = stack_pointer[-1];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
|
||||
PyTupleObject *seq = it->it_seq;
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
#endif
|
||||
assert(seq);
|
||||
assert(it->it_index < PyTuple_GET_SIZE(seq));
|
||||
next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
|
||||
uintptr_t i = PyStackRef_UntagInt(null_or_index);
|
||||
assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o));
|
||||
next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i));
|
||||
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
|
||||
stack_pointer[-1] = null_or_index;
|
||||
stack_pointer[0] = next;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
|
@ -4422,7 +4425,7 @@
|
|||
|
||||
case _ITER_CHECK_RANGE: {
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(r) != &PyRangeIter_Type) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
|
@ -4441,7 +4444,7 @@
|
|||
|
||||
case _GUARD_NOT_EXHAUSTED_RANGE: {
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(r) == &PyRangeIter_Type);
|
||||
if (r->len <= 0) {
|
||||
|
@ -4454,7 +4457,7 @@
|
|||
case _ITER_NEXT_RANGE: {
|
||||
_PyStackRef iter;
|
||||
_PyStackRef next;
|
||||
iter = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(r) == &PyRangeIter_Type);
|
||||
#ifdef Py_GIL_DISABLED
|
||||
|
@ -4479,7 +4482,7 @@
|
|||
_PyStackRef iter;
|
||||
_PyInterpreterFrame *gen_frame;
|
||||
oparg = CURRENT_OPARG();
|
||||
iter = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(gen) != &PyGen_Type) {
|
||||
UOP_STAT_INC(uopcode, miss);
|
||||
|
|
|
@ -299,26 +299,34 @@ basicblock_returns(const basicblock *b) {
|
|||
}
|
||||
|
||||
static void
|
||||
dump_basicblock(const basicblock *b)
|
||||
dump_basicblock(const basicblock *b, bool highlight)
|
||||
{
|
||||
const char *b_return = basicblock_returns(b) ? "return " : "";
|
||||
if (highlight) {
|
||||
fprintf(stderr, ">>> ");
|
||||
}
|
||||
fprintf(stderr, "%d: [EH=%d CLD=%d WRM=%d NO_FT=%d %p] used: %d, depth: %d, preds: %d %s\n",
|
||||
b->b_label.id, b->b_except_handler, b->b_cold, b->b_warm, BB_NO_FALLTHROUGH(b), b, b->b_iused,
|
||||
b->b_startdepth, b->b_predecessors, b_return);
|
||||
int depth = b->b_startdepth;
|
||||
if (b->b_instr) {
|
||||
int i;
|
||||
for (i = 0; i < b->b_iused; i++) {
|
||||
fprintf(stderr, " [%02d] ", i);
|
||||
fprintf(stderr, " [%02d] depth: %d ", i, depth);
|
||||
dump_instr(b->b_instr + i);
|
||||
|
||||
int popped = _PyOpcode_num_popped(b->b_instr[i].i_opcode, b->b_instr[i].i_oparg);
|
||||
int pushed = _PyOpcode_num_pushed(b->b_instr[i].i_opcode, b->b_instr[i].i_oparg);
|
||||
depth += (pushed - popped);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
_PyCfgBuilder_DumpGraph(const basicblock *entryblock)
|
||||
_PyCfgBuilder_DumpGraph(const basicblock *entryblock, const basicblock *mark)
|
||||
{
|
||||
for (const basicblock *b = entryblock; b != NULL; b = b->b_next) {
|
||||
dump_basicblock(b);
|
||||
dump_basicblock(b, b == mark);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2863,6 +2871,7 @@ optimize_load_fast(cfg_builder *g)
|
|||
|
||||
// Opcodes that consume no inputs
|
||||
case GET_ANEXT:
|
||||
case GET_ITER:
|
||||
case GET_LEN:
|
||||
case IMPORT_FROM:
|
||||
case MATCH_KEYS:
|
||||
|
|
Python/generated_cases.c.h (generated): 293 lines changed
|
@ -5731,17 +5731,19 @@
|
|||
_Py_CODEUNIT* const this_instr = next_instr - 2;
|
||||
(void)this_instr;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef next;
|
||||
// _SPECIALIZE_FOR_ITER
|
||||
{
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
(void)counter;
|
||||
#if ENABLE_SPECIALIZATION_FT
|
||||
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
|
||||
next_instr = this_instr;
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_Py_Specialize_ForIter(iter, next_instr, oparg);
|
||||
_Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
|
@ -5752,29 +5754,44 @@
|
|||
// _FOR_ITER
|
||||
{
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (next_o == NULL) {
|
||||
if (_PyErr_Occurred(tstate)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (!matches) {
|
||||
JUMP_TO_LABEL(error);
|
||||
}
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_PyEval_MonitorRaise(tstate, frame, this_instr);
|
||||
_PyErr_Clear(tstate);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (PyStackRef_IsTaggedInt(null_or_index)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
next = _PyForIter_NextWithIndex(iter_o, null_or_index);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (PyStackRef_IsNull(next)) {
|
||||
null_or_index = PyStackRef_TagInt(-1);
|
||||
JUMPBY(oparg + 1);
|
||||
stack_pointer[-1] = null_or_index;
|
||||
DISPATCH();
|
||||
}
|
||||
assert(next_instr[oparg].op.code == END_FOR ||
|
||||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
|
||||
}
|
||||
else {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (next_o == NULL) {
|
||||
if (_PyErr_Occurred(tstate)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (!matches) {
|
||||
JUMP_TO_LABEL(error);
|
||||
}
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_PyEval_MonitorRaise(tstate, frame, this_instr);
|
||||
_PyErr_Clear(tstate);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
}
|
||||
assert(next_instr[oparg].op.code == END_FOR ||
|
||||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
}
|
||||
next = PyStackRef_FromPyObjectSteal(next_o);
|
||||
}
|
||||
next = PyStackRef_FromPyObjectSteal(next_o);
|
||||
}
|
||||
stack_pointer[-1] = null_or_index;
|
||||
stack_pointer[0] = next;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
|
@ -5806,7 +5823,7 @@
|
|||
}
|
||||
// _FOR_ITER_GEN_FRAME
|
||||
{
|
||||
iter = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(gen) != &PyGen_Type) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
|
@ -5863,26 +5880,22 @@
|
|||
INSTRUCTION_STATS(FOR_ITER_LIST);
|
||||
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
|
||||
_PyStackRef iter;
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef next;
|
||||
/* Skip 1 cache entry */
|
||||
// _ITER_CHECK_LIST
|
||||
{
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(iter_o) != &PyListIter_Type) {
|
||||
if (Py_TYPE(iter_o) != &PyList_Type) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
|
||||
JUMP_TO_PREDICTED(FOR_ITER);
|
||||
}
|
||||
assert(PyStackRef_IsTaggedInt(null_or_index));
|
||||
#ifdef Py_GIL_DISABLED
|
||||
if (!_PyObject_IsUniquelyReferenced(iter_o)) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
|
||||
JUMP_TO_PREDICTED(FOR_ITER);
|
||||
}
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) ||
|
||||
!_PyObject_GC_IS_SHARED(it->it_seq)) {
|
||||
if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
|
||||
JUMP_TO_PREDICTED(FOR_ITER);
|
||||
|
@ -5891,42 +5904,30 @@
|
|||
}
|
||||
// _ITER_JUMP_LIST
|
||||
{
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(iter_o) == &PyListIter_Type);
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
(void)iter_o;
|
||||
|
||||
#else
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(list_o) == &PyList_Type);
|
||||
STAT_INC(FOR_ITER, hit);
|
||||
PyListObject *seq = it->it_seq;
|
||||
if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
|
||||
it->it_index = -1;
|
||||
if (seq != NULL) {
|
||||
it->it_seq = NULL;
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
Py_DECREF(seq);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
}
|
||||
if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) {
|
||||
null_or_index = PyStackRef_TagInt(-1);
|
||||
JUMPBY(oparg + 1);
|
||||
stack_pointer[-1] = null_or_index;
|
||||
DISPATCH();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
// _ITER_NEXT_LIST
|
||||
{
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyListIter_Type);
|
||||
PyListObject *seq = it->it_seq;
|
||||
assert(seq);
|
||||
PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(PyList_CheckExact(list_o));
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
|
||||
_PyObject_GC_IS_SHARED(seq));
|
||||
assert(_Py_IsOwnedByCurrentThread(list_o) ||
|
||||
_PyObject_GC_IS_SHARED(list_o));
|
||||
STAT_INC(FOR_ITER, hit);
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
|
||||
int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (result < 0) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
|
@ -5934,16 +5935,17 @@
|
|||
JUMP_TO_PREDICTED(FOR_ITER);
|
||||
}
|
||||
if (result == 0) {
|
||||
it->it_index = -1;
|
||||
null_or_index = PyStackRef_TagInt(-1);
|
||||
JUMPBY(oparg + 1);
|
||||
stack_pointer[-1] = null_or_index;
|
||||
DISPATCH();
|
||||
}
|
||||
it->it_index++;
|
||||
#else
|
||||
assert(it->it_index < PyList_GET_SIZE(seq));
|
||||
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
|
||||
next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
|
||||
#endif
|
||||
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
|
||||
}
|
||||
stack_pointer[-1] = null_or_index;
|
||||
stack_pointer[0] = next;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
|
@ -5966,7 +5968,7 @@
|
|||
/* Skip 1 cache entry */
|
||||
// _ITER_CHECK_RANGE
|
||||
{
|
||||
iter = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(r) != &PyRangeIter_Type) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
|
@ -6029,63 +6031,44 @@
|
|||
INSTRUCTION_STATS(FOR_ITER_TUPLE);
|
||||
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
|
||||
_PyStackRef iter;
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef next;
|
||||
/* Skip 1 cache entry */
|
||||
// _ITER_CHECK_TUPLE
|
||||
{
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
if (Py_TYPE(iter_o) != &PyTupleIter_Type) {
|
||||
if (Py_TYPE(iter_o) != &PyTuple_Type) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
|
||||
JUMP_TO_PREDICTED(FOR_ITER);
|
||||
}
|
||||
#ifdef Py_GIL_DISABLED
|
||||
if (!_PyObject_IsUniquelyReferenced(iter_o)) {
|
||||
UPDATE_MISS_STATS(FOR_ITER);
|
||||
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
|
||||
JUMP_TO_PREDICTED(FOR_ITER);
|
||||
}
|
||||
#endif
|
||||
assert(PyStackRef_IsTaggedInt(null_or_index));
|
||||
}
|
||||
// _ITER_JUMP_TUPLE
|
||||
{
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
(void)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
#endif
|
||||
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
|
||||
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
(void)tuple_o;
|
||||
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
|
||||
STAT_INC(FOR_ITER, hit);
|
||||
PyTupleObject *seq = it->it_seq;
|
||||
if (seq == NULL || (size_t)it->it_index >= (size_t)PyTuple_GET_SIZE(seq)) {
|
||||
#ifndef Py_GIL_DISABLED
|
||||
if (seq != NULL) {
|
||||
it->it_seq = NULL;
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
Py_DECREF(seq);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
}
|
||||
#endif
|
||||
|
||||
if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) {
|
||||
null_or_index = PyStackRef_TagInt(-1);
|
||||
JUMPBY(oparg + 1);
|
||||
stack_pointer[-1] = null_or_index;
|
||||
DISPATCH();
|
||||
}
|
||||
}
|
||||
// _ITER_NEXT_TUPLE
|
||||
{
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
|
||||
assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
|
||||
PyTupleObject *seq = it->it_seq;
|
||||
#ifdef Py_GIL_DISABLED
|
||||
assert(_PyObject_IsUniquelyReferenced(iter_o));
|
||||
#endif
|
||||
assert(seq);
|
||||
assert(it->it_index < PyTuple_GET_SIZE(seq));
|
||||
next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
|
||||
PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
assert(Py_TYPE(tuple_o) == &PyTuple_Type);
|
||||
uintptr_t i = PyStackRef_UntagInt(null_or_index);
|
||||
assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o));
|
||||
next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i));
|
||||
null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
|
||||
}
|
||||
stack_pointer[-1] = null_or_index;
|
||||
stack_pointer[0] = next;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
|
@ -6216,25 +6199,37 @@
|
|||
INSTRUCTION_STATS(GET_ITER);
|
||||
_PyStackRef iterable;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef index_or_null;
|
||||
iterable = stack_pointer[-1];
|
||||
#ifdef Py_STATS
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_Py_GatherStats_GetIter(iterable);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
#endif
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
stack_pointer += -1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(iterable);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (iter_o == NULL) {
|
||||
JUMP_TO_LABEL(error);
|
||||
|
||||
PyTypeObject *tp = PyStackRef_TYPE(iterable);
|
||||
if (tp == &PyTuple_Type || tp == &PyList_Type) {
|
||||
iter = iterable;
|
||||
index_or_null = PyStackRef_TagInt(0);
|
||||
}
|
||||
iter = PyStackRef_FromPyObjectSteal(iter_o);
|
||||
stack_pointer[0] = iter;
|
||||
else {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
stack_pointer += -1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(iterable);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (iter_o == NULL) {
|
||||
JUMP_TO_LABEL(error);
|
||||
}
|
||||
iter = PyStackRef_FromPyObjectSteal(iter_o);
|
||||
index_or_null = PyStackRef_NULL;
|
||||
stack_pointer += 1;
|
||||
}
|
||||
stack_pointer[-1] = iter;
|
||||
stack_pointer[0] = index_or_null;
|
||||
stack_pointer += 1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
DISPATCH();
|
||||
|
@ -6999,7 +6994,7 @@
|
|||
_PyStackRef receiver;
|
||||
_PyStackRef value;
|
||||
value = stack_pointer[-1];
|
||||
receiver = stack_pointer[-2];
|
||||
receiver = stack_pointer[-3];
|
||||
if (PyStackRef_GenCheck(receiver)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value));
|
||||
|
@ -7061,34 +7056,48 @@
|
|||
next_instr += 2;
|
||||
INSTRUCTION_STATS(INSTRUMENTED_FOR_ITER);
|
||||
_PyStackRef iter;
|
||||
_PyStackRef null_or_index;
|
||||
_PyStackRef next;
|
||||
/* Skip 1 cache entry */
|
||||
iter = stack_pointer[-1];
|
||||
null_or_index = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (next_o != NULL) {
|
||||
next = PyStackRef_FromPyObjectSteal(next_o);
|
||||
if (PyStackRef_IsTaggedInt(null_or_index)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
next = _PyForIter_NextWithIndex(iter_o, null_or_index);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (PyStackRef_IsNull(next)) {
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
}
|
||||
INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
|
||||
}
|
||||
else {
|
||||
if (_PyErr_Occurred(tstate)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (!matches) {
|
||||
JUMP_TO_LABEL(error);
|
||||
}
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_PyEval_MonitorRaise(tstate, frame, this_instr);
|
||||
_PyErr_Clear(tstate);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (next_o != NULL) {
|
||||
next = PyStackRef_FromPyObjectSteal(next_o);
|
||||
INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
|
||||
}
|
||||
else {
|
||||
if (_PyErr_Occurred(tstate)) {
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
if (!matches) {
|
||||
JUMP_TO_LABEL(error);
|
||||
}
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
_PyEval_MonitorRaise(tstate, frame, this_instr);
|
||||
_PyErr_Clear(tstate);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
}
|
||||
assert(next_instr[oparg].op.code == END_FOR ||
|
||||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
}
|
||||
assert(next_instr[oparg].op.code == END_FOR ||
|
||||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
}
|
||||
stack_pointer[0] = next;
|
||||
stack_pointer += 1;
|
||||
|
@ -7356,9 +7365,12 @@
|
|||
next_instr += 1;
|
||||
INSTRUCTION_STATS(INSTRUMENTED_POP_ITER);
|
||||
_PyStackRef iter;
|
||||
iter = stack_pointer[-1];
|
||||
_PyStackRef index_or_null;
|
||||
index_or_null = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
(void)index_or_null;
|
||||
INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT);
|
||||
stack_pointer += -1;
|
||||
stack_pointer += -2;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(iter);
|
||||
|
@ -10122,12 +10134,15 @@
|
|||
frame->instr_ptr = next_instr;
|
||||
next_instr += 1;
|
||||
INSTRUCTION_STATS(POP_ITER);
|
||||
_PyStackRef value;
|
||||
value = stack_pointer[-1];
|
||||
stack_pointer += -1;
|
||||
_PyStackRef iter;
|
||||
_PyStackRef index_or_null;
|
||||
index_or_null = stack_pointer[-1];
|
||||
iter = stack_pointer[-2];
|
||||
(void)index_or_null;
|
||||
stack_pointer += -2;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(value);
|
||||
PyStackRef_CLOSE(iter);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
DISPATCH();
|
||||
}
|
||||
|
@ -10275,7 +10290,7 @@
|
|||
stack_pointer += -1;
|
||||
assert(WITHIN_STACK_BOUNDS());
|
||||
_PyFrame_SetStackPointer(frame, stack_pointer);
|
||||
PyStackRef_CLOSE(value);
|
||||
PyStackRef_XCLOSE(value);
|
||||
stack_pointer = _PyFrame_GetStackPointer(frame);
|
||||
DISPATCH();
|
||||
}
|
||||
|
|
|
@@ -840,7 +840,7 @@ dummy_func(void) {
value = sym_new_unknown(ctx);
}

op(_FOR_ITER_GEN_FRAME, (unused -- unused, gen_frame: _Py_UOpsAbstractFrame*)) {
op(_FOR_ITER_GEN_FRAME, (unused, unused -- unused, unused, gen_frame: _Py_UOpsAbstractFrame*)) {
gen_frame = NULL;
/* We are about to hit the end of the trace */
ctx->done = true;

@@ -914,7 +914,7 @@ dummy_func(void) {
}
}

op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
op(_ITER_NEXT_RANGE, (iter, null_or_index -- iter, null_or_index, next)) {
next = sym_new_type(ctx, &PyLong_Type);
}
Python/optimizer_cases.c.h (generated): 11 lines changed
|
@@ -126,6 +126,12 @@
break;
}

case _POP_ITER: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}

case _END_SEND: {
JitOptSymbol *val;
val = sym_new_not_null(ctx);

@@ -1557,8 +1563,13 @@

case _GET_ITER: {
JitOptSymbol *iter;
JitOptSymbol *index_or_null;
iter = sym_new_not_null(ctx);
index_or_null = sym_new_not_null(ctx);
stack_pointer[-1] = iter;
stack_pointer[0] = index_or_null;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
||||
|
|
|
@ -2904,53 +2904,57 @@ int
|
|||
#endif // Py_STATS
|
||||
|
||||
Py_NO_INLINE void
|
||||
_Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg)
|
||||
_Py_Specialize_ForIter(_PyStackRef iter, _PyStackRef null_or_index, _Py_CODEUNIT *instr, int oparg)
|
||||
{
|
||||
assert(ENABLE_SPECIALIZATION_FT);
|
||||
assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
|
||||
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
|
||||
PyTypeObject *tp = Py_TYPE(iter_o);
|
||||
|
||||
if (PyStackRef_IsNull(null_or_index)) {
|
||||
#ifdef Py_GIL_DISABLED
|
||||
// Only specialize for uniquely referenced iterators, so that we know
|
||||
// they're only referenced by this one thread. This is more limiting
|
||||
// than we need (even `it = iter(mylist); for item in it:` won't get
|
||||
// specialized) but we don't have a way to check whether we're the only
|
||||
// _thread_ who has access to the object.
|
||||
if (!_PyObject_IsUniquelyReferenced(iter_o))
|
||||
goto failure;
|
||||
#endif
|
||||
if (tp == &PyListIter_Type) {
|
||||
#ifdef Py_GIL_DISABLED
|
||||
_PyListIterObject *it = (_PyListIterObject *)iter_o;
|
||||
if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) &&
|
||||
!_PyObject_GC_IS_SHARED(it->it_seq)) {
|
||||
// Maybe this should just set GC_IS_SHARED in a critical
|
||||
// section, instead of leaving it to the first iteration?
|
||||
// Only specialize for uniquely referenced iterators, so that we know
|
||||
// they're only referenced by this one thread. This is more limiting
|
||||
// than we need (even `it = iter(mylist); for item in it:` won't get
|
||||
// specialized) but we don't have a way to check whether we're the only
|
||||
// _thread_ who has access to the object.
|
||||
if (!_PyObject_IsUniquelyReferenced(iter_o)) {
|
||||
goto failure;
|
||||
}
|
||||
#endif
|
||||
specialize(instr, FOR_ITER_LIST);
|
||||
return;
|
||||
if (tp == &PyRangeIter_Type) {
|
||||
specialize(instr, FOR_ITER_RANGE);
|
||||
return;
|
||||
}
|
||||
else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
|
||||
// Generators are very much not thread-safe, so don't worry about
|
||||
// the specialization not being thread-safe.
|
||||
assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
|
||||
instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
|
||||
);
|
||||
/* Don't specialize if PEP 523 is active */
|
||||
if (_PyInterpreterState_GET()->eval_frame) {
|
||||
goto failure;
|
||||
}
|
||||
specialize(instr, FOR_ITER_GEN);
|
||||
return;
|
||||
}
|
||||
}
|
||||
else if (tp == &PyTupleIter_Type) {
|
||||
specialize(instr, FOR_ITER_TUPLE);
|
||||
return;
|
||||
}
|
||||
else if (tp == &PyRangeIter_Type) {
|
||||
specialize(instr, FOR_ITER_RANGE);
|
||||
return;
|
||||
}
|
||||
else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
|
||||
// Generators are very much not thread-safe, so don't worry about
|
||||
// the specialization not being thread-safe.
|
||||
assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
|
||||
instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
|
||||
);
|
||||
/* Don't specialize if PEP 523 is active */
|
||||
if (_PyInterpreterState_GET()->eval_frame)
|
||||
goto failure;
|
||||
specialize(instr, FOR_ITER_GEN);
|
||||
return;
|
||||
else {
|
||||
if (tp == &PyList_Type) {
|
||||
#ifdef Py_GIL_DISABLED
|
||||
// Only specialize for lists owned by this thread or shared
|
||||
if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
|
||||
goto failure;
|
||||
}
|
||||
#endif
|
||||
specialize(instr, FOR_ITER_LIST);
|
||||
return;
|
||||
}
|
||||
else if (tp == &PyTuple_Type) {
|
||||
specialize(instr, FOR_ITER_TUPLE);
|
||||
return;
|
||||
}
|
||||
}
|
||||
failure:
|
||||
SPECIALIZATION_FAIL(FOR_ITER,
|
||||
|
|
|
@@ -216,4 +216,12 @@ PyStackRef_IsNullOrInt(_PyStackRef ref)
return PyStackRef_IsNull(ref) || PyStackRef_IsTaggedInt(ref);
}

_PyStackRef
PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref)
{
assert(ref.index <= INT_MAX - 2); // No overflow
return (_PyStackRef){ .index = ref.index + 2 };
}

#endif
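A note on the arithmetic in PyStackRef_IncrementTaggedIntNoOverflow above: it appears to assume a tagged-int layout where the payload sits one bit to the left and the low bit is the tag, so adding 2 to the raw word advances the index by exactly one. That layout is defined elsewhere in the header, not in this hunk, so treat the following plain-Python model as an assumption rather than a statement of the actual encoding.

def tag_int(i):
    # Hypothetical model: low bit marks "inline integer", payload in the upper bits.
    return (i << 1) | 1

def untag_int(ref):
    return ref >> 1

def increment_tagged_int_no_overflow(ref):
    return ref + 2  # same as tag_int(untag_int(ref) + 1)

assert untag_int(increment_tagged_int_no_overflow(tag_int(5))) == 6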