mirror of https://github.com/python/cpython.git, synced 2025-08-03 16:39:00 +00:00

GH-111485: Use micro-ops to split specialization code from base action (GH-111561)

commit b14e882428, parent eaf67e37a2
9 changed files with 864 additions and 583 deletions
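The change follows one pattern throughout Python/bytecodes.c: the adaptive-counter check that used to sit at the top of each adaptive instruction is pulled out into a tier-one-only _SPECIALIZE_<NAME> micro-op, the remaining action becomes a plain _<NAME> micro-op, and the original instruction is rebuilt as a macro that chains the two. A condensed sketch of the pattern, adapted from the TO_BOOL hunks shown below (the committed text is in the diff itself):

    op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) {
        TIER_ONE_ONLY
        #if ENABLE_SPECIALIZATION
        if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
            next_instr = this_instr;
            _Py_Specialize_ToBool(value, next_instr);
            DISPATCH_SAME_OPARG();
        }
        STAT_INC(TO_BOOL, deferred);
        DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
        #endif /* ENABLE_SPECIALIZATION */
    }

    op(_TO_BOOL, (unused/2, value -- res)) {
        // Base action: unchanged from the old TO_BOOL body.
        int err = PyObject_IsTrue(value);
        DECREF_INPUTS();
        ERROR_IF(err < 0, error);
        res = err ? Py_True : Py_False;
    }

    // The bytecode keeps its name and cache layout; the code generator
    // expands the macro back into the two uops.
    macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + _TO_BOOL;

The generated files then pick this up per tier: Python/generated_cases.c.h inlines both uops into the single TO_BOOL case (tier 1), while Python/executor_cases.c.h and Python/abstract_interp_cases.c.h get a separate case per uop (tier 2).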
Python/abstract_interp_cases.c.h (generated): 22 lines changed

@@ -38,7 +38,7 @@
 break;
 }

-case TO_BOOL: {
+case _TO_BOOL: {
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true);
 break;
 }

@@ -113,7 +113,7 @@
 break;
 }

-case BINARY_SUBSCR: {
+case _BINARY_SUBSCR: {
 STACK_SHRINK(1);
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true);
 break;

@@ -164,7 +164,7 @@
 break;
 }

-case STORE_SUBSCR: {
+case _STORE_SUBSCR: {
 STACK_SHRINK(3);
 break;
 }

@@ -242,7 +242,11 @@
 break;
 }

-case UNPACK_SEQUENCE: {
+case _SPECIALIZE_UNPACK_SEQUENCE: {
+break;
+}
+
+case _UNPACK_SEQUENCE: {
 STACK_SHRINK(1);
 STACK_GROW(oparg);
 break;

@@ -272,7 +276,7 @@
 break;
 }

-case STORE_ATTR: {
+case _STORE_ATTR: {
 STACK_SHRINK(2);
 break;
 }

@@ -308,7 +312,7 @@
 break;
 }

-case LOAD_GLOBAL: {
+case _LOAD_GLOBAL: {
 STACK_GROW(1);
 STACK_GROW(((oparg & 1) ? 1 : 0));
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true);

@@ -456,7 +460,7 @@
 break;
 }

-case LOAD_ATTR: {
+case _LOAD_ATTR: {
 STACK_GROW(((oparg & 1) ? 1 : 0));
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true);
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true);

@@ -532,7 +536,7 @@
 break;
 }

-case COMPARE_OP: {
+case _COMPARE_OP: {
 STACK_SHRINK(1);
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true);
 break;

@@ -901,7 +905,7 @@
 break;
 }

-case BINARY_OP: {
+case _BINARY_OP: {
 STACK_SHRINK(1);
 PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true);
 break;
Python/bytecodes.c
@@ -311,9 +311,10 @@ dummy_func(
 TO_BOOL_STR,
 };

-inst(TO_BOOL, (unused/1, unused/2, value -- res)) {
+op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_ToBool(value, next_instr);
 DISPATCH_SAME_OPARG();

@@ -321,12 +322,17 @@ dummy_func(
 STAT_INC(TO_BOOL, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_TO_BOOL, (unused/2, value -- res)) {
 int err = PyObject_IsTrue(value);
 DECREF_INPUTS();
 ERROR_IF(err < 0, error);
 res = err ? Py_True : Py_False;
 }

+macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + _TO_BOOL;
+
 inst(TO_BOOL_BOOL, (unused/1, unused/2, value -- value)) {
 DEOPT_IF(!PyBool_Check(value));
 STAT_INC(TO_BOOL, hit);
@@ -530,9 +536,10 @@ dummy_func(
 BINARY_SUBSCR_TUPLE_INT,
 };

-inst(BINARY_SUBSCR, (unused/1, container, sub -- res)) {
+op(_SPECIALIZE_BINARY_SUBSCR, (counter/1, container, sub -- container, sub)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_BinarySubscr(container, sub, next_instr);
 DISPATCH_SAME_OPARG();

@@ -540,11 +547,16 @@ dummy_func(
 STAT_INC(BINARY_SUBSCR, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_BINARY_SUBSCR, (container, sub -- res)) {
 res = PyObject_GetItem(container, sub);
 DECREF_INPUTS();
 ERROR_IF(res == NULL, error);
 }

+macro(BINARY_SUBSCR) = _SPECIALIZE_BINARY_SUBSCR + _BINARY_SUBSCR;
+
 inst(BINARY_SLICE, (container, start, stop -- res)) {
 PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop);
 // Can't use ERROR_IF() here, because we haven't
@@ -677,9 +689,10 @@ dummy_func(
 STORE_SUBSCR_LIST_INT,
 };

-inst(STORE_SUBSCR, (unused/1, v, container, sub -- )) {
+op(_SPECIALIZE_STORE_SUBSCR, (counter/1, container, sub -- container, sub)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_StoreSubscr(container, sub, next_instr);
 DISPATCH_SAME_OPARG();

@@ -687,12 +700,17 @@ dummy_func(
 STAT_INC(STORE_SUBSCR, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_STORE_SUBSCR, (v, container, sub -- )) {
 /* container[sub] = v */
 int err = PyObject_SetItem(container, sub, v);
 DECREF_INPUTS();
 ERROR_IF(err, error);
 }

+macro(STORE_SUBSCR) = _SPECIALIZE_STORE_SUBSCR + _STORE_SUBSCR;
+
 inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) {
 DEOPT_IF(!PyLong_CheckExact(sub));
 DEOPT_IF(!PyList_CheckExact(list));
@@ -956,9 +974,10 @@ dummy_func(
 SEND_GEN,
 };

-inst(SEND, (unused/1, receiver, v -- receiver, retval)) {
+op(_SPECIALIZE_SEND, (counter/1, receiver, unused -- receiver, unused)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_Send(receiver, next_instr);
 DISPATCH_SAME_OPARG();

@@ -966,6 +985,9 @@ dummy_func(
 STAT_INC(SEND, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_SEND, (receiver, v -- receiver, retval)) {
 assert(frame != &entry_frame);
 if ((tstate->interp->eval_frame == NULL) &&
 (Py_TYPE(receiver) == &PyGen_Type || Py_TYPE(receiver) == &PyCoro_Type) &&

@@ -1004,6 +1026,8 @@ dummy_func(
 Py_DECREF(v);
 }

+macro(SEND) = _SPECIALIZE_SEND + _SEND;
+
 inst(SEND_GEN, (unused/1, receiver, v -- receiver, unused)) {
 DEOPT_IF(tstate->interp->eval_frame);
 PyGenObject *gen = (PyGenObject *)receiver;
@@ -1182,9 +1206,9 @@ dummy_func(
 UNPACK_SEQUENCE_LIST,
 };

-inst(UNPACK_SEQUENCE, (unused/1, seq -- unused[oparg])) {
+op(_SPECIALIZE_UNPACK_SEQUENCE, (counter/1, seq -- seq)) {
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_UnpackSequence(seq, next_instr, oparg);
 DISPATCH_SAME_OPARG();

@@ -1192,12 +1216,17 @@ dummy_func(
 STAT_INC(UNPACK_SEQUENCE, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_UNPACK_SEQUENCE, (seq -- unused[oparg])) {
 PyObject **top = stack_pointer + oparg - 1;
 int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
 DECREF_INPUTS();
 ERROR_IF(res == 0, error);
 }

+macro(UNPACK_SEQUENCE) = _SPECIALIZE_UNPACK_SEQUENCE + _UNPACK_SEQUENCE;
+
 inst(UNPACK_SEQUENCE_TWO_TUPLE, (unused/1, seq -- values[oparg])) {
 DEOPT_IF(!PyTuple_CheckExact(seq));
 DEOPT_IF(PyTuple_GET_SIZE(seq) != 2);
@@ -1244,9 +1273,10 @@ dummy_func(
 STORE_ATTR_WITH_HINT,
 };

-inst(STORE_ATTR, (unused/1, unused/3, v, owner --)) {
+op(_SPECIALIZE_STORE_ATTR, (counter/1, owner -- owner)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
 next_instr = this_instr;
 _Py_Specialize_StoreAttr(owner, next_instr, name);

@@ -1255,12 +1285,17 @@ dummy_func(
 STAT_INC(STORE_ATTR, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_STORE_ATTR, (unused/3, v, owner --)) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
 int err = PyObject_SetAttr(owner, name, v);
 DECREF_INPUTS();
 ERROR_IF(err, error);
 }

+macro(STORE_ATTR) = _SPECIALIZE_STORE_ATTR + _STORE_ATTR;
+
 inst(DELETE_ATTR, (owner --)) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
 int err = PyObject_DelAttr(owner, name);
@@ -1365,9 +1400,10 @@ dummy_func(
 LOAD_GLOBAL_BUILTIN,
 };

-inst(LOAD_GLOBAL, (unused/1, unused/1, unused/1, unused/1 -- res, null if (oparg & 1))) {
+op(_SPECIALIZE_LOAD_GLOBAL, (counter/1 -- )) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
 next_instr = this_instr;
 _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);

@@ -1376,6 +1412,9 @@ dummy_func(
 STAT_INC(LOAD_GLOBAL, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_LOAD_GLOBAL, (unused/1, unused/1, unused/1 -- res, null if (oparg & 1))) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
 if (PyDict_CheckExact(GLOBALS())
 && PyDict_CheckExact(BUILTINS()))

@@ -1396,7 +1435,6 @@ dummy_func(
 }
 else {
 /* Slow-path if globals or builtins is not a dict */
-
 /* namespace 1: globals */
 ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0, error);
 if (res == NULL) {

@@ -1413,6 +1451,8 @@ dummy_func(
 null = NULL;
 }

+macro(LOAD_GLOBAL) = _SPECIALIZE_LOAD_GLOBAL + _LOAD_GLOBAL;
+
 op(_GUARD_GLOBALS_VERSION, (version/1 --)) {
 PyDictObject *dict = (PyDictObject *)GLOBALS();
 DEOPT_IF(!PyDict_CheckExact(dict));
@@ -1701,12 +1741,11 @@ dummy_func(
 LOAD_SUPER_ATTR_METHOD,
 };

-inst(LOAD_SUPER_ATTR, (unused/1, global_super, class, self -- attr, null if (oparg & 1))) {
+op(_SPECIALIZE_LOAD_SUPER_ATTR, (counter/1, global_super, class, unused -- global_super, class, unused)) {
 TIER_ONE_ONLY
-PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
 #if ENABLE_SPECIALIZATION
 int load_method = oparg & 1;
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
 DISPATCH_SAME_OPARG();

@@ -1714,7 +1753,10 @@ dummy_func(
 STAT_INC(LOAD_SUPER_ATTR, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}

+op(_LOAD_SUPER_ATTR, (global_super, class, self -- attr, null if (oparg & 1))) {
+TIER_ONE_ONLY
 if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
 PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
 int err = _Py_call_instrumentation_2args(

@@ -1722,7 +1764,6 @@ dummy_func(
 frame, this_instr, global_super, arg);
 ERROR_IF(err, error);
 }
-
 // we make no attempt to optimize here; specializations should
 // handle any case whose performance we care about
 PyObject *stack[] = {class, self};

@@ -1745,12 +1786,15 @@ dummy_func(
 }
 DECREF_INPUTS();
 ERROR_IF(super == NULL, error);
+PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
 attr = PyObject_GetAttr(super, name);
 Py_DECREF(super);
 ERROR_IF(attr == NULL, error);
 null = NULL;
 }

+macro(LOAD_SUPER_ATTR) = _SPECIALIZE_LOAD_SUPER_ATTR + _LOAD_SUPER_ATTR;
+
 pseudo(LOAD_SUPER_METHOD) = {
 LOAD_SUPER_ATTR,
 };
@@ -1813,9 +1857,10 @@ dummy_func(
 LOAD_ATTR_NONDESCRIPTOR_NO_DICT,
 };

-inst(LOAD_ATTR, (unused/9, owner -- attr, self_or_null if (oparg & 1))) {
+op(_SPECIALIZE_LOAD_ATTR, (counter/1, owner -- owner)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
 next_instr = this_instr;
 _Py_Specialize_LoadAttr(owner, next_instr, name);

@@ -1824,6 +1869,9 @@ dummy_func(
 STAT_INC(LOAD_ATTR, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_LOAD_ATTR, (unused/8, owner -- attr, self_or_null if (oparg & 1))) {
 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
 if (oparg & 1) {
 /* Designed to work in tandem with CALL, pushes two values. */

@@ -1831,7 +1879,6 @@ dummy_func(
 if (_PyObject_GetMethod(owner, name, &attr)) {
 /* We can bypass temporary bound method object.
 meth is unbound method and obj is self.
-
 meth | self | arg1 | ... | argN
 */
 assert(attr != NULL);  // No errors on this branch

@@ -1842,7 +1889,6 @@ dummy_func(
 something was returned by a descriptor protocol). Set
 the second element of the stack to NULL, to signal
 CALL that it's not a method call.
-
 NULL | meth | arg1 | ... | argN
 */
 DECREF_INPUTS();

@@ -1858,6 +1904,8 @@ dummy_func(
 }
 }

+macro(LOAD_ATTR) = _SPECIALIZE_LOAD_ATTR + _LOAD_ATTR;
+
 pseudo(LOAD_METHOD) = {
 LOAD_ATTR,
 };
@@ -2133,9 +2181,10 @@ dummy_func(
 COMPARE_OP_STR,
 };

-inst(COMPARE_OP, (unused/1, left, right -- res)) {
+op(_SPECIALIZE_COMPARE_OP, (counter/1, left, right -- left, right)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_CompareOp(left, right, next_instr, oparg);
 DISPATCH_SAME_OPARG();

@@ -2143,6 +2192,9 @@ dummy_func(
 STAT_INC(COMPARE_OP, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_COMPARE_OP, (left, right -- res)) {
 assert((oparg >> 5) <= Py_GE);
 res = PyObject_RichCompare(left, right, oparg >> 5);
 DECREF_INPUTS();

@@ -2155,6 +2207,8 @@ dummy_func(
 }
 }

+macro(COMPARE_OP) = _SPECIALIZE_COMPARE_OP + _COMPARE_OP;
+
 inst(COMPARE_OP_FLOAT, (unused/1, left, right -- res)) {
 DEOPT_IF(!PyFloat_CheckExact(left));
 DEOPT_IF(!PyFloat_CheckExact(right));
@@ -2448,9 +2502,10 @@ dummy_func(
 FOR_ITER_GEN,
 };

-inst(FOR_ITER, (unused/1, iter -- iter, next)) {
+op(_SPECIALIZE_FOR_ITER, (counter/1, iter -- iter)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_ForIter(iter, next_instr, oparg);
 DISPATCH_SAME_OPARG();

@@ -2458,6 +2513,9 @@ dummy_func(
 STAT_INC(FOR_ITER, deferred);
 DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
 #endif /* ENABLE_SPECIALIZATION */
+}
+
+op(_FOR_ITER, (iter -- iter, next)) {
 /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
 next = (*Py_TYPE(iter)->tp_iternext)(iter);
 if (next == NULL) {

@@ -2480,6 +2538,8 @@ dummy_func(
 // Common case: no jump, leave it to the code generator
 }

+macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER;
+
 inst(INSTRUMENTED_FOR_ITER, (unused/1 -- )) {
 _Py_CODEUNIT *target;
 PyObject *iter = TOP();
@@ -2937,24 +2997,28 @@ dummy_func(
 CALL_ALLOC_AND_ENTER_INIT,
 };

+op(_SPECIALIZE_CALL, (counter/1, callable, self_or_null, args[oparg] -- callable, self_or_null, args[oparg])) {
+TIER_ONE_ONLY
+#if ENABLE_SPECIALIZATION
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+next_instr = this_instr;
+_Py_Specialize_Call(callable, next_instr, oparg + (self_or_null != NULL));
+DISPATCH_SAME_OPARG();
+}
+STAT_INC(CALL, deferred);
+DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+#endif /* ENABLE_SPECIALIZATION */
+}
+
 // When calling Python, inline the call using DISPATCH_INLINED().
-inst(CALL, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) {
+op(_CALL, (unused/2, callable, self_or_null, args[oparg] -- res)) {
 // oparg counts all of the args, but *not* self:
 int total_args = oparg;
 if (self_or_null != NULL) {
 args--;
 total_args++;
 }
-#if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
-next_instr = this_instr;
-_Py_Specialize_Call(callable, next_instr, total_args);
-DISPATCH_SAME_OPARG();
-}
-STAT_INC(CALL, deferred);
-DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
-#endif /* ENABLE_SPECIALIZATION */
-if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) {
+else if (Py_TYPE(callable) == &PyMethod_Type) {
 args--;
 total_args++;
 PyObject *self = ((PyMethodObject *)callable)->im_self;

@@ -3017,6 +3081,8 @@ dummy_func(
 CHECK_EVAL_BREAKER();
 }

+macro(CALL) = _SPECIALIZE_CALL + _CALL;
+
 op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable, null, unused[oparg])) {
 DEOPT_IF(null != NULL);
 DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type);
@@ -3799,9 +3865,10 @@ dummy_func(
 top = Py_NewRef(bottom);
 }

-inst(BINARY_OP, (unused/1, lhs, rhs -- res)) {
+op(_SPECIALIZE_BINARY_OP, (counter/1, lhs, rhs -- lhs, rhs)) {
+TIER_ONE_ONLY
 #if ENABLE_SPECIALIZATION
-if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
 next_instr = this_instr;
 _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
 DISPATCH_SAME_OPARG();

@@ -3811,12 +3878,17 @@ dummy_func(
 #endif /* ENABLE_SPECIALIZATION */
 assert(NB_ADD <= oparg);
 assert(oparg <= NB_INPLACE_XOR);
+}
+
+op(_BINARY_OP, (lhs, rhs -- res)) {
 assert(_PyEval_BinaryOps[oparg]);
 res = _PyEval_BinaryOps[oparg](lhs, rhs);
 DECREF_INPUTS();
 ERROR_IF(res == NULL, error);
 }

+macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + _BINARY_OP;
+
 inst(SWAP, (bottom, unused[oparg-2], top --
 top, unused[oparg-2], bottom)) {
 assert(oparg >= 2);
Python/executor_cases.c.h (generated): 107 lines changed
@ -114,19 +114,10 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case TO_BOOL: {
|
||||
case _TO_BOOL: {
|
||||
PyObject *value;
|
||||
PyObject *res;
|
||||
value = stack_pointer[-1];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_ToBool(value, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(TO_BOOL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
int err = PyObject_IsTrue(value);
|
||||
Py_DECREF(value);
|
||||
if (err < 0) goto pop_1_error;
|
||||
|
@ -372,21 +363,12 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case BINARY_SUBSCR: {
|
||||
case _BINARY_SUBSCR: {
|
||||
PyObject *sub;
|
||||
PyObject *container;
|
||||
PyObject *res;
|
||||
sub = stack_pointer[-1];
|
||||
container = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_BinarySubscr(container, sub, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(BINARY_SUBSCR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
res = PyObject_GetItem(container, sub);
|
||||
Py_DECREF(container);
|
||||
Py_DECREF(sub);
|
||||
|
@ -564,22 +546,13 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case STORE_SUBSCR: {
|
||||
case _STORE_SUBSCR: {
|
||||
PyObject *sub;
|
||||
PyObject *container;
|
||||
PyObject *v;
|
||||
sub = stack_pointer[-1];
|
||||
container = stack_pointer[-2];
|
||||
v = stack_pointer[-3];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_StoreSubscr(container, sub, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(STORE_SUBSCR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
/* container[sub] = v */
|
||||
int err = PyObject_SetItem(container, sub, v);
|
||||
Py_DECREF(v);
|
||||
|
@ -893,11 +866,12 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case UNPACK_SEQUENCE: {
|
||||
case _SPECIALIZE_UNPACK_SEQUENCE: {
|
||||
PyObject *seq;
|
||||
seq = stack_pointer[-1];
|
||||
uint16_t counter = (uint16_t)operand;
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
|
@ -905,6 +879,12 @@
|
|||
STAT_INC(UNPACK_SEQUENCE, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
break;
|
||||
}
|
||||
|
||||
case _UNPACK_SEQUENCE: {
|
||||
PyObject *seq;
|
||||
seq = stack_pointer[-1];
|
||||
PyObject **top = stack_pointer + oparg - 1;
|
||||
int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
|
||||
Py_DECREF(seq);
|
||||
|
@ -979,21 +959,11 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case STORE_ATTR: {
|
||||
case _STORE_ATTR: {
|
||||
PyObject *owner;
|
||||
PyObject *v;
|
||||
owner = stack_pointer[-1];
|
||||
v = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_StoreAttr(owner, next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(STORE_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
|
||||
int err = PyObject_SetAttr(owner, name, v);
|
||||
Py_DECREF(v);
|
||||
|
@ -1124,19 +1094,9 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case LOAD_GLOBAL: {
|
||||
case _LOAD_GLOBAL: {
|
||||
PyObject *res;
|
||||
PyObject *null = NULL;
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(LOAD_GLOBAL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
if (PyDict_CheckExact(GLOBALS())
|
||||
&& PyDict_CheckExact(BUILTINS()))
|
||||
|
@ -1157,7 +1117,6 @@
|
|||
}
|
||||
else {
|
||||
/* Slow-path if globals or builtins is not a dict */
|
||||
|
||||
/* namespace 1: globals */
|
||||
if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error;
|
||||
if (res == NULL) {
|
||||
|
@ -1624,21 +1583,11 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case LOAD_ATTR: {
|
||||
case _LOAD_ATTR: {
|
||||
PyObject *owner;
|
||||
PyObject *attr;
|
||||
PyObject *self_or_null = NULL;
|
||||
owner = stack_pointer[-1];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadAttr(owner, next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(LOAD_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
|
||||
if (oparg & 1) {
|
||||
/* Designed to work in tandem with CALL, pushes two values. */
|
||||
|
@ -1646,7 +1595,6 @@
|
|||
if (_PyObject_GetMethod(owner, name, &attr)) {
|
||||
/* We can bypass temporary bound method object.
|
||||
meth is unbound method and obj is self.
|
||||
|
||||
meth | self | arg1 | ... | argN
|
||||
*/
|
||||
assert(attr != NULL); // No errors on this branch
|
||||
|
@ -1657,7 +1605,6 @@
|
|||
something was returned by a descriptor protocol). Set
|
||||
the second element of the stack to NULL, to signal
|
||||
CALL that it's not a method call.
|
||||
|
||||
NULL | meth | arg1 | ... | argN
|
||||
*/
|
||||
Py_DECREF(owner);
|
||||
|
@ -1885,21 +1832,12 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case COMPARE_OP: {
|
||||
case _COMPARE_OP: {
|
||||
PyObject *right;
|
||||
PyObject *left;
|
||||
PyObject *res;
|
||||
right = stack_pointer[-1];
|
||||
left = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(COMPARE_OP, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
assert((oparg >> 5) <= Py_GE);
|
||||
res = PyObject_RichCompare(left, right, oparg >> 5);
|
||||
Py_DECREF(left);
|
||||
|
@ -3246,23 +3184,12 @@
|
|||
break;
|
||||
}
|
||||
|
||||
case BINARY_OP: {
|
||||
case _BINARY_OP: {
|
||||
PyObject *rhs;
|
||||
PyObject *lhs;
|
||||
PyObject *res;
|
||||
rhs = stack_pointer[-1];
|
||||
lhs = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(BINARY_OP, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
assert(NB_ADD <= oparg);
|
||||
assert(oparg <= NB_INPLACE_XOR);
|
||||
assert(_PyEval_BinaryOps[oparg]);
|
||||
res = _PyEval_BinaryOps[oparg](lhs, rhs);
|
||||
Py_DECREF(lhs);
|
||||
|
|
Python/generated_cases.c.h (generated): 804 lines changed
@ -332,20 +332,28 @@
|
|||
static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
|
||||
PyObject *value;
|
||||
PyObject *res;
|
||||
// _SPECIALIZE_TO_BOOL
|
||||
value = stack_pointer[-1];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_ToBool(value, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_ToBool(value, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(TO_BOOL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _TO_BOOL
|
||||
{
|
||||
int err = PyObject_IsTrue(value);
|
||||
Py_DECREF(value);
|
||||
if (err < 0) goto pop_1_error;
|
||||
res = err ? Py_True : Py_False;
|
||||
}
|
||||
STAT_INC(TO_BOOL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
int err = PyObject_IsTrue(value);
|
||||
Py_DECREF(value);
|
||||
if (err < 0) goto pop_1_error;
|
||||
res = err ? Py_True : Py_False;
|
||||
stack_pointer[-1] = res;
|
||||
DISPATCH();
|
||||
}
|
||||
|
@ -708,21 +716,29 @@
|
|||
PyObject *sub;
|
||||
PyObject *container;
|
||||
PyObject *res;
|
||||
// _SPECIALIZE_BINARY_SUBSCR
|
||||
sub = stack_pointer[-1];
|
||||
container = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_BinarySubscr(container, sub, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_BinarySubscr(container, sub, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(BINARY_SUBSCR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _BINARY_SUBSCR
|
||||
{
|
||||
res = PyObject_GetItem(container, sub);
|
||||
Py_DECREF(container);
|
||||
Py_DECREF(sub);
|
||||
if (res == NULL) goto pop_2_error;
|
||||
}
|
||||
STAT_INC(BINARY_SUBSCR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
res = PyObject_GetItem(container, sub);
|
||||
Py_DECREF(container);
|
||||
Py_DECREF(sub);
|
||||
if (res == NULL) goto pop_2_error;
|
||||
STACK_SHRINK(1);
|
||||
stack_pointer[-1] = res;
|
||||
DISPATCH();
|
||||
|
@ -962,24 +978,32 @@
|
|||
PyObject *sub;
|
||||
PyObject *container;
|
||||
PyObject *v;
|
||||
// _SPECIALIZE_STORE_SUBSCR
|
||||
sub = stack_pointer[-1];
|
||||
container = stack_pointer[-2];
|
||||
v = stack_pointer[-3];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_StoreSubscr(container, sub, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_StoreSubscr(container, sub, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(STORE_SUBSCR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _STORE_SUBSCR
|
||||
v = stack_pointer[-3];
|
||||
{
|
||||
/* container[sub] = v */
|
||||
int err = PyObject_SetItem(container, sub, v);
|
||||
Py_DECREF(v);
|
||||
Py_DECREF(container);
|
||||
Py_DECREF(sub);
|
||||
if (err) goto pop_3_error;
|
||||
}
|
||||
STAT_INC(STORE_SUBSCR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
/* container[sub] = v */
|
||||
int err = PyObject_SetItem(container, sub, v);
|
||||
Py_DECREF(v);
|
||||
Py_DECREF(container);
|
||||
Py_DECREF(sub);
|
||||
if (err) goto pop_3_error;
|
||||
STACK_SHRINK(3);
|
||||
DISPATCH();
|
||||
}
|
||||
|
@ -1378,56 +1402,64 @@
|
|||
PREDICTED(SEND);
|
||||
_Py_CODEUNIT *this_instr = next_instr - 2;
|
||||
static_assert(INLINE_CACHE_ENTRIES_SEND == 1, "incorrect cache size");
|
||||
PyObject *v;
|
||||
PyObject *receiver;
|
||||
PyObject *v;
|
||||
PyObject *retval;
|
||||
v = stack_pointer[-1];
|
||||
// _SPECIALIZE_SEND
|
||||
receiver = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_Send(receiver, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(SEND, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
assert(frame != &entry_frame);
|
||||
if ((tstate->interp->eval_frame == NULL) &&
|
||||
(Py_TYPE(receiver) == &PyGen_Type || Py_TYPE(receiver) == &PyCoro_Type) &&
|
||||
((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
|
||||
{
|
||||
PyGenObject *gen = (PyGenObject *)receiver;
|
||||
_PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
|
||||
STACK_SHRINK(1);
|
||||
_PyFrame_StackPush(gen_frame, v);
|
||||
gen->gi_frame_state = FRAME_EXECUTING;
|
||||
gen->gi_exc_state.previous_item = tstate->exc_info;
|
||||
tstate->exc_info = &gen->gi_exc_state;
|
||||
assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - this_instr);
|
||||
frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_SEND + oparg);
|
||||
DISPATCH_INLINED(gen_frame);
|
||||
}
|
||||
if (Py_IsNone(v) && PyIter_Check(receiver)) {
|
||||
retval = Py_TYPE(receiver)->tp_iternext(receiver);
|
||||
}
|
||||
else {
|
||||
retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
|
||||
}
|
||||
if (retval == NULL) {
|
||||
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
|
||||
) {
|
||||
monitor_raise(tstate, frame, this_instr);
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_Send(receiver, next_instr);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
if (_PyGen_FetchStopIterationValue(&retval) == 0) {
|
||||
assert(retval != NULL);
|
||||
JUMPBY(oparg);
|
||||
STAT_INC(SEND, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _SEND
|
||||
v = stack_pointer[-1];
|
||||
{
|
||||
assert(frame != &entry_frame);
|
||||
if ((tstate->interp->eval_frame == NULL) &&
|
||||
(Py_TYPE(receiver) == &PyGen_Type || Py_TYPE(receiver) == &PyCoro_Type) &&
|
||||
((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
|
||||
{
|
||||
PyGenObject *gen = (PyGenObject *)receiver;
|
||||
_PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
|
||||
STACK_SHRINK(1);
|
||||
_PyFrame_StackPush(gen_frame, v);
|
||||
gen->gi_frame_state = FRAME_EXECUTING;
|
||||
gen->gi_exc_state.previous_item = tstate->exc_info;
|
||||
tstate->exc_info = &gen->gi_exc_state;
|
||||
assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - this_instr);
|
||||
frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_SEND + oparg);
|
||||
DISPATCH_INLINED(gen_frame);
|
||||
}
|
||||
if (Py_IsNone(v) && PyIter_Check(receiver)) {
|
||||
retval = Py_TYPE(receiver)->tp_iternext(receiver);
|
||||
}
|
||||
else {
|
||||
goto error;
|
||||
retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
|
||||
}
|
||||
if (retval == NULL) {
|
||||
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
|
||||
) {
|
||||
monitor_raise(tstate, frame, this_instr);
|
||||
}
|
||||
if (_PyGen_FetchStopIterationValue(&retval) == 0) {
|
||||
assert(retval != NULL);
|
||||
JUMPBY(oparg);
|
||||
}
|
||||
else {
|
||||
goto error;
|
||||
}
|
||||
}
|
||||
Py_DECREF(v);
|
||||
}
|
||||
Py_DECREF(v);
|
||||
stack_pointer[-1] = retval;
|
||||
DISPATCH();
|
||||
}
|
||||
|
@ -1694,20 +1726,27 @@
|
|||
_Py_CODEUNIT *this_instr = next_instr - 2;
|
||||
static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size");
|
||||
PyObject *seq;
|
||||
// _SPECIALIZE_UNPACK_SEQUENCE
|
||||
seq = stack_pointer[-1];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(UNPACK_SEQUENCE, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _UNPACK_SEQUENCE
|
||||
{
|
||||
PyObject **top = stack_pointer + oparg - 1;
|
||||
int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
|
||||
Py_DECREF(seq);
|
||||
if (res == 0) goto pop_1_error;
|
||||
}
|
||||
STAT_INC(UNPACK_SEQUENCE, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject **top = stack_pointer + oparg - 1;
|
||||
int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
|
||||
Py_DECREF(seq);
|
||||
if (res == 0) goto pop_1_error;
|
||||
STACK_SHRINK(1);
|
||||
STACK_GROW(oparg);
|
||||
DISPATCH();
|
||||
|
@ -1799,23 +1838,31 @@
|
|||
static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
|
||||
PyObject *owner;
|
||||
PyObject *v;
|
||||
// _SPECIALIZE_STORE_ATTR
|
||||
owner = stack_pointer[-1];
|
||||
v = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_StoreAttr(owner, next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_StoreAttr(owner, next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(STORE_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _STORE_ATTR
|
||||
v = stack_pointer[-2];
|
||||
{
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
|
||||
int err = PyObject_SetAttr(owner, name, v);
|
||||
Py_DECREF(v);
|
||||
Py_DECREF(owner);
|
||||
if (err) goto pop_2_error;
|
||||
}
|
||||
STAT_INC(STORE_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
|
||||
int err = PyObject_SetAttr(owner, name, v);
|
||||
Py_DECREF(v);
|
||||
Py_DECREF(owner);
|
||||
if (err) goto pop_2_error;
|
||||
STACK_SHRINK(2);
|
||||
DISPATCH();
|
||||
}
|
||||
|
@ -1968,51 +2015,58 @@
|
|||
static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size");
|
||||
PyObject *res;
|
||||
PyObject *null = NULL;
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(LOAD_GLOBAL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
if (PyDict_CheckExact(GLOBALS())
|
||||
&& PyDict_CheckExact(BUILTINS()))
|
||||
// _SPECIALIZE_LOAD_GLOBAL
|
||||
{
|
||||
res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
|
||||
(PyDictObject *)BUILTINS(),
|
||||
name);
|
||||
if (res == NULL) {
|
||||
if (!_PyErr_Occurred(tstate)) {
|
||||
/* _PyDict_LoadGlobal() returns NULL without raising
|
||||
* an exception if the key doesn't exist */
|
||||
_PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
|
||||
NAME_ERROR_MSG, name);
|
||||
}
|
||||
if (true) goto error;
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
Py_INCREF(res);
|
||||
STAT_INC(LOAD_GLOBAL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
else {
|
||||
/* Slow-path if globals or builtins is not a dict */
|
||||
|
||||
/* namespace 1: globals */
|
||||
if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error;
|
||||
if (res == NULL) {
|
||||
/* namespace 2: builtins */
|
||||
if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error;
|
||||
// _LOAD_GLOBAL
|
||||
{
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
if (PyDict_CheckExact(GLOBALS())
|
||||
&& PyDict_CheckExact(BUILTINS()))
|
||||
{
|
||||
res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
|
||||
(PyDictObject *)BUILTINS(),
|
||||
name);
|
||||
if (res == NULL) {
|
||||
_PyEval_FormatExcCheckArg(
|
||||
tstate, PyExc_NameError,
|
||||
NAME_ERROR_MSG, name);
|
||||
if (!_PyErr_Occurred(tstate)) {
|
||||
/* _PyDict_LoadGlobal() returns NULL without raising
|
||||
* an exception if the key doesn't exist */
|
||||
_PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
|
||||
NAME_ERROR_MSG, name);
|
||||
}
|
||||
if (true) goto error;
|
||||
}
|
||||
Py_INCREF(res);
|
||||
}
|
||||
else {
|
||||
/* Slow-path if globals or builtins is not a dict */
|
||||
/* namespace 1: globals */
|
||||
if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error;
|
||||
if (res == NULL) {
|
||||
/* namespace 2: builtins */
|
||||
if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error;
|
||||
if (res == NULL) {
|
||||
_PyEval_FormatExcCheckArg(
|
||||
tstate, PyExc_NameError,
|
||||
NAME_ERROR_MSG, name);
|
||||
if (true) goto error;
|
||||
}
|
||||
}
|
||||
}
|
||||
null = NULL;
|
||||
}
|
||||
null = NULL;
|
||||
STACK_GROW(1);
|
||||
STACK_GROW(((oparg & 1) ? 1 : 0));
|
||||
stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res;
|
||||
|
@ -2498,63 +2552,69 @@
|
|||
PREDICTED(LOAD_SUPER_ATTR);
|
||||
_Py_CODEUNIT *this_instr = next_instr - 2;
|
||||
static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 1, "incorrect cache size");
|
||||
PyObject *self;
|
||||
PyObject *class;
|
||||
PyObject *global_super;
|
||||
PyObject *self;
|
||||
PyObject *attr;
|
||||
PyObject *null = NULL;
|
||||
self = stack_pointer[-1];
|
||||
// _SPECIALIZE_LOAD_SUPER_ATTR
|
||||
class = stack_pointer[-2];
|
||||
global_super = stack_pointer[-3];
|
||||
TIER_ONE_ONLY
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
|
||||
#if ENABLE_SPECIALIZATION
|
||||
int load_method = oparg & 1;
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(LOAD_SUPER_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
|
||||
if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
|
||||
PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
|
||||
int err = _Py_call_instrumentation_2args(
|
||||
tstate, PY_MONITORING_EVENT_CALL,
|
||||
frame, this_instr, global_super, arg);
|
||||
if (err) goto pop_3_error;
|
||||
}
|
||||
|
||||
// we make no attempt to optimize here; specializations should
|
||||
// handle any case whose performance we care about
|
||||
PyObject *stack[] = {class, self};
|
||||
PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
|
||||
if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
|
||||
PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
|
||||
if (super == NULL) {
|
||||
_Py_call_instrumentation_exc2(
|
||||
tstate, PY_MONITORING_EVENT_C_RAISE,
|
||||
frame, this_instr, global_super, arg);
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
int load_method = oparg & 1;
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
else {
|
||||
STAT_INC(LOAD_SUPER_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _LOAD_SUPER_ATTR
|
||||
self = stack_pointer[-1];
|
||||
{
|
||||
TIER_ONE_ONLY
|
||||
if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
|
||||
PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
|
||||
int err = _Py_call_instrumentation_2args(
|
||||
tstate, PY_MONITORING_EVENT_C_RETURN,
|
||||
frame, this_instr, global_super, arg);
|
||||
if (err < 0) {
|
||||
Py_CLEAR(super);
|
||||
tstate, PY_MONITORING_EVENT_CALL,
|
||||
frame, this_instr, global_super, arg);
|
||||
if (err) goto pop_3_error;
|
||||
}
|
||||
// we make no attempt to optimize here; specializations should
|
||||
// handle any case whose performance we care about
|
||||
PyObject *stack[] = {class, self};
|
||||
PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
|
||||
if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
|
||||
PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
|
||||
if (super == NULL) {
|
||||
_Py_call_instrumentation_exc2(
|
||||
tstate, PY_MONITORING_EVENT_C_RAISE,
|
||||
frame, this_instr, global_super, arg);
|
||||
}
|
||||
else {
|
||||
int err = _Py_call_instrumentation_2args(
|
||||
tstate, PY_MONITORING_EVENT_C_RETURN,
|
||||
frame, this_instr, global_super, arg);
|
||||
if (err < 0) {
|
||||
Py_CLEAR(super);
|
||||
}
|
||||
}
|
||||
}
|
||||
Py_DECREF(global_super);
|
||||
Py_DECREF(class);
|
||||
Py_DECREF(self);
|
||||
if (super == NULL) goto pop_3_error;
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
|
||||
attr = PyObject_GetAttr(super, name);
|
||||
Py_DECREF(super);
|
||||
if (attr == NULL) goto pop_3_error;
|
||||
null = NULL;
|
||||
}
|
||||
Py_DECREF(global_super);
|
||||
Py_DECREF(class);
|
||||
Py_DECREF(self);
|
||||
if (super == NULL) goto pop_3_error;
|
||||
attr = PyObject_GetAttr(super, name);
|
||||
Py_DECREF(super);
|
||||
if (attr == NULL) goto pop_3_error;
|
||||
null = NULL;
|
||||
STACK_SHRINK(2);
|
||||
STACK_GROW(((oparg & 1) ? 1 : 0));
|
||||
stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr;
|
||||
|
@ -2637,49 +2697,55 @@
|
|||
PyObject *owner;
|
||||
PyObject *attr;
|
||||
PyObject *self_or_null = NULL;
|
||||
// _SPECIALIZE_LOAD_ATTR
|
||||
owner = stack_pointer[-1];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadAttr(owner, next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_LoadAttr(owner, next_instr, name);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(LOAD_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
STAT_INC(LOAD_ATTR, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
|
||||
if (oparg & 1) {
|
||||
/* Designed to work in tandem with CALL, pushes two values. */
|
||||
attr = NULL;
|
||||
if (_PyObject_GetMethod(owner, name, &attr)) {
|
||||
/* We can bypass temporary bound method object.
|
||||
meth is unbound method and obj is self.
|
||||
|
||||
meth | self | arg1 | ... | argN
|
||||
*/
|
||||
assert(attr != NULL); // No errors on this branch
|
||||
self_or_null = owner; // Transfer ownership
|
||||
// _LOAD_ATTR
|
||||
{
|
||||
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
|
||||
if (oparg & 1) {
|
||||
/* Designed to work in tandem with CALL, pushes two values. */
|
||||
attr = NULL;
|
||||
if (_PyObject_GetMethod(owner, name, &attr)) {
|
||||
/* We can bypass temporary bound method object.
|
||||
meth is unbound method and obj is self.
|
||||
meth | self | arg1 | ... | argN
|
||||
*/
|
||||
assert(attr != NULL); // No errors on this branch
|
||||
self_or_null = owner; // Transfer ownership
|
||||
}
|
||||
else {
|
||||
/* meth is not an unbound method (but a regular attr, or
|
||||
something was returned by a descriptor protocol). Set
|
||||
the second element of the stack to NULL, to signal
|
||||
CALL that it's not a method call.
|
||||
NULL | meth | arg1 | ... | argN
|
||||
*/
|
||||
Py_DECREF(owner);
|
||||
if (attr == NULL) goto pop_1_error;
|
||||
self_or_null = NULL;
|
||||
}
|
||||
}
|
||||
else {
|
||||
/* meth is not an unbound method (but a regular attr, or
|
||||
something was returned by a descriptor protocol). Set
|
||||
the second element of the stack to NULL, to signal
|
||||
CALL that it's not a method call.
|
||||
|
||||
NULL | meth | arg1 | ... | argN
|
||||
*/
|
||||
/* Classic, pushes one value. */
|
||||
attr = PyObject_GetAttr(owner, name);
|
||||
Py_DECREF(owner);
|
||||
if (attr == NULL) goto pop_1_error;
|
||||
self_or_null = NULL;
|
||||
}
|
||||
}
|
||||
else {
|
||||
/* Classic, pushes one value. */
|
||||
attr = PyObject_GetAttr(owner, name);
|
||||
Py_DECREF(owner);
|
||||
if (attr == NULL) goto pop_1_error;
|
||||
}
|
||||
STACK_GROW(((oparg & 1) ? 1 : 0));
|
||||
stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr;
|
||||
if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = self_or_null; }
|
||||
|
@ -3076,27 +3142,35 @@
|
|||
PyObject *right;
|
||||
PyObject *left;
|
||||
PyObject *res;
|
||||
// _SPECIALIZE_COMPARE_OP
|
||||
right = stack_pointer[-1];
|
||||
left = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(COMPARE_OP, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
STAT_INC(COMPARE_OP, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
assert((oparg >> 5) <= Py_GE);
|
||||
res = PyObject_RichCompare(left, right, oparg >> 5);
|
||||
Py_DECREF(left);
|
||||
Py_DECREF(right);
|
||||
if (res == NULL) goto pop_2_error;
|
||||
if (oparg & 16) {
|
||||
int res_bool = PyObject_IsTrue(res);
|
||||
Py_DECREF(res);
|
||||
if (res_bool < 0) goto pop_2_error;
|
||||
res = res_bool ? Py_True : Py_False;
|
||||
// _COMPARE_OP
|
||||
{
|
||||
assert((oparg >> 5) <= Py_GE);
|
||||
res = PyObject_RichCompare(left, right, oparg >> 5);
|
||||
Py_DECREF(left);
|
||||
Py_DECREF(right);
|
||||
if (res == NULL) goto pop_2_error;
|
||||
if (oparg & 16) {
|
||||
int res_bool = PyObject_IsTrue(res);
|
||||
Py_DECREF(res);
|
||||
if (res_bool < 0) goto pop_2_error;
|
||||
res = res_bool ? Py_True : Py_False;
|
||||
}
|
||||
}
|
||||
STACK_SHRINK(1);
|
||||
stack_pointer[-1] = res;
|
||||
|
@ -3630,36 +3704,44 @@
|
|||
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
|
||||
PyObject *iter;
|
||||
PyObject *next;
|
||||
// _SPECIALIZE_FOR_ITER
|
||||
iter = stack_pointer[-1];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_ForIter(iter, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(FOR_ITER, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
|
||||
next = (*Py_TYPE(iter)->tp_iternext)(iter);
|
||||
if (next == NULL) {
|
||||
if (_PyErr_Occurred(tstate)) {
|
||||
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
|
||||
goto error;
|
||||
}
|
||||
monitor_raise(tstate, frame, this_instr);
|
||||
_PyErr_Clear(tstate);
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_ForIter(iter, next_instr, oparg);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
/* iterator ended normally */
|
||||
assert(next_instr[oparg].op.code == END_FOR ||
|
||||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
|
||||
Py_DECREF(iter);
|
||||
STACK_SHRINK(1);
|
||||
/* Jump forward oparg, then skip following END_FOR instruction */
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
STAT_INC(FOR_ITER, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
// _FOR_ITER
|
||||
{
|
||||
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
|
||||
next = (*Py_TYPE(iter)->tp_iternext)(iter);
|
||||
if (next == NULL) {
|
||||
if (_PyErr_Occurred(tstate)) {
|
||||
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
|
||||
goto error;
|
||||
}
|
||||
monitor_raise(tstate, frame, this_instr);
|
||||
_PyErr_Clear(tstate);
|
||||
}
|
||||
/* iterator ended normally */
|
||||
assert(next_instr[oparg].op.code == END_FOR ||
|
||||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
|
||||
Py_DECREF(iter);
|
||||
STACK_SHRINK(1);
|
||||
/* Jump forward oparg, then skip following END_FOR instruction */
|
||||
JUMPBY(oparg + 1);
|
||||
DISPATCH();
|
||||
}
|
||||
// Common case: no jump, leave it to the code generator
|
||||
}
|
||||
// Common case: no jump, leave it to the code generator
|
||||
STACK_GROW(1);
|
||||
stack_pointer[-1] = next;
|
||||
DISPATCH();
|
||||
|
@ -4211,84 +4293,92 @@
|
|||
PyObject *self_or_null;
|
||||
PyObject *callable;
|
||||
PyObject *res;
|
||||
// _SPECIALIZE_CALL
|
||||
args = stack_pointer - oparg;
|
||||
self_or_null = stack_pointer[-1 - oparg];
|
||||
callable = stack_pointer[-2 - oparg];
|
||||
// oparg counts all of the args, but *not* self:
|
||||
int total_args = oparg;
|
||||
if (self_or_null != NULL) {
|
||||
args--;
|
||||
total_args++;
|
||||
}
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_Call(callable, next_instr, total_args);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(CALL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) {
|
||||
args--;
|
||||
total_args++;
|
||||
PyObject *self = ((PyMethodObject *)callable)->im_self;
|
||||
args[0] = Py_NewRef(self);
|
||||
PyObject *method = ((PyMethodObject *)callable)->im_func;
|
||||
args[-1] = Py_NewRef(method);
|
||||
Py_DECREF(callable);
|
||||
callable = method;
|
||||
}
|
||||
// Check if the call can be inlined or not
|
||||
if (Py_TYPE(callable) == &PyFunction_Type &&
|
||||
tstate->interp->eval_frame == NULL &&
|
||||
((PyFunctionObject *)callable)->vectorcall == _PyFunction_Vectorcall)
|
||||
{
|
||||
int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
|
||||
PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
|
||||
_PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
|
||||
tstate, (PyFunctionObject *)callable, locals,
|
||||
args, total_args, NULL
|
||||
);
|
||||
// Manipulate stack directly since we leave using DISPATCH_INLINED().
|
||||
STACK_SHRINK(oparg + 2);
|
||||
// The frame has stolen all the arguments from the stack,
|
||||
// so there is no need to clean them up.
|
||||
if (new_frame == NULL) {
|
||||
goto error;
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_Call(callable, next_instr, oparg + (self_or_null != NULL));
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
|
||||
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
|
||||
DISPATCH_INLINED(new_frame);
|
||||
STAT_INC(CALL, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
}
|
||||
/* Callable is not a normal Python function */
|
||||
res = PyObject_Vectorcall(
|
||||
callable, args,
|
||||
total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
|
||||
NULL);
|
||||
if (opcode == INSTRUMENTED_CALL) {
|
||||
PyObject *arg = total_args == 0 ?
|
||||
&_PyInstrumentation_MISSING : args[0];
|
||||
if (res == NULL) {
|
||||
_Py_call_instrumentation_exc2(
|
||||
tstate, PY_MONITORING_EVENT_C_RAISE,
|
||||
frame, this_instr, callable, arg);
|
||||
// _CALL
|
||||
{
|
||||
// oparg counts all of the args, but *not* self:
|
||||
int total_args = oparg;
|
||||
if (self_or_null != NULL) {
|
||||
args--;
|
||||
total_args++;
|
||||
}
|
||||
else {
|
||||
int err = _Py_call_instrumentation_2args(
|
||||
tstate, PY_MONITORING_EVENT_C_RETURN,
|
||||
frame, this_instr, callable, arg);
|
||||
if (err < 0) {
|
||||
Py_CLEAR(res);
|
||||
else if (Py_TYPE(callable) == &PyMethod_Type) {
|
||||
args--;
|
||||
total_args++;
|
||||
PyObject *self = ((PyMethodObject *)callable)->im_self;
|
||||
args[0] = Py_NewRef(self);
|
||||
PyObject *method = ((PyMethodObject *)callable)->im_func;
|
||||
args[-1] = Py_NewRef(method);
|
||||
Py_DECREF(callable);
|
||||
callable = method;
|
||||
}
|
||||
// Check if the call can be inlined or not
|
||||
if (Py_TYPE(callable) == &PyFunction_Type &&
|
||||
tstate->interp->eval_frame == NULL &&
|
||||
((PyFunctionObject *)callable)->vectorcall == _PyFunction_Vectorcall)
|
||||
{
|
||||
int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
|
||||
PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
|
||||
_PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
|
||||
tstate, (PyFunctionObject *)callable, locals,
|
||||
args, total_args, NULL
|
||||
);
|
||||
// Manipulate stack directly since we leave using DISPATCH_INLINED().
|
||||
STACK_SHRINK(oparg + 2);
|
||||
// The frame has stolen all the arguments from the stack,
|
||||
// so there is no need to clean them up.
|
||||
if (new_frame == NULL) {
|
||||
goto error;
|
||||
}
|
||||
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
|
||||
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
|
||||
DISPATCH_INLINED(new_frame);
|
||||
}
|
||||
/* Callable is not a normal Python function */
|
||||
res = PyObject_Vectorcall(
|
||||
callable, args,
|
||||
total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
|
||||
NULL);
|
||||
if (opcode == INSTRUMENTED_CALL) {
|
||||
PyObject *arg = total_args == 0 ?
|
||||
&_PyInstrumentation_MISSING : args[0];
|
||||
if (res == NULL) {
|
||||
_Py_call_instrumentation_exc2(
|
||||
tstate, PY_MONITORING_EVENT_C_RAISE,
|
||||
frame, this_instr, callable, arg);
|
||||
}
|
||||
else {
|
||||
int err = _Py_call_instrumentation_2args(
|
||||
tstate, PY_MONITORING_EVENT_C_RETURN,
|
||||
frame, this_instr, callable, arg);
|
||||
if (err < 0) {
|
||||
Py_CLEAR(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
|
||||
Py_DECREF(callable);
|
||||
for (int i = 0; i < total_args; i++) {
|
||||
Py_DECREF(args[i]);
|
||||
}
|
||||
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
|
||||
}
|
||||
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
|
||||
Py_DECREF(callable);
|
||||
for (int i = 0; i < total_args; i++) {
|
||||
Py_DECREF(args[i]);
|
||||
}
|
||||
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
|
||||
STACK_SHRINK(oparg);
|
||||
STACK_SHRINK(1);
|
||||
stack_pointer[-1] = res;
|
||||
|
@ -5505,24 +5595,32 @@
|
|||
PyObject *rhs;
|
||||
PyObject *lhs;
|
||||
PyObject *res;
|
||||
// _SPECIALIZE_BINARY_OP
|
||||
rhs = stack_pointer[-1];
|
||||
lhs = stack_pointer[-2];
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
|
||||
DISPATCH_SAME_OPARG();
|
||||
{
|
||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||
TIER_ONE_ONLY
|
||||
#if ENABLE_SPECIALIZATION
|
||||
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
|
||||
next_instr = this_instr;
|
||||
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
|
||||
DISPATCH_SAME_OPARG();
|
||||
}
|
||||
STAT_INC(BINARY_OP, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
assert(NB_ADD <= oparg);
|
||||
assert(oparg <= NB_INPLACE_XOR);
|
||||
}
|
||||
// _BINARY_OP
|
||||
{
|
||||
assert(_PyEval_BinaryOps[oparg]);
|
||||
res = _PyEval_BinaryOps[oparg](lhs, rhs);
|
||||
Py_DECREF(lhs);
|
||||
Py_DECREF(rhs);
|
||||
if (res == NULL) goto pop_2_error;
|
||||
}
|
||||
STAT_INC(BINARY_OP, deferred);
|
||||
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
|
||||
#endif /* ENABLE_SPECIALIZATION */
|
||||
assert(NB_ADD <= oparg);
|
||||
assert(oparg <= NB_INPLACE_XOR);
|
||||
assert(_PyEval_BinaryOps[oparg]);
|
||||
res = _PyEval_BinaryOps[oparg](lhs, rhs);
|
||||
Py_DECREF(lhs);
|
||||
Py_DECREF(rhs);
|
||||
if (res == NULL) goto pop_2_error;
|
||||
STACK_SHRINK(1);
|
||||
stack_pointer[-1] = res;
|
||||
DISPATCH();
|
||||
|
|