GH-103082: Implementation of PEP 669: Low Impact Monitoring for CPython (GH-103083)

* The majority of the monitoring code is in instrumentation.c

* The new instrumentation bytecodes are in bytecodes.c

* legacy_tracing.c adapts the new API to the old sys.settrace and sys.setprofile APIs
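For orientation, below is a minimal sketch of how a tool might drive the Python-level API that this instrumentation serves. It assumes the sys.monitoring surface specified in PEP 669 (use_tool_id, register_callback, set_events, the events namespace, DISABLE) and the callback signatures given there; only use_tool_id, free_tool_id, get_tool, register_callback and get_events appear in the clinic stubs included in this excerpt, so treat the rest as illustrative rather than as a description of this exact diff.

# Sketch of a tool using the sys.monitoring API added by this commit.
# Names follow PEP 669; set_events, DISABLE and the callback signatures
# are assumptions based on the PEP, not taken from the hunks below.
import sys
import types

TOOL_ID = 2  # PEP 669 provides tool ids 0-5; 2 is conventionally the profiler slot

sys.monitoring.use_tool_id(TOOL_ID, "example-tool")

def on_py_start(code: types.CodeType, instruction_offset: int):
    # PY_START fires when a Python function starts executing (handled at RESUME)
    print(f"enter {code.co_qualname}")

def on_line(code: types.CodeType, line_number: int):
    print(f"{code.co_filename}:{line_number}")
    # Returning DISABLE turns off LINE events for this specific code location
    return sys.monitoring.DISABLE

events = sys.monitoring.events
sys.monitoring.register_callback(TOOL_ID, events.PY_START, on_py_start)
sys.monitoring.register_callback(TOOL_ID, events.LINE, on_line)
sys.monitoring.set_events(TOOL_ID, events.PY_START | events.LINE)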
Authored by Mark Shannon on 2023-04-12 12:04:55 +01:00, committed via GitHub
parent dce2d38cb0
commit 411b169281
44 changed files with 6029 additions and 1625 deletions


@ -14,6 +14,7 @@
#include "pycore_function.h"
#include "pycore_intrinsics.h"
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_instruments.h"
#include "pycore_object.h" // _PyObject_GC_TRACK()
#include "pycore_moduleobject.h" // PyModuleObject
#include "pycore_opcode.h" // EXTRA_CASES
@ -134,11 +135,45 @@ dummy_func(
inst(RESUME, (--)) {
assert(tstate->cframe == &cframe);
assert(frame == cframe.current_frame);
if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) {
/* Possibly combine this with eval breaker */
if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) {
int err = _Py_Instrument(frame->f_code, tstate->interp);
ERROR_IF(err, error);
next_instr--;
}
else if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) {
goto handle_eval_breaker;
}
}
inst(INSTRUMENTED_RESUME, (--)) {
/* Possible performance enhancement:
* We need to check the eval breaker anyway, can we
* combine the instrument version check and the eval breaker test?
*/
if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) {
if (_Py_Instrument(frame->f_code, tstate->interp)) {
goto error;
}
next_instr--;
}
else {
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_call_instrumentation(
tstate, oparg > 0, frame, next_instr-1);
stack_pointer = _PyFrame_GetStackPointer(frame);
ERROR_IF(err, error);
if (frame->prev_instr != next_instr-1) {
/* Instrumentation has jumped */
next_instr = frame->prev_instr;
DISPATCH();
}
if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) {
goto handle_eval_breaker;
}
}
}
inst(LOAD_CLOSURE, (-- value)) {
/* We keep LOAD_CLOSURE so that the bytecode stays more readable. */
value = GETLOCAL(oparg);
@ -183,6 +218,34 @@ dummy_func(
macro(END_FOR) = POP_TOP + POP_TOP;
inst(INSTRUMENTED_END_FOR, (receiver, value --)) {
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
if (PyGen_Check(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
goto error;
}
PyErr_SetRaisedException(NULL);
}
DECREF_INPUTS();
}
inst(END_SEND, (receiver, value -- value)) {
Py_DECREF(receiver);
}
inst(INSTRUMENTED_END_SEND, (receiver, value -- value)) {
if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
goto error;
}
PyErr_SetRaisedException(NULL);
}
Py_DECREF(receiver);
}
inst(UNARY_NEGATIVE, (value -- res)) {
res = PyNumber_Negative(value);
DECREF_INPUTS();
@ -222,7 +285,6 @@ dummy_func(
inst(BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -233,7 +295,6 @@ dummy_func(
}
inst(BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -243,7 +304,6 @@ dummy_func(
}
inst(BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -254,7 +314,6 @@ dummy_func(
}
inst(BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -263,7 +322,6 @@ dummy_func(
}
inst(BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -280,7 +338,6 @@ dummy_func(
// specializations, but there is no output.
// At the end we just skip over the STORE_FAST.
inst(BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
_Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP];
@ -310,7 +367,6 @@ dummy_func(
}
inst(BINARY_OP_ADD_FLOAT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -320,7 +376,6 @@ dummy_func(
}
inst(BINARY_OP_ADD_INT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -342,7 +397,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_BinarySubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
@ -386,7 +440,6 @@ dummy_func(
}
inst(BINARY_SUBSCR_LIST_INT, (unused/1, list, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR);
@ -403,7 +456,6 @@ dummy_func(
}
inst(BINARY_SUBSCR_TUPLE_INT, (unused/1, tuple, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR);
@ -420,7 +472,6 @@ dummy_func(
}
inst(BINARY_SUBSCR_DICT, (unused/1, dict, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR);
STAT_INC(BINARY_SUBSCR, hit);
res = PyDict_GetItemWithError(dict, sub);
@ -479,7 +530,6 @@ dummy_func(
inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) {
#if ENABLE_SPECIALIZATION
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
@ -497,7 +547,6 @@ dummy_func(
}
inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@ -517,7 +566,6 @@ dummy_func(
}
inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
@ -573,7 +621,6 @@ dummy_func(
assert(EMPTY());
/* Restore previous cframe and return. */
tstate->cframe = cframe.previous;
tstate->cframe->use_tracing = cframe.use_tracing;
assert(tstate->cframe->current_frame == frame->previous);
assert(!_PyErr_Occurred(tstate));
_Py_LeaveRecursiveCallTstate(tstate);
@ -584,8 +631,24 @@ dummy_func(
STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
_PyInterpreterFrame *dying = frame;
frame = cframe.current_frame = dying->previous;
_PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
inst(INSTRUMENTED_RETURN_VALUE, (retval --)) {
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
if (err) goto error;
STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
@ -601,8 +664,25 @@ dummy_func(
Py_INCREF(retval);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
_PyInterpreterFrame *dying = frame;
frame = cframe.current_frame = dying->previous;
_PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
inst(INSTRUMENTED_RETURN_CONST, (--)) {
PyObject *retval = GETITEM(frame->f_code->co_consts, oparg);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
if (err) goto error;
Py_INCREF(retval);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
@ -730,7 +810,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PySendCache *cache = (_PySendCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_Send(receiver, next_instr);
DISPATCH_SAME_OPARG();
@ -739,6 +818,20 @@ dummy_func(
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
#endif /* ENABLE_SPECIALIZATION */
assert(frame != &entry_frame);
if ((Py_TYPE(receiver) == &PyGen_Type ||
Py_TYPE(receiver) == &PyCoro_Type) && ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
{
PyGenObject *gen = (PyGenObject *)receiver;
_PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
frame->yield_offset = oparg;
STACK_SHRINK(1);
_PyFrame_StackPush(gen_frame, v);
gen->gi_frame_state = FRAME_EXECUTING;
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
JUMPBY(INLINE_CACHE_ENTRIES_SEND + oparg);
DISPATCH_INLINED(gen_frame);
}
if (Py_IsNone(v) && PyIter_Check(receiver)) {
retval = Py_TYPE(receiver)->tp_iternext(receiver);
}
@ -746,26 +839,22 @@ dummy_func(
retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
}
if (retval == NULL) {
if (tstate->c_tracefunc != NULL
&& _PyErr_ExceptionMatches(tstate, PyExc_StopIteration))
call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame);
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
) {
monitor_raise(tstate, frame, next_instr-1);
}
if (_PyGen_FetchStopIterationValue(&retval) == 0) {
assert(retval != NULL);
JUMPBY(oparg);
}
else {
assert(retval == NULL);
goto error;
}
}
else {
assert(retval != NULL);
}
Py_DECREF(v);
}
inst(SEND_GEN, (unused/1, receiver, v -- receiver)) {
assert(cframe.use_tracing == 0);
PyGenObject *gen = (PyGenObject *)receiver;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type &&
Py_TYPE(gen) != &PyCoro_Type, SEND);
@ -782,6 +871,26 @@ dummy_func(
DISPATCH_INLINED(gen_frame);
}
inst(INSTRUMENTED_YIELD_VALUE, (retval -- unused)) {
assert(frame != &entry_frame);
PyGenObject *gen = _PyFrame_GetGenerator(frame);
gen->gi_frame_state = FRAME_SUSPENDED;
_PyFrame_SetStackPointer(frame, stack_pointer - 1);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_YIELD,
frame, next_instr-1, retval);
if (err) goto error;
tstate->exc_info = gen->gi_exc_state.previous_item;
gen->gi_exc_state.previous_item = NULL;
_Py_LeaveRecursiveCallPy(tstate);
_PyInterpreterFrame *gen_frame = frame;
frame = cframe.current_frame = frame->previous;
gen_frame->previous = NULL;
frame->prev_instr -= frame->yield_offset;
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
inst(YIELD_VALUE, (retval -- unused)) {
// NOTE: It's important that YIELD_VALUE never raises an exception!
// The compiler treats any exception raised here as a failed close()
@ -790,8 +899,6 @@ dummy_func(
PyGenObject *gen = _PyFrame_GetGenerator(frame);
gen->gi_frame_state = FRAME_SUSPENDED;
_PyFrame_SetStackPointer(frame, stack_pointer - 1);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
tstate->exc_info = gen->gi_exc_state.previous_item;
gen->gi_exc_state.previous_item = NULL;
_Py_LeaveRecursiveCallPy(tstate);
@ -930,7 +1037,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
DISPATCH_SAME_OPARG();
@ -994,7 +1100,6 @@ dummy_func(
inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) {
#if ENABLE_SPECIALIZATION
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
@ -1111,7 +1216,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1);
next_instr--;
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
@ -1163,7 +1267,6 @@ dummy_func(
}
inst(LOAD_GLOBAL_MODULE, (unused/1, index/1, version/1, unused/1 -- null if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL);
PyDictObject *dict = (PyDictObject *)GLOBALS();
DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL);
@ -1177,11 +1280,11 @@ dummy_func(
}
inst(LOAD_GLOBAL_BUILTIN, (unused/1, index/1, mod_version/1, bltn_version/1 -- null if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL);
DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL);
PyDictObject *mdict = (PyDictObject *)GLOBALS();
PyDictObject *bdict = (PyDictObject *)BUILTINS();
assert(opcode == LOAD_GLOBAL_BUILTIN);
DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL);
DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL);
assert(DK_IS_UNICODE(bdict->ma_keys));
@ -1465,7 +1568,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1);
next_instr--;
_Py_Specialize_LoadAttr(owner, next_instr, name);
@ -1511,7 +1613,6 @@ dummy_func(
}
inst(LOAD_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@ -1528,7 +1629,6 @@ dummy_func(
}
inst(LOAD_ATTR_MODULE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR);
PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
assert(dict != NULL);
@ -1545,7 +1645,6 @@ dummy_func(
}
inst(LOAD_ATTR_WITH_HINT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@ -1576,7 +1675,6 @@ dummy_func(
}
inst(LOAD_ATTR_SLOT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@ -1590,7 +1688,6 @@ dummy_func(
}
inst(LOAD_ATTR_CLASS, (unused/1, type_version/2, unused/2, descr/4, cls -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyType_Check(cls), LOAD_ATTR);
DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version,
@ -1606,7 +1703,6 @@ dummy_func(
}
inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused if (oparg & 1), unused)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
@ -1632,7 +1728,6 @@ dummy_func(
}
inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused if (oparg & 1), unused)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
@ -1660,7 +1755,6 @@ dummy_func(
}
inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@ -1681,7 +1775,6 @@ dummy_func(
}
inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@ -1723,7 +1816,6 @@ dummy_func(
}
inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@ -1746,7 +1838,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
@ -1761,7 +1852,6 @@ dummy_func(
}
inst(COMPARE_OP_FLOAT, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@ -1777,7 +1867,6 @@ dummy_func(
// Similar to COMPARE_OP_FLOAT
inst(COMPARE_OP_INT, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP);
@ -1797,7 +1886,6 @@ dummy_func(
// Similar to COMPARE_OP_FLOAT, but for ==, != only
inst(COMPARE_OP_STR, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@ -2044,7 +2132,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyForIterCache *cache = (_PyForIterCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_ForIter(iter, next_instr, oparg);
DISPATCH_SAME_OPARG();
@ -2059,13 +2146,12 @@ dummy_func(
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
else if (tstate->c_tracefunc != NULL) {
call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame);
}
monitor_raise(tstate, frame, next_instr-1);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR);
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR ||
next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR);
Py_DECREF(iter);
STACK_SHRINK(1);
/* Jump forward oparg, then skip following END_FOR instruction */
@ -2075,8 +2161,35 @@ dummy_func(
// Common case: no jump, leave it to the code generator
}
inst(INSTRUMENTED_FOR_ITER, ( -- )) {
_Py_CODEUNIT *here = next_instr-1;
_Py_CODEUNIT *target;
PyObject *iter = TOP();
PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter);
if (next != NULL) {
PUSH(next);
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER;
}
else {
if (_PyErr_Occurred(tstate)) {
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
monitor_raise(tstate, frame, here);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR ||
next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR);
STACK_SHRINK(1);
Py_DECREF(iter);
/* Skip END_FOR */
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1;
}
INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH);
}
inst(FOR_ITER_LIST, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER);
_PyListIterObject *it = (_PyListIterObject *)iter;
STAT_INC(FOR_ITER, hit);
@ -2099,7 +2212,6 @@ dummy_func(
}
inst(FOR_ITER_TUPLE, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
_PyTupleIterObject *it = (_PyTupleIterObject *)iter;
DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@ -2122,7 +2234,6 @@ dummy_func(
}
inst(FOR_ITER_RANGE, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
_PyRangeIterObject *r = (_PyRangeIterObject *)iter;
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@ -2143,7 +2254,6 @@ dummy_func(
}
inst(FOR_ITER_GEN, (unused/1, iter -- iter, unused)) {
assert(cframe.use_tracing == 0);
PyGenObject *gen = (PyGenObject *)iter;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER);
DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER);
@ -2155,7 +2265,8 @@ dummy_func(
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg);
assert(next_instr->op.code == END_FOR);
assert(next_instr->op.code == END_FOR ||
next_instr->op.code == INSTRUMENTED_END_FOR);
DISPATCH_INLINED(gen_frame);
}
@ -2264,7 +2375,6 @@ dummy_func(
inst(LOAD_ATTR_METHOD_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, self -- res2 if (oparg & 1), res)) {
/* Cached method object */
assert(cframe.use_tracing == 0);
PyTypeObject *self_cls = Py_TYPE(self);
assert(type_version != 0);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
@ -2283,7 +2393,6 @@ dummy_func(
}
inst(LOAD_ATTR_METHOD_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
assert(self_cls->tp_dictoffset == 0);
@ -2296,7 +2405,6 @@ dummy_func(
}
inst(LOAD_ATTR_METHOD_LAZY_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
Py_ssize_t dictoffset = self_cls->tp_dictoffset;
@ -2318,6 +2426,21 @@ dummy_func(
kwnames = GETITEM(frame->f_code->co_consts, oparg);
}
inst(INSTRUMENTED_CALL, ( -- )) {
int is_meth = PEEK(oparg+2) != NULL;
int total_args = oparg + is_meth;
PyObject *function = PEEK(total_args + 1);
PyObject *arg = total_args == 0 ?
&_PyInstrumentation_MISSING : PEEK(total_args);
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, function, arg);
ERROR_IF(err, error);
_PyCallCache *cache = (_PyCallCache *)next_instr;
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
GO_TO_INSTRUCTION(CALL);
}
// Cache layout: counter/1, func_version/2
// Neither CALL_INTRINSIC_1/2 nor CALL_FUNCTION_EX are members!
family(call, INLINE_CACHE_ENTRIES_CALL) = {
@ -2359,7 +2482,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyCallCache *cache = (_PyCallCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_Call(callable, next_instr, total_args, kwnames);
DISPATCH_SAME_OPARG();
@ -2402,16 +2524,26 @@ dummy_func(
DISPATCH_INLINED(new_frame);
}
/* Callable is not a normal Python function */
if (cframe.use_tracing) {
res = trace_call_function(
tstate, callable, args,
positional_args, kwnames);
}
else {
res = PyObject_Vectorcall(
callable, args,
positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
kwnames);
res = PyObject_Vectorcall(
callable, args,
positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
kwnames);
if (opcode == INSTRUMENTED_CALL) {
PyObject *arg = total_args == 0 ?
&_PyInstrumentation_MISSING : PEEK(total_args);
if (res == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, callable, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, callable, arg);
if (err < 0) {
Py_CLEAR(res);
}
}
}
kwnames = NULL;
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
@ -2504,7 +2636,6 @@ dummy_func(
inst(CALL_NO_KW_TYPE_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) {
assert(kwnames == NULL);
assert(cframe.use_tracing == 0);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
PyObject *obj = args[0];
@ -2517,7 +2648,6 @@ dummy_func(
inst(CALL_NO_KW_STR_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) {
assert(kwnames == NULL);
assert(cframe.use_tracing == 0);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL);
@ -2570,7 +2700,6 @@ dummy_func(
}
inst(CALL_NO_KW_BUILTIN_O, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
/* Builtin METH_O functions */
assert(kwnames == NULL);
int is_meth = method != NULL;
@ -2602,7 +2731,6 @@ dummy_func(
}
inst(CALL_NO_KW_BUILTIN_FAST, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
/* Builtin METH_FASTCALL functions, without keywords */
assert(kwnames == NULL);
int is_meth = method != NULL;
@ -2638,7 +2766,6 @@ dummy_func(
}
inst(CALL_BUILTIN_FAST_WITH_KEYWORDS, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
/* Builtin METH_FASTCALL | METH_KEYWORDS functions */
int is_meth = method != NULL;
int total_args = oparg;
@ -2674,7 +2801,6 @@ dummy_func(
}
inst(CALL_NO_KW_LEN, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
assert(kwnames == NULL);
/* len(o) */
int is_meth = method != NULL;
@ -2702,7 +2828,6 @@ dummy_func(
}
inst(CALL_NO_KW_ISINSTANCE, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
assert(kwnames == NULL);
/* isinstance(o, o2) */
int is_meth = method != NULL;
@ -2733,7 +2858,6 @@ dummy_func(
// This is secretly a super-instruction
inst(CALL_NO_KW_LIST_APPEND, (unused/1, unused/2, method, self, args[oparg] -- unused)) {
assert(cframe.use_tracing == 0);
assert(kwnames == NULL);
assert(oparg == 1);
assert(method != NULL);
@ -2882,12 +3006,14 @@ dummy_func(
CHECK_EVAL_BREAKER();
}
inst(INSTRUMENTED_CALL_FUNCTION_EX, ( -- )) {
GO_TO_INSTRUCTION(CALL_FUNCTION_EX);
}
inst(CALL_FUNCTION_EX, (unused, func, callargs, kwargs if (oparg & 1) -- result)) {
if (oparg & 1) {
// DICT_MERGE is called before this opcode if there are kwargs.
// It converts all dict subtypes in kwargs into regular dicts.
assert(PyDict_CheckExact(kwargs));
}
// DICT_MERGE is called before this opcode if there are kwargs.
// It converts all dict subtypes in kwargs into regular dicts.
assert(kwargs == NULL || PyDict_CheckExact(kwargs));
if (!PyTuple_CheckExact(callargs)) {
if (check_args_iterable(tstate, func, callargs) < 0) {
goto error;
@ -2899,10 +3025,35 @@ dummy_func(
Py_SETREF(callargs, tuple);
}
assert(PyTuple_CheckExact(callargs));
result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing);
EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
if (opcode == INSTRUMENTED_CALL_FUNCTION_EX &&
!PyFunction_Check(func) && !PyMethod_Check(func)
) {
PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ?
PyTuple_GET_ITEM(callargs, 0) : Py_None;
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, func, arg);
if (err) goto error;
result = PyObject_Call(func, callargs, kwargs);
if (result == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, func, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, func, arg);
if (err < 0) {
Py_CLEAR(result);
}
}
}
else {
result = PyObject_Call(func, callargs, kwargs);
}
DECREF_INPUTS();
assert(PEEK(3 + (oparg & 1)) == NULL);
ERROR_IF(result == NULL, error);
CHECK_EVAL_BREAKER();
@ -3018,7 +3169,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0));
DISPATCH_SAME_OPARG();
@ -3039,9 +3189,105 @@ dummy_func(
assert(oparg >= 2);
}
inst(EXTENDED_ARG, (--)) {
inst(INSTRUMENTED_LINE, ( -- )) {
_Py_CODEUNIT *here = next_instr-1;
_PyFrame_SetStackPointer(frame, stack_pointer);
int original_opcode = _Py_call_instrumentation_line(
tstate, frame, here);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (original_opcode < 0) {
next_instr = here+1;
goto error;
}
next_instr = frame->prev_instr;
if (next_instr != here) {
DISPATCH();
}
if (_PyOpcode_Caches[original_opcode]) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
}
opcode = original_opcode;
DISPATCH_GOTO();
}
inst(INSTRUMENTED_INSTRUCTION, ( -- )) {
int next_opcode = _Py_call_instrumentation_instruction(
tstate, frame, next_instr-1);
ERROR_IF(next_opcode < 0, error);
next_instr--;
if (_PyOpcode_Caches[next_opcode]) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
}
assert(next_opcode > 0 && next_opcode < 256);
opcode = next_opcode;
DISPATCH_GOTO();
}
inst(INSTRUMENTED_JUMP_FORWARD, ( -- )) {
INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP);
}
inst(INSTRUMENTED_JUMP_BACKWARD, ( -- )) {
INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP);
CHECK_EVAL_BREAKER();
}
inst(INSTRUMENTED_POP_JUMP_IF_TRUE, ( -- )) {
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
ERROR_IF(err < 0, error);
_Py_CODEUNIT *here = next_instr-1;
assert(err == 0 || err == 1);
int offset = err*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_FALSE, ( -- )) {
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
ERROR_IF(err < 0, error);
_Py_CODEUNIT *here = next_instr-1;
assert(err == 0 || err == 1);
int offset = (1-err)*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_NONE, ( -- )) {
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
if (Py_IsNone(value)) {
_Py_DECREF_NO_DEALLOC(value);
offset = oparg;
}
else {
Py_DECREF(value);
offset = 0;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_NOT_NONE, ( -- )) {
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
if (Py_IsNone(value)) {
_Py_DECREF_NO_DEALLOC(value);
offset = 0;
}
else {
Py_DECREF(value);
offset = oparg;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(EXTENDED_ARG, ( -- )) {
assert(oparg);
assert(cframe.use_tracing == 0);
opcode = next_instr->op.code;
oparg = oparg << 8 | next_instr->op.arg;
PRE_DISPATCH_GOTO();
@ -3049,6 +3295,12 @@ dummy_func(
}
inst(CACHE, (--)) {
assert(0 && "Executing a cache.");
Py_UNREACHABLE();
}
inst(RESERVED, (--)) {
assert(0 && "Executing RESERVED instruction.");
Py_UNREACHABLE();
}


@ -10,6 +10,7 @@
#include "pycore_function.h"
#include "pycore_intrinsics.h"
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_instruments.h"
#include "pycore_object.h" // _PyObject_GC_TRACK()
#include "pycore_moduleobject.h" // PyModuleObject
#include "pycore_opcode.h" // EXTRA_CASES
@ -92,13 +93,6 @@
#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) _Py_atomic_load_relaxed(ATOMIC_VAL)
#endif
/* Forward declarations */
static PyObject *trace_call_function(
PyThreadState *tstate, PyObject *callable, PyObject **stack,
Py_ssize_t oparg, PyObject *kwnames);
static PyObject * do_call_core(
PyThreadState *tstate, PyObject *func,
PyObject *callargs, PyObject *kwdict, int use_tracing);
#ifdef LLTRACE
static void
@ -179,19 +173,22 @@ lltrace_resume_frame(_PyInterpreterFrame *frame)
PyErr_SetRaisedException(exc);
}
#endif
static int call_trace(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *,
int, PyObject *);
static int call_trace_protected(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *,
int, PyObject *);
static void call_exc_trace(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *);
static int maybe_call_line_trace(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *, int);
static void maybe_dtrace_line(_PyInterpreterFrame *, PyTraceInfo *, int);
static void dtrace_function_entry(_PyInterpreterFrame *);
static void dtrace_function_return(_PyInterpreterFrame *);
static void monitor_raise(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static int monitor_stop_iteration(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static void monitor_unwind(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static void monitor_handled(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr, PyObject *exc);
static void monitor_throw(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static PyObject * import_name(PyThreadState *, _PyInterpreterFrame *,
PyObject *, PyObject *, PyObject *);
@ -217,21 +214,6 @@ _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame);
"cannot access free variable '%s' where it is not associated with a" \
" value in enclosing scope"
#ifndef NDEBUG
/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and
PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen
when a thread continues to run after Python finalization, especially
daemon threads. */
static int
is_tstate_valid(PyThreadState *tstate)
{
assert(!_PyMem_IsPtrFreed(tstate));
assert(!_PyMem_IsPtrFreed(tstate->interp));
return 1;
}
#endif
#ifdef HAVE_ERRNO_H
#include <errno.h>
#endif
@ -596,63 +578,6 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
#include "ceval_macros.h"
static int
trace_function_entry(PyThreadState *tstate, _PyInterpreterFrame *frame)
{
if (tstate->c_tracefunc != NULL) {
/* tstate->c_tracefunc, if defined, is a
function that will be called on *every* entry
to a code block. Its return value, if not
None, is a function that will be called at
the start of each executed line of code.
(Actually, the function must return itself
in order to continue tracing.) The trace
functions are called with three arguments:
a pointer to the current frame, a string
indicating why the function is called, and
an argument which depends on the situation.
The global trace function is also called
whenever an exception is detected. */
if (call_trace_protected(tstate->c_tracefunc,
tstate->c_traceobj,
tstate, frame,
PyTrace_CALL, Py_None)) {
/* Trace function raised an error */
return -1;
}
}
if (tstate->c_profilefunc != NULL) {
/* Similar for c_profilefunc, except it needn't
return itself and isn't called for "line" events */
if (call_trace_protected(tstate->c_profilefunc,
tstate->c_profileobj,
tstate, frame,
PyTrace_CALL, Py_None)) {
/* Profile function raised an error */
return -1;
}
}
return 0;
}
static int
trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *retval)
{
if (tstate->c_tracefunc) {
if (call_trace_protected(tstate->c_tracefunc, tstate->c_traceobj,
tstate, frame, PyTrace_RETURN, retval)) {
return -1;
}
}
if (tstate->c_profilefunc) {
if (call_trace_protected(tstate->c_profilefunc, tstate->c_profileobj,
tstate, frame, PyTrace_RETURN, retval)) {
return -1;
}
}
return 0;
}
int _Py_CheckRecursiveCallPy(
PyThreadState *tstate)
@ -730,7 +655,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
* strict stack discipline must be maintained.
*/
_PyCFrame *prev_cframe = tstate->cframe;
cframe.use_tracing = prev_cframe->use_tracing;
cframe.previous = prev_cframe;
tstate->cframe = &cframe;
@ -765,8 +689,11 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
if (_Py_EnterRecursivePy(tstate)) {
goto exit_unwind;
}
TRACE_FUNCTION_THROW_ENTRY();
DTRACE_FUNCTION_ENTRY();
/* Because this avoids the RESUME,
* we need to update instrumentation */
_Py_Instrument(frame->f_code, tstate->interp);
monitor_throw(tstate, frame, frame->prev_instr);
/* TO DO -- Monitor throw entry. */
goto resume_with_error;
}
@ -781,15 +708,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
assert(_PyInterpreterFrame_LASTI(frame) >= -1); \
/* Jump back to the last instruction executed... */ \
next_instr = frame->prev_instr + 1; \
stack_pointer = _PyFrame_GetStackPointer(frame); \
/* Set stackdepth to -1. \
Update when returning or calling trace function. \
Having stackdepth <= 0 ensures that invalid \
values are not visible to the cycle GC. \
We choose -1 rather than 0 to assist debugging. \
*/ \
frame->stacktop = -1;
stack_pointer = _PyFrame_GetStackPointer(frame);
start_frame:
if (_Py_EnterRecursivePy(tstate)) {
@ -845,91 +764,6 @@ handle_eval_breaker:
#include "generated_cases.c.h"
#if USE_COMPUTED_GOTOS
TARGET_DO_TRACING:
#else
case DO_TRACING:
#endif
{
assert(cframe.use_tracing);
assert(tstate->tracing == 0);
if (INSTR_OFFSET() >= frame->f_code->_co_firsttraceable) {
int instr_prev = _PyInterpreterFrame_LASTI(frame);
frame->prev_instr = next_instr;
NEXTOPARG();
// No _PyOpcode_Deopt here, since RESUME has no optimized forms:
if (opcode == RESUME) {
if (oparg < 2) {
CHECK_EVAL_BREAKER();
}
/* Call tracing */
TRACE_FUNCTION_ENTRY();
DTRACE_FUNCTION_ENTRY();
}
else {
/* line-by-line tracing support */
if (PyDTrace_LINE_ENABLED()) {
maybe_dtrace_line(frame, &tstate->trace_info, instr_prev);
}
if (cframe.use_tracing &&
tstate->c_tracefunc != NULL && !tstate->tracing) {
int err;
/* see maybe_call_line_trace()
for expository comments */
_PyFrame_SetStackPointer(frame, stack_pointer);
err = maybe_call_line_trace(tstate->c_tracefunc,
tstate->c_traceobj,
tstate, frame, instr_prev);
// Reload possibly changed frame fields:
stack_pointer = _PyFrame_GetStackPointer(frame);
frame->stacktop = -1;
// next_instr is only reloaded if tracing *does not* raise.
// This is consistent with the behavior of older Python
// versions. If a trace function sets a new f_lineno and
// *then* raises, we use the *old* location when searching
// for an exception handler, displaying the traceback, and
// so on:
if (err) {
// next_instr wasn't incremented at the start of this
// instruction. Increment it before handling the error,
// so that it looks the same as a "normal" instruction:
next_instr++;
goto error;
}
// Reload next_instr. Don't increment it, though, since
// we're going to re-dispatch to the "true" instruction now:
next_instr = frame->prev_instr;
}
}
}
NEXTOPARG();
PRE_DISPATCH_GOTO();
// No _PyOpcode_Deopt here, since EXTENDED_ARG has no optimized forms:
while (opcode == EXTENDED_ARG) {
// CPython hasn't ever traced the instruction after an EXTENDED_ARG.
// Inline the EXTENDED_ARG here, so we can avoid branching there:
INSTRUCTION_START(EXTENDED_ARG);
opcode = next_instr->op.code;
oparg = oparg << 8 | next_instr->op.arg;
// Make sure the next instruction isn't a RESUME, since that needs
// to trace properly (and shouldn't have an EXTENDED_ARG, anyways):
assert(opcode != RESUME);
PRE_DISPATCH_GOTO();
}
opcode = _PyOpcode_Deopt[opcode];
if (_PyOpcode_Caches[opcode]) {
uint16_t *counter = &next_instr[1].cache;
// The instruction is going to decrement the counter, so we need to
// increment it here to make sure it doesn't try to specialize:
if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) {
INCREMENT_ADAPTIVE_COUNTER(*counter);
}
}
DISPATCH_GOTO();
}
#if USE_COMPUTED_GOTOS
_unknown_opcode:
#else
@ -988,12 +822,7 @@ error:
PyTraceBack_Here(f);
}
}
if (tstate->c_tracefunc != NULL) {
/* Make sure state is set to FRAME_UNWINDING for tracing */
call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj,
tstate, frame);
}
monitor_raise(tstate, frame, next_instr-1);
exception_unwind:
{
@ -1012,8 +841,7 @@ exception_unwind:
}
assert(STACK_LEVEL() == 0);
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_UNWIND();
DTRACE_FUNCTION_EXIT();
monitor_unwind(tstate, frame, next_instr-1);
goto exit_unwind;
}
@ -1036,8 +864,10 @@ exception_unwind:
available to the handler,
so a program can emulate the
Python main loop. */
PUSH(_PyErr_GetRaisedException(tstate));
PyObject *exc = _PyErr_GetRaisedException(tstate);
PUSH(exc);
JUMPTO(handler);
monitor_handled(tstate, frame, next_instr, exc);
/* Resume normal execution */
DISPATCH();
}
@ -1054,7 +884,6 @@ exit_unwind:
if (frame == &entry_frame) {
/* Restore previous cframe and exit */
tstate->cframe = cframe.previous;
tstate->cframe->use_tracing = cframe.use_tracing;
assert(tstate->cframe->current_frame == frame->previous);
_Py_LeaveRecursiveCallTstate(tstate);
return NULL;
@ -2020,105 +1849,108 @@ Error:
return 0;
}
static void
call_exc_trace(Py_tracefunc func, PyObject *self,
PyThreadState *tstate,
_PyInterpreterFrame *f)
static int
do_monitor_exc(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr, int event)
{
PyObject *exc = _PyErr_GetRaisedException(tstate);
assert(exc && PyExceptionInstance_Check(exc));
PyObject *type = PyExceptionInstance_Class(exc);
PyObject *traceback = PyException_GetTraceback(exc);
if (traceback == NULL) {
traceback = Py_NewRef(Py_None);
}
PyObject *arg = PyTuple_Pack(3, type, exc, traceback);
Py_XDECREF(traceback);
if (arg == NULL) {
_PyErr_SetRaisedException(tstate, exc);
return;
}
int err = call_trace(func, self, tstate, f, PyTrace_EXCEPTION, arg);
Py_DECREF(arg);
assert(event < PY_MONITORING_UNGROUPED_EVENTS);
PyObject *exc = PyErr_GetRaisedException();
assert(exc != NULL);
int err = _Py_call_instrumentation_arg(tstate, event, frame, instr, exc);
if (err == 0) {
_PyErr_SetRaisedException(tstate, exc);
PyErr_SetRaisedException(exc);
}
else {
Py_XDECREF(exc);
Py_DECREF(exc);
}
return err;
}
static inline int
no_tools_for_event(PyThreadState *tstate, _PyInterpreterFrame *frame, int event)
{
_PyCoMonitoringData *data = frame->f_code->_co_monitoring;
if (data) {
if (data->active_monitors.tools[event] == 0) {
return 1;
}
}
else {
if (tstate->interp->monitors.tools[event] == 0) {
return 1;
}
}
return 0;
}
static void
monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_RAISE)) {
return;
}
do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_RAISE);
}
static int
call_trace_protected(Py_tracefunc func, PyObject *obj,
PyThreadState *tstate, _PyInterpreterFrame *frame,
int what, PyObject *arg)
monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
PyObject *exc = _PyErr_GetRaisedException(tstate);
int err = call_trace(func, obj, tstate, frame, what, arg);
if (err == 0)
{
_PyErr_SetRaisedException(tstate, exc);
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_STOP_ITERATION)) {
return 0;
}
else {
Py_XDECREF(exc);
return -1;
}
return do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_STOP_ITERATION);
}
static void
initialize_trace_info(PyTraceInfo *trace_info, _PyInterpreterFrame *frame)
monitor_unwind(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
PyCodeObject *code = frame->f_code;
if (trace_info->code != code) {
trace_info->code = code;
_PyCode_InitAddressRange(code, &trace_info->bounds);
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_UNWIND)) {
return;
}
_Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_UNWIND, frame, instr);
}
static void
monitor_handled(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr, PyObject *exc)
{
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_EXCEPTION_HANDLED)) {
return;
}
_Py_call_instrumentation_arg(tstate, PY_MONITORING_EVENT_EXCEPTION_HANDLED, frame, instr, exc);
}
static void
monitor_throw(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_THROW)) {
return;
}
_Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_THROW, frame, instr);
}
void
PyThreadState_EnterTracing(PyThreadState *tstate)
{
assert(tstate->tracing >= 0);
tstate->tracing++;
tstate->cframe->use_tracing = 0;
}
void
PyThreadState_LeaveTracing(PyThreadState *tstate)
{
assert(tstate->tracing > 0 && tstate->cframe->use_tracing == 0);
assert(tstate->tracing > 0);
tstate->tracing--;
_PyThreadState_UpdateTracingState(tstate);
}
static int
call_trace(Py_tracefunc func, PyObject *obj,
PyThreadState *tstate, _PyInterpreterFrame *frame,
int what, PyObject *arg)
{
int result;
if (tstate->tracing) {
return 0;
}
PyFrameObject *f = _PyFrame_GetFrameObject(frame);
if (f == NULL) {
return -1;
}
int old_what = tstate->tracing_what;
tstate->tracing_what = what;
PyThreadState_EnterTracing(tstate);
assert(_PyInterpreterFrame_LASTI(frame) >= 0);
if (_PyCode_InitLineArray(frame->f_code)) {
return -1;
}
f->f_lineno = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame));
result = func(obj, f, what, arg);
f->f_lineno = 0;
PyThreadState_LeaveTracing(tstate);
tstate->tracing_what = old_what;
return result;
}
PyObject*
_PyEval_CallTracing(PyObject *func, PyObject *args)
@ -2126,7 +1958,6 @@ _PyEval_CallTracing(PyObject *func, PyObject *args)
// Save and disable tracing
PyThreadState *tstate = _PyThreadState_GET();
int save_tracing = tstate->tracing;
int save_use_tracing = tstate->cframe->use_tracing;
tstate->tracing = 0;
// Call the tracing function
@ -2134,81 +1965,9 @@ _PyEval_CallTracing(PyObject *func, PyObject *args)
// Restore tracing
tstate->tracing = save_tracing;
tstate->cframe->use_tracing = save_use_tracing;
return result;
}
/* See Objects/lnotab_notes.txt for a description of how tracing works. */
static int
maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
PyThreadState *tstate, _PyInterpreterFrame *frame, int instr_prev)
{
int result = 0;
/* If the last instruction falls at the start of a line or if it
represents a jump backwards, update the frame's line number and
then call the trace function if we're tracing source lines.
*/
if (_PyCode_InitLineArray(frame->f_code)) {
return -1;
}
int lastline;
if (instr_prev <= frame->f_code->_co_firsttraceable) {
lastline = -1;
}
else {
lastline = _PyCode_LineNumberFromArray(frame->f_code, instr_prev);
}
int line = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame));
PyFrameObject *f = _PyFrame_GetFrameObject(frame);
if (f == NULL) {
return -1;
}
if (line != -1 && f->f_trace_lines) {
/* Trace backward edges (except in 'yield from') or if line number has changed */
int trace = line != lastline ||
(_PyInterpreterFrame_LASTI(frame) < instr_prev &&
// SEND has no quickened forms, so no need to use _PyOpcode_Deopt
// here:
frame->prev_instr->op.code != SEND);
if (trace) {
result = call_trace(func, obj, tstate, frame, PyTrace_LINE, Py_None);
}
}
/* Always emit an opcode event if we're tracing all opcodes. */
if (f->f_trace_opcodes && result == 0) {
result = call_trace(func, obj, tstate, frame, PyTrace_OPCODE, Py_None);
}
return result;
}
int
_PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) {
return -1;
}
tstate->c_profilefunc = func;
PyObject *old_profileobj = tstate->c_profileobj;
tstate->c_profileobj = Py_XNewRef(arg);
/* Flag that tracing or profiling is turned on */
_PyThreadState_UpdateTracingState(tstate);
// gh-98257: Only call Py_XDECREF() once the new profile function is fully
// set, so it's safe to call sys.setprofile() again (reentrant call).
Py_XDECREF(old_profileobj);
return 0;
}
void
PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
{
@ -2240,33 +1999,6 @@ PyEval_SetProfileAllThreads(Py_tracefunc func, PyObject *arg)
}
}
int
_PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) {
return -1;
}
tstate->c_tracefunc = func;
PyObject *old_traceobj = tstate->c_traceobj;
tstate->c_traceobj = Py_XNewRef(arg);
/* Flag that tracing or profiling is turned on */
_PyThreadState_UpdateTracingState(tstate);
// gh-98257: Only call Py_XDECREF() once the new trace function is fully
// set, so it's safe to call sys.settrace() again (reentrant call).
Py_XDECREF(old_traceobj);
return 0;
}
void
PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
{
@ -2492,114 +2224,6 @@ PyEval_GetFuncDesc(PyObject *func)
return " object";
}
#define C_TRACE(x, call) \
if (use_tracing && tstate->c_profilefunc) { \
if (call_trace(tstate->c_profilefunc, tstate->c_profileobj, \
tstate, tstate->cframe->current_frame, \
PyTrace_C_CALL, func)) { \
x = NULL; \
} \
else { \
x = call; \
if (tstate->c_profilefunc != NULL) { \
if (x == NULL) { \
call_trace_protected(tstate->c_profilefunc, \
tstate->c_profileobj, \
tstate, tstate->cframe->current_frame, \
PyTrace_C_EXCEPTION, func); \
/* XXX should pass (type, value, tb) */ \
} else { \
if (call_trace(tstate->c_profilefunc, \
tstate->c_profileobj, \
tstate, tstate->cframe->current_frame, \
PyTrace_C_RETURN, func)) { \
Py_DECREF(x); \
x = NULL; \
} \
} \
} \
} \
} else { \
x = call; \
}
static PyObject *
trace_call_function(PyThreadState *tstate,
PyObject *func,
PyObject **args, Py_ssize_t nargs,
PyObject *kwnames)
{
int use_tracing = 1;
PyObject *x;
if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) {
C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames));
return x;
}
else if (Py_IS_TYPE(func, &PyMethodDescr_Type) && nargs > 0) {
/* We need to create a temporary bound method as argument
for profiling.
If nargs == 0, then this cannot work because we have no
"self". In any case, the call itself would raise
TypeError (foo needs an argument), so we just skip
profiling. */
PyObject *self = args[0];
func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self));
if (func == NULL) {
return NULL;
}
C_TRACE(x, PyObject_Vectorcall(func,
args+1, nargs-1,
kwnames));
Py_DECREF(func);
return x;
}
return PyObject_Vectorcall(func, args, nargs | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames);
}
static PyObject *
do_call_core(PyThreadState *tstate,
PyObject *func,
PyObject *callargs,
PyObject *kwdict,
int use_tracing
)
{
PyObject *result;
if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) {
C_TRACE(result, PyObject_Call(func, callargs, kwdict));
return result;
}
else if (Py_IS_TYPE(func, &PyMethodDescr_Type)) {
Py_ssize_t nargs = PyTuple_GET_SIZE(callargs);
if (nargs > 0 && use_tracing) {
/* We need to create a temporary bound method as argument
for profiling.
If nargs == 0, then this cannot work because we have no
"self". In any case, the call itself would raise
TypeError (foo needs an argument), so we just skip
profiling. */
PyObject *self = PyTuple_GET_ITEM(callargs, 0);
func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self));
if (func == NULL) {
return NULL;
}
C_TRACE(result, _PyObject_FastCallDictTstate(
tstate, func,
&_PyTuple_ITEMS(callargs)[1],
nargs - 1,
kwdict));
Py_DECREF(func);
return result;
}
}
EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
return PyObject_Call(func, callargs, kwdict);
}
/* Extract a slice index from a PyLong or an object with the
nb_index slot defined, and store in *pi.
Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
@ -2973,69 +2597,6 @@ PyUnstable_Eval_RequestCodeExtraIndex(freefunc free)
return new_index;
}
static void
dtrace_function_entry(_PyInterpreterFrame *frame)
{
const char *filename;
const char *funcname;
int lineno;
PyCodeObject *code = frame->f_code;
filename = PyUnicode_AsUTF8(code->co_filename);
funcname = PyUnicode_AsUTF8(code->co_name);
lineno = _PyInterpreterFrame_GetLine(frame);
PyDTrace_FUNCTION_ENTRY(filename, funcname, lineno);
}
static void
dtrace_function_return(_PyInterpreterFrame *frame)
{
const char *filename;
const char *funcname;
int lineno;
PyCodeObject *code = frame->f_code;
filename = PyUnicode_AsUTF8(code->co_filename);
funcname = PyUnicode_AsUTF8(code->co_name);
lineno = _PyInterpreterFrame_GetLine(frame);
PyDTrace_FUNCTION_RETURN(filename, funcname, lineno);
}
/* DTrace equivalent of maybe_call_line_trace. */
static void
maybe_dtrace_line(_PyInterpreterFrame *frame,
PyTraceInfo *trace_info,
int instr_prev)
{
const char *co_filename, *co_name;
/* If the last instruction executed isn't in the current
instruction window, reset the window.
*/
initialize_trace_info(trace_info, frame);
int lastline = _PyCode_CheckLineNumber(instr_prev*sizeof(_Py_CODEUNIT), &trace_info->bounds);
int addr = _PyInterpreterFrame_LASTI(frame) * sizeof(_Py_CODEUNIT);
int line = _PyCode_CheckLineNumber(addr, &trace_info->bounds);
if (line != -1) {
/* Trace backward edges or first instruction of a new line */
if (_PyInterpreterFrame_LASTI(frame) < instr_prev ||
(line != lastline && addr == trace_info->bounds.ar_start))
{
co_filename = PyUnicode_AsUTF8(frame->f_code->co_filename);
if (!co_filename) {
co_filename = "?";
}
co_name = PyUnicode_AsUTF8(frame->f_code->co_name);
if (!co_name) {
co_name = "?";
}
PyDTrace_LINE(co_filename, co_name, line);
}
}
}
/* Implement Py_EnterRecursiveCall() and Py_LeaveRecursiveCall() as functions
for the limited API. */


@ -93,8 +93,6 @@
{ \
NEXTOPARG(); \
PRE_DISPATCH_GOTO(); \
assert(cframe.use_tracing == 0 || cframe.use_tracing == 255); \
opcode |= cframe.use_tracing OR_DTRACE_LINE; \
DISPATCH_GOTO(); \
}
@ -102,7 +100,6 @@
{ \
opcode = next_instr->op.code; \
PRE_DISPATCH_GOTO(); \
opcode |= cframe.use_tracing OR_DTRACE_LINE; \
DISPATCH_GOTO(); \
}
@ -183,7 +180,7 @@ GETITEM(PyObject *v, Py_ssize_t i) {
#define PREDICT(next_op) \
do { \
_Py_CODEUNIT word = *next_instr; \
opcode = word.op.code | cframe.use_tracing OR_DTRACE_LINE; \
opcode = word.op.code; \
if (opcode == next_op) { \
oparg = word.op.arg; \
INSTRUCTION_START(next_op); \
@ -283,47 +280,6 @@ GETITEM(PyObject *v, Py_ssize_t i) {
#define BUILTINS() frame->f_builtins
#define LOCALS() frame->f_locals
/* Shared opcode macros */
#define TRACE_FUNCTION_EXIT() \
if (cframe.use_tracing) { \
if (trace_function_exit(tstate, frame, retval)) { \
Py_DECREF(retval); \
goto exit_unwind; \
} \
}
#define DTRACE_FUNCTION_EXIT() \
if (PyDTrace_FUNCTION_RETURN_ENABLED()) { \
dtrace_function_return(frame); \
}
#define TRACE_FUNCTION_UNWIND() \
if (cframe.use_tracing) { \
/* Since we are already unwinding, \
* we don't care if this raises */ \
trace_function_exit(tstate, frame, NULL); \
}
#define TRACE_FUNCTION_ENTRY() \
if (cframe.use_tracing) { \
_PyFrame_SetStackPointer(frame, stack_pointer); \
int err = trace_function_entry(tstate, frame); \
stack_pointer = _PyFrame_GetStackPointer(frame); \
frame->stacktop = -1; \
if (err) { \
goto error; \
} \
}
#define TRACE_FUNCTION_THROW_ENTRY() \
if (cframe.use_tracing) { \
assert(frame->stacktop >= 0); \
if (trace_function_entry(tstate, frame)) { \
goto exit_unwind; \
} \
}
#define DTRACE_FUNCTION_ENTRY() \
if (PyDTrace_FUNCTION_ENTRY_ENABLED()) { \
dtrace_function_entry(frame); \
@ -371,3 +327,18 @@ do { \
_Py_DECREF_NO_DEALLOC(right); \
} \
} while (0)
// If a trace function sets a new f_lineno and
// *then* raises, we use the destination when searching
// for an exception handler, displaying the traceback, and so on
#define INSTRUMENTED_JUMP(src, dest, event) \
do { \
_PyFrame_SetStackPointer(frame, stack_pointer); \
int err = _Py_call_instrumentation_jump(tstate, event, frame, src, dest); \
stack_pointer = _PyFrame_GetStackPointer(frame); \
if (err) { \
next_instr = (dest)+1; \
goto error; \
} \
next_instr = frame->prev_instr; \
} while (0);

Python/clinic/instrumentation.c.h (generated, new file, 311 lines)

@ -0,0 +1,311 @@
/*[clinic input]
preserve
[clinic start generated code]*/
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
# include "pycore_gc.h" // PyGC_Head
# include "pycore_runtime.h" // _Py_ID()
#endif
PyDoc_STRVAR(monitoring_use_tool_id__doc__,
"use_tool_id($module, tool_id, name, /)\n"
"--\n"
"\n");
#define MONITORING_USE_TOOL_ID_METHODDEF \
{"use_tool_id", _PyCFunction_CAST(monitoring_use_tool_id), METH_FASTCALL, monitoring_use_tool_id__doc__},
static PyObject *
monitoring_use_tool_id_impl(PyObject *module, int tool_id, PyObject *name);
static PyObject *
monitoring_use_tool_id(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
PyObject *name;
if (!_PyArg_CheckPositional("use_tool_id", nargs, 2, 2)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
name = args[1];
return_value = monitoring_use_tool_id_impl(module, tool_id, name);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_free_tool_id__doc__,
"free_tool_id($module, tool_id, /)\n"
"--\n"
"\n");
#define MONITORING_FREE_TOOL_ID_METHODDEF \
{"free_tool_id", (PyCFunction)monitoring_free_tool_id, METH_O, monitoring_free_tool_id__doc__},
static PyObject *
monitoring_free_tool_id_impl(PyObject *module, int tool_id);
static PyObject *
monitoring_free_tool_id(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
int tool_id;
tool_id = _PyLong_AsInt(arg);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_free_tool_id_impl(module, tool_id);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_get_tool__doc__,
"get_tool($module, tool_id, /)\n"
"--\n"
"\n");
#define MONITORING_GET_TOOL_METHODDEF \
{"get_tool", (PyCFunction)monitoring_get_tool, METH_O, monitoring_get_tool__doc__},
static PyObject *
monitoring_get_tool_impl(PyObject *module, int tool_id);
static PyObject *
monitoring_get_tool(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
int tool_id;
tool_id = _PyLong_AsInt(arg);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_get_tool_impl(module, tool_id);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_register_callback__doc__,
"register_callback($module, tool_id, event, func, /)\n"
"--\n"
"\n");
#define MONITORING_REGISTER_CALLBACK_METHODDEF \
{"register_callback", _PyCFunction_CAST(monitoring_register_callback), METH_FASTCALL, monitoring_register_callback__doc__},
static PyObject *
monitoring_register_callback_impl(PyObject *module, int tool_id, int event,
PyObject *func);
static PyObject *
monitoring_register_callback(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
int event;
PyObject *func;
if (!_PyArg_CheckPositional("register_callback", nargs, 3, 3)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
event = _PyLong_AsInt(args[1]);
if (event == -1 && PyErr_Occurred()) {
goto exit;
}
func = args[2];
return_value = monitoring_register_callback_impl(module, tool_id, event, func);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_get_events__doc__,
"get_events($module, tool_id, /)\n"
"--\n"
"\n");
#define MONITORING_GET_EVENTS_METHODDEF \
{"get_events", (PyCFunction)monitoring_get_events, METH_O, monitoring_get_events__doc__},
static int
monitoring_get_events_impl(PyObject *module, int tool_id);
static PyObject *
monitoring_get_events(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
int tool_id;
int _return_value;
tool_id = _PyLong_AsInt(arg);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
_return_value = monitoring_get_events_impl(module, tool_id);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
return_value = PyLong_FromLong((long)_return_value);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_set_events__doc__,
"set_events($module, tool_id, event_set, /)\n"
"--\n"
"\n");
#define MONITORING_SET_EVENTS_METHODDEF \
{"set_events", _PyCFunction_CAST(monitoring_set_events), METH_FASTCALL, monitoring_set_events__doc__},
static PyObject *
monitoring_set_events_impl(PyObject *module, int tool_id, int event_set);
static PyObject *
monitoring_set_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
int event_set;
if (!_PyArg_CheckPositional("set_events", nargs, 2, 2)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
event_set = _PyLong_AsInt(args[1]);
if (event_set == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_set_events_impl(module, tool_id, event_set);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_get_local_events__doc__,
"get_local_events($module, tool_id, code, /)\n"
"--\n"
"\n");
#define MONITORING_GET_LOCAL_EVENTS_METHODDEF \
{"get_local_events", _PyCFunction_CAST(monitoring_get_local_events), METH_FASTCALL, monitoring_get_local_events__doc__},
static int
monitoring_get_local_events_impl(PyObject *module, int tool_id,
PyObject *code);
static PyObject *
monitoring_get_local_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
PyObject *code;
int _return_value;
if (!_PyArg_CheckPositional("get_local_events", nargs, 2, 2)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
code = args[1];
_return_value = monitoring_get_local_events_impl(module, tool_id, code);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
return_value = PyLong_FromLong((long)_return_value);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_set_local_events__doc__,
"set_local_events($module, tool_id, code, event_set, /)\n"
"--\n"
"\n");
#define MONITORING_SET_LOCAL_EVENTS_METHODDEF \
{"set_local_events", _PyCFunction_CAST(monitoring_set_local_events), METH_FASTCALL, monitoring_set_local_events__doc__},
static PyObject *
monitoring_set_local_events_impl(PyObject *module, int tool_id,
PyObject *code, int event_set);
static PyObject *
monitoring_set_local_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
PyObject *code;
int event_set;
if (!_PyArg_CheckPositional("set_local_events", nargs, 3, 3)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
code = args[1];
event_set = _PyLong_AsInt(args[2]);
if (event_set == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_set_local_events_impl(module, tool_id, code, event_set);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_restart_events__doc__,
"restart_events($module, /)\n"
"--\n"
"\n");
#define MONITORING_RESTART_EVENTS_METHODDEF \
{"restart_events", (PyCFunction)monitoring_restart_events, METH_NOARGS, monitoring_restart_events__doc__},
static PyObject *
monitoring_restart_events_impl(PyObject *module);
static PyObject *
monitoring_restart_events(PyObject *module, PyObject *Py_UNUSED(ignored))
{
return monitoring_restart_events_impl(module);
}
PyDoc_STRVAR(monitoring__all_events__doc__,
"_all_events($module, /)\n"
"--\n"
"\n");
#define MONITORING__ALL_EVENTS_METHODDEF \
{"_all_events", (PyCFunction)monitoring__all_events, METH_NOARGS, monitoring__all_events__doc__},
static PyObject *
monitoring__all_events_impl(PyObject *module);
static PyObject *
monitoring__all_events(PyObject *module, PyObject *Py_UNUSED(ignored))
{
return monitoring__all_events_impl(module);
}
/*[clinic end generated code: output=11cc0803875b3ffa input=a9049054013a1b77]*/
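These generated wrappers back the new sys.monitoring namespace. A minimal Python-level tour, using the names specified by PEP 669 (tool id 2 is the id the PEP reserves for profilers; the exact spelling of the event constants at this commit is an assumption):

import sys

mon = sys.monitoring
TOOL_ID = 2                                   # PEP 669 reserves id 2 for profilers

mon.use_tool_id(TOOL_ID, "example-profiler")  # claim the id; raises if it is already taken

def on_py_start(code, instruction_offset):
    print("entering", code.co_qualname)

mon.register_callback(TOOL_ID, mon.events.PY_START, on_py_start)
mon.set_events(TOOL_ID, mon.events.PY_START)  # a bitset of events, enabled globally for this tool

def demo():
    return 42

demo()                                        # fires on_py_start

mon.set_events(TOOL_ID, 0)                    # switch everything off again
mon.free_tool_id(TOOL_ID)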

View file

@ -1427,8 +1427,7 @@ compiler_add_yield_from(struct compiler *c, location loc, int await)
ADDOP(c, loc, CLEANUP_THROW);
USE_LABEL(c, exit);
ADDOP_I(c, loc, SWAP, 2);
ADDOP(c, loc, POP_TOP);
ADDOP(c, loc, END_SEND);
return SUCCESS;
}
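The new END_SEND instruction replaces the SWAP/POP_TOP pair in the epilogue of a delegating generator, giving instrumentation a single instruction to hook. It can be observed with dis (output abbreviated and version-dependent):

import dis

def delegating():
    yield from range(3)

dis.dis(delegating)   # the send loop's epilogue now ends in END_SEND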

File diff suppressed because it is too large.

Python/instrumentation.c (new file, 2021 lines added)
File diff suppressed because it is too large.

Python/legacy_tracing.c (new file, 528 lines added)
View file

@ -0,0 +1,528 @@
/* Support for legacy tracing on top of PEP 669 instrumentation
* Provides callables to forward PEP 669 events to legacy events.
*/
#include <stddef.h>
#include "Python.h"
#include "pycore_ceval.h"
#include "pycore_object.h"
#include "pycore_sysmodule.h"
typedef struct _PyLegacyEventHandler {
PyObject_HEAD
vectorcallfunc vectorcall;
int event;
} _PyLegacyEventHandler;
/* The Py_tracefunc function expects the following arguments:
* obj: the trace object (PyObject *)
* frame: the current frame (PyFrameObject *)
* kind: the kind of event, see PyTrace_XXX #defines (int)
* arg: the event-specific argument (a PyObject *)
*/
static PyObject *
call_profile_func(_PyLegacyEventHandler *self, PyObject *arg)
{
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_profilefunc == NULL) {
Py_RETURN_NONE;
}
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling profile function.");
return NULL;
}
Py_INCREF(frame);
int err = tstate->c_profilefunc(tstate->c_profileobj, frame, self->event, arg);
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
sys_profile_func2(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 2);
return call_profile_func(self, Py_None);
}
static PyObject *
sys_profile_func3(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
return call_profile_func(self, args[2]);
}
static PyObject *
sys_profile_call_or_return(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 4);
PyObject *callable = args[2];
if (PyCFunction_Check(callable)) {
return call_profile_func(self, callable);
}
if (Py_TYPE(callable) == &PyMethodDescr_Type) {
PyObject *self_arg = args[3];
/* For backwards compatibility, convert the method
* descriptor to a bound builtin method. */
/* If there is no self argument, skip the event. */
if (self_arg == &_PyInstrumentation_MISSING) {
Py_RETURN_NONE;
}
PyObject *meth = Py_TYPE(callable)->tp_descr_get(
callable, self_arg, (PyObject*)Py_TYPE(self_arg));
if (meth == NULL) {
return NULL;
}
PyObject *res = call_profile_func(self, meth);
Py_DECREF(meth);
return res;
}
Py_RETURN_NONE;
}
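The descriptor-to-bound-method conversion above is what keeps legacy profilers seeing familiar c_call arguments. A quick check from Python:

import sys

def profiler(frame, event, arg):
    if event in ("c_call", "c_return", "c_exception"):
        print(event, arg)

sys.setprofile(profiler)
len([1, 2, 3])      # arg is the builtin function len
"abc".upper()       # arg is a bound builtin method, rebuilt from the method descriptor
sys.setprofile(None)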
static PyObject *
call_trace_func(_PyLegacyEventHandler *self, PyObject *arg)
{
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
Py_INCREF(frame);
int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, arg);
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
sys_trace_exception_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
PyObject *exc = args[2];
assert(PyExceptionInstance_Check(exc));
PyObject *type = (PyObject *)Py_TYPE(exc);
PyObject *tb = PyException_GetTraceback(exc);
if (tb == NULL) {
tb = Py_NewRef(Py_None);
}
PyObject *tuple = PyTuple_Pack(3, type, exc, tb);
Py_DECREF(tb);
if (tuple == NULL) {
return NULL;
}
PyObject *res = call_trace_func(self, tuple);
Py_DECREF(tuple);
return res;
}
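Legacy trace functions still receive the classic (type, value, traceback) triple for 'exception' events; it is rebuilt here from the exception instance that the RAISE monitoring event reports. For example:

import sys

def tracer(frame, event, arg):
    if event == "exception":
        exc_type, exc_value, tb = arg
        print("exception:", exc_type.__name__, exc_value)
    return tracer

def boom():
    try:
        1 / 0
    except ZeroDivisionError:
        pass

sys.settrace(tracer)
boom()
sys.settrace(None)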
static PyObject *
sys_trace_func2(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 2);
return call_trace_func(self, Py_None);
}
static PyObject *
sys_trace_return(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(!PyErr_Occurred());
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
assert(PyCode_Check(args[0]));
PyObject *val = args[2];
PyObject *res = call_trace_func(self, val);
return res;
}
static PyObject *
sys_trace_yield(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
return call_trace_func(self, args[2]);
}
static PyObject *
sys_trace_instruction_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 2);
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
if (!frame->f_trace_opcodes) {
Py_RETURN_NONE;
}
Py_INCREF(frame);
PyThreadState *tstate = _PyThreadState_GET();
int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, Py_None);
frame->f_lineno = 0;
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
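Opcode-level tracing remains opt-in per frame: the trace function has to set f_trace_opcodes, which in turn makes the interpreter request INSTRUCTION events. The long-standing sys.settrace idiom still applies:

import sys

def tracer(frame, event, arg):
    frame.f_trace_opcodes = True
    if event == "opcode":
        print("opcode at offset", frame.f_lasti)
    return tracer

def work():
    return 1 + 2

sys.settrace(tracer)
work()
sys.settrace(None)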
static PyObject *
trace_line(
PyThreadState *tstate, _PyLegacyEventHandler *self,
PyFrameObject *frame, int line
) {
if (!frame->f_trace_lines) {
Py_RETURN_NONE;
}
if (line < 0) {
Py_RETURN_NONE;
}
frame->f_last_traced_line = line;
Py_INCREF(frame);
frame->f_lineno = line;
int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, Py_None);
frame->f_lineno = 0;
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
sys_trace_line_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
assert(PyVectorcall_NARGS(nargsf) == 2);
int line = _PyLong_AsInt(args[1]);
assert(line >= 0);
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
assert(args[0] == (PyObject *)frame->f_frame->f_code);
if (frame->f_last_traced_line == line) {
/* Already traced this line */
Py_RETURN_NONE;
}
return trace_line(tstate, self, frame, line);
}
static PyObject *
sys_trace_jump_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
assert(PyVectorcall_NARGS(nargsf) == 3);
int from = _PyLong_AsInt(args[1])/sizeof(_Py_CODEUNIT);
assert(from >= 0);
int to = _PyLong_AsInt(args[2])/sizeof(_Py_CODEUNIT);
assert(to >= 0);
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
if (!frame->f_trace_lines) {
Py_RETURN_NONE;
}
PyCodeObject *code = (PyCodeObject *)args[0];
assert(PyCode_Check(code));
assert(code == frame->f_frame->f_code);
/* We can call _Py_Instrumentation_GetLine because we always set
* line events for tracing */
int to_line = _Py_Instrumentation_GetLine(code, to);
/* Backward jump: Always generate event.
* Forward jump: Only generate event if jumping to a different line. */
if (to > from && frame->f_last_traced_line == to_line) {
/* Already traced this line */
Py_RETURN_NONE;
}
return trace_line(tstate, self, frame, to_line);
}
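The backward/forward distinction above preserves the long-standing sys.settrace behaviour that a loop header line is reported again on every iteration. A small check:

import sys

lines = []

def tracer(frame, event, arg):
    if event == "line":
        lines.append(frame.f_lineno)
    return tracer

def loop():
    total = 0
    for i in range(3):   # re-reported on each backward jump of the loop
        total += i
    return total

sys.settrace(tracer)
loop()
sys.settrace(None)
assert lines.count(loop.__code__.co_firstlineno + 2) >= 3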
/* We don't care about the exception here,
* we just treat it as a possible new line
*/
static PyObject *
sys_trace_exception_handled(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
assert(PyVectorcall_NARGS(nargsf) == 3);
PyFrameObject *frame = PyEval_GetFrame();
PyCodeObject *code = (PyCodeObject *)args[0];
assert(PyCode_Check(code));
assert(code == frame->f_frame->f_code);
assert(PyLong_Check(args[1]));
int offset = _PyLong_AsInt(args[1])/sizeof(_Py_CODEUNIT);
/* We can call _Py_Instrumentation_GetLine because we always set
* line events for tracing */
int line = _Py_Instrumentation_GetLine(code, offset);
if (frame->f_last_traced_line == line) {
/* Already traced this line */
Py_RETURN_NONE;
}
return trace_line(tstate, self, frame, line);
}
PyTypeObject _PyLegacyEventHandler_Type = {
_PyVarObject_IMMORTAL_INIT(&PyType_Type, 0),
"sys.legacy_event_handler",
sizeof(_PyLegacyEventHandler),
.tp_dealloc = (destructor)PyObject_Free,
.tp_vectorcall_offset = offsetof(_PyLegacyEventHandler, vectorcall),
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
Py_TPFLAGS_HAVE_VECTORCALL | Py_TPFLAGS_DISALLOW_INSTANTIATION,
.tp_call = PyVectorcall_Call,
};
static int
set_callbacks(int tool, vectorcallfunc vectorcall, int legacy_event, int event1, int event2)
{
_PyLegacyEventHandler *callback =
PyObject_NEW(_PyLegacyEventHandler, &_PyLegacyEventHandler_Type);
if (callback == NULL) {
return -1;
}
callback->vectorcall = vectorcall;
callback->event = legacy_event;
Py_XDECREF(_PyMonitoring_RegisterCallback(tool, event1, (PyObject *)callback));
if (event2 >= 0) {
Py_XDECREF(_PyMonitoring_RegisterCallback(tool, event2, (PyObject *)callback));
}
Py_DECREF(callback);
return 0;
}
#ifndef NDEBUG
/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and
PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen
when a thread continues to run after Python finalization, especially
daemon threads. */
static int
is_tstate_valid(PyThreadState *tstate)
{
assert(!_PyMem_IsPtrFreed(tstate));
assert(!_PyMem_IsPtrFreed(tstate->interp));
return 1;
}
#endif
int
_PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) {
return -1;
}
/* Setup PEP 669 monitoring callbacks and events. */
if (!tstate->interp->sys_profile_initialized) {
tstate->interp->sys_profile_initialized = true;
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_func2, PyTrace_CALL,
PY_MONITORING_EVENT_PY_START, PY_MONITORING_EVENT_PY_RESUME)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_func3, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_RETURN, PY_MONITORING_EVENT_PY_YIELD)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_func2, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_UNWIND, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_call_or_return, PyTrace_C_CALL,
PY_MONITORING_EVENT_CALL, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_call_or_return, PyTrace_C_RETURN,
PY_MONITORING_EVENT_C_RETURN, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_call_or_return, PyTrace_C_EXCEPTION,
PY_MONITORING_EVENT_C_RAISE, -1)) {
return -1;
}
}
int delta = (func != NULL) - (tstate->c_profilefunc != NULL);
tstate->c_profilefunc = func;
PyObject *old_profileobj = tstate->c_profileobj;
tstate->c_profileobj = Py_XNewRef(arg);
Py_XDECREF(old_profileobj);
tstate->interp->sys_profiling_threads += delta;
assert(tstate->interp->sys_profiling_threads >= 0);
uint32_t events = 0;
if (tstate->interp->sys_profiling_threads) {
events =
(1 << PY_MONITORING_EVENT_PY_START) | (1 << PY_MONITORING_EVENT_PY_RESUME) |
(1 << PY_MONITORING_EVENT_PY_RETURN) | (1 << PY_MONITORING_EVENT_PY_YIELD) |
(1 << PY_MONITORING_EVENT_CALL) | (1 << PY_MONITORING_EVENT_PY_UNWIND);
}
return _PyMonitoring_SetEvents(PY_MONITORING_SYS_PROFILE_ID, events);
}
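For readability, the forwarding installed above amounts to the following mapping from PEP 669 events onto legacy profile events (a summary for readers, not code from this commit):

LEGACY_PROFILE_EVENTS = {
    "PY_START": "call",
    "PY_RESUME": "call",
    "PY_RETURN": "return",
    "PY_YIELD": "return",
    "PY_UNWIND": "return",   # reported with arg None while unwinding
    "CALL": "c_call",        # only when the callee is a C function or method descriptor
    "C_RETURN": "c_return",
    "C_RAISE": "c_exception",
}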
int
_PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) {
return -1;
}
assert(tstate->interp->sys_tracing_threads >= 0);
/* Setup PEP 669 monitoring callbacks and events. */
if (!tstate->interp->sys_trace_initialized) {
tstate->interp->sys_trace_initialized = true;
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_func2, PyTrace_CALL,
PY_MONITORING_EVENT_PY_START, PY_MONITORING_EVENT_PY_RESUME)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_func2, PyTrace_CALL,
PY_MONITORING_EVENT_PY_THROW, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_return, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_RETURN, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_yield, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_YIELD, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_exception_func, PyTrace_EXCEPTION,
PY_MONITORING_EVENT_RAISE, PY_MONITORING_EVENT_STOP_ITERATION)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_line_func, PyTrace_LINE,
PY_MONITORING_EVENT_LINE, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_func2, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_UNWIND, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_jump_func, PyTrace_LINE,
PY_MONITORING_EVENT_JUMP, PY_MONITORING_EVENT_BRANCH)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_instruction_func, PyTrace_OPCODE,
PY_MONITORING_EVENT_INSTRUCTION, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_exception_handled, PyTrace_LINE,
PY_MONITORING_EVENT_EXCEPTION_HANDLED, -1)) {
return -1;
}
}
int delta = (func != NULL) - (tstate->c_tracefunc != NULL);
tstate->c_tracefunc = func;
PyObject *old_traceobj = tstate->c_traceobj;
tstate->c_traceobj = Py_XNewRef(arg);
Py_XDECREF(old_traceobj);
tstate->interp->sys_tracing_threads += delta;
assert(tstate->interp->sys_tracing_threads >= 0);
uint32_t events = 0;
if (tstate->interp->sys_tracing_threads) {
events =
(1 << PY_MONITORING_EVENT_PY_START) | (1 << PY_MONITORING_EVENT_PY_RESUME) |
(1 << PY_MONITORING_EVENT_PY_RETURN) | (1 << PY_MONITORING_EVENT_PY_YIELD) |
(1 << PY_MONITORING_EVENT_RAISE) | (1 << PY_MONITORING_EVENT_LINE) |
(1 << PY_MONITORING_EVENT_JUMP) | (1 << PY_MONITORING_EVENT_BRANCH) |
(1 << PY_MONITORING_EVENT_PY_UNWIND) | (1 << PY_MONITORING_EVENT_PY_THROW) |
(1 << PY_MONITORING_EVENT_STOP_ITERATION) |
(1 << PY_MONITORING_EVENT_EXCEPTION_HANDLED);
if (tstate->interp->f_opcode_trace_set) {
events |= (1 << PY_MONITORING_EVENT_INSTRUCTION);
}
}
return _PyMonitoring_SetEvents(PY_MONITORING_SYS_TRACE_ID, events);
}
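The analogous summary for tracing: the callbacks registered above map PEP 669 events onto legacy sys.settrace event names as follows (again a readers' summary, not code from this commit):

LEGACY_TRACE_EVENTS = {
    "PY_START": "call",
    "PY_RESUME": "call",
    "PY_THROW": "call",
    "PY_RETURN": "return",
    "PY_YIELD": "return",
    "PY_UNWIND": "return",            # arg is None while unwinding
    "RAISE": "exception",
    "STOP_ITERATION": "exception",
    "LINE": "line",
    "JUMP": "line",                   # deduplicated against the last traced line
    "BRANCH": "line",
    "EXCEPTION_HANDLED": "line",
    "INSTRUCTION": "opcode",          # only for frames that set f_trace_opcodes
}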

View file

@ -32,7 +32,6 @@ def write_contents(f):
"""
opcode = find_module('opcode')
targets = ['_unknown_opcode'] * 256
targets[255] = "TARGET_DO_TRACING"
for opname, op in opcode.opmap.items():
if not opcode.is_pseudo(op):
targets[op] = "TARGET_%s" % opname

View file

@ -13,6 +13,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 0;
case RESUME:
return 0;
case INSTRUMENTED_RESUME:
return 0;
case LOAD_CLOSURE:
return 0;
case LOAD_FAST_CHECK:
@ -39,6 +41,12 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 0;
case END_FOR:
return 1+1;
case INSTRUMENTED_END_FOR:
return 2;
case END_SEND:
return 2;
case INSTRUMENTED_END_SEND:
return 2;
case UNARY_NEGATIVE:
return 1;
case UNARY_NOT:
@ -97,8 +105,12 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case RETURN_VALUE:
return 1;
case INSTRUMENTED_RETURN_VALUE:
return 1;
case RETURN_CONST:
return 0;
case INSTRUMENTED_RETURN_CONST:
return 0;
case GET_AITER:
return 1;
case GET_ANEXT:
@ -109,6 +121,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 2;
case SEND_GEN:
return 2;
case INSTRUMENTED_YIELD_VALUE:
return 1;
case YIELD_VALUE:
return 1;
case POP_EXCEPT:
@ -263,6 +277,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case FOR_ITER:
return 1;
case INSTRUMENTED_FOR_ITER:
return 0;
case FOR_ITER_LIST:
return 1;
case FOR_ITER_TUPLE:
@ -287,6 +303,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case KW_NAMES:
return 0;
case INSTRUMENTED_CALL:
return 0;
case CALL:
return oparg + 2;
case CALL_BOUND_METHOD_EXACT_ARGS:
@ -323,6 +341,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return oparg + 2;
case CALL_NO_KW_METHOD_DESCRIPTOR_FAST:
return oparg + 2;
case INSTRUMENTED_CALL_FUNCTION_EX:
return 0;
case CALL_FUNCTION_EX:
return ((oparg & 1) ? 1 : 0) + 3;
case MAKE_FUNCTION:
@ -339,10 +359,28 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 2;
case SWAP:
return (oparg-2) + 2;
case INSTRUMENTED_LINE:
return 0;
case INSTRUMENTED_INSTRUCTION:
return 0;
case INSTRUMENTED_JUMP_FORWARD:
return 0;
case INSTRUMENTED_JUMP_BACKWARD:
return 0;
case INSTRUMENTED_POP_JUMP_IF_TRUE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_FALSE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NONE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NOT_NONE:
return 0;
case EXTENDED_ARG:
return 0;
case CACHE:
return 0;
case RESERVED:
return 0;
default:
return -1;
}
@ -359,6 +397,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 0;
case RESUME:
return 0;
case INSTRUMENTED_RESUME:
return 0;
case LOAD_CLOSURE:
return 1;
case LOAD_FAST_CHECK:
@ -385,6 +425,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case END_FOR:
return 0+0;
case INSTRUMENTED_END_FOR:
return 0;
case END_SEND:
return 1;
case INSTRUMENTED_END_SEND:
return 1;
case UNARY_NEGATIVE:
return 1;
case UNARY_NOT:
@ -443,8 +489,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 0;
case RETURN_VALUE:
return 0;
case INSTRUMENTED_RETURN_VALUE:
return 0;
case RETURN_CONST:
return 0;
case INSTRUMENTED_RETURN_CONST:
return 0;
case GET_AITER:
return 1;
case GET_ANEXT:
@ -455,6 +505,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 2;
case SEND_GEN:
return 1;
case INSTRUMENTED_YIELD_VALUE:
return 1;
case YIELD_VALUE:
return 1;
case POP_EXCEPT:
@ -609,6 +661,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case FOR_ITER:
return 2;
case INSTRUMENTED_FOR_ITER:
return 0;
case FOR_ITER_LIST:
return 2;
case FOR_ITER_TUPLE:
@ -633,6 +687,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return ((oparg & 1) ? 1 : 0) + 1;
case KW_NAMES:
return 0;
case INSTRUMENTED_CALL:
return 0;
case CALL:
return 1;
case CALL_BOUND_METHOD_EXACT_ARGS:
@ -669,6 +725,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case CALL_NO_KW_METHOD_DESCRIPTOR_FAST:
return 1;
case INSTRUMENTED_CALL_FUNCTION_EX:
return 0;
case CALL_FUNCTION_EX:
return 1;
case MAKE_FUNCTION:
@ -685,10 +743,28 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case SWAP:
return (oparg-2) + 2;
case INSTRUMENTED_LINE:
return 0;
case INSTRUMENTED_INSTRUCTION:
return 0;
case INSTRUMENTED_JUMP_FORWARD:
return 0;
case INSTRUMENTED_JUMP_BACKWARD:
return 0;
case INSTRUMENTED_POP_JUMP_IF_TRUE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_FALSE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NONE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NOT_NONE:
return 0;
case EXTENDED_ARG:
return 0;
case CACHE:
return 0;
case RESERVED:
return 0;
default:
return -1;
}
@ -707,6 +783,7 @@ extern const struct opcode_metadata _PyOpcode_opcode_metadata[256];
const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[NOP] = { true, INSTR_FMT_IX },
[RESUME] = { true, INSTR_FMT_IB },
[INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB },
[LOAD_CLOSURE] = { true, INSTR_FMT_IB },
[LOAD_FAST_CHECK] = { true, INSTR_FMT_IB },
[LOAD_FAST] = { true, INSTR_FMT_IB },
@ -720,6 +797,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[POP_TOP] = { true, INSTR_FMT_IX },
[PUSH_NULL] = { true, INSTR_FMT_IX },
[END_FOR] = { true, INSTR_FMT_IB },
[INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX },
[END_SEND] = { true, INSTR_FMT_IX },
[INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX },
[UNARY_NEGATIVE] = { true, INSTR_FMT_IX },
[UNARY_NOT] = { true, INSTR_FMT_IX },
[UNARY_INVERT] = { true, INSTR_FMT_IX },
@ -749,12 +829,15 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[RAISE_VARARGS] = { true, INSTR_FMT_IB },
[INTERPRETER_EXIT] = { true, INSTR_FMT_IX },
[RETURN_VALUE] = { true, INSTR_FMT_IX },
[INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX },
[RETURN_CONST] = { true, INSTR_FMT_IB },
[INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB },
[GET_AITER] = { true, INSTR_FMT_IX },
[GET_ANEXT] = { true, INSTR_FMT_IX },
[GET_AWAITABLE] = { true, INSTR_FMT_IB },
[SEND] = { true, INSTR_FMT_IBC },
[SEND_GEN] = { true, INSTR_FMT_IBC },
[INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IX },
[YIELD_VALUE] = { true, INSTR_FMT_IX },
[POP_EXCEPT] = { true, INSTR_FMT_IX },
[RERAISE] = { true, INSTR_FMT_IB },
@ -832,6 +915,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[GET_ITER] = { true, INSTR_FMT_IX },
[GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX },
[FOR_ITER] = { true, INSTR_FMT_IBC },
[INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IB },
[FOR_ITER_LIST] = { true, INSTR_FMT_IBC },
[FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC },
[FOR_ITER_RANGE] = { true, INSTR_FMT_IBC },
@ -844,6 +928,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000 },
[LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000 },
[KW_NAMES] = { true, INSTR_FMT_IB },
[INSTRUMENTED_CALL] = { true, INSTR_FMT_IB },
[CALL] = { true, INSTR_FMT_IBC00 },
[CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00 },
[CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IBC00 },
@ -862,6 +947,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00 },
[CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00 },
[CALL_NO_KW_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00 },
[INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX },
[CALL_FUNCTION_EX] = { true, INSTR_FMT_IB },
[MAKE_FUNCTION] = { true, INSTR_FMT_IB },
[RETURN_GENERATOR] = { true, INSTR_FMT_IX },
@ -870,7 +956,16 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[COPY] = { true, INSTR_FMT_IB },
[BINARY_OP] = { true, INSTR_FMT_IBC },
[SWAP] = { true, INSTR_FMT_IB },
[INSTRUMENTED_LINE] = { true, INSTR_FMT_IX },
[INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX },
[INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB },
[INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IB },
[EXTENDED_ARG] = { true, INSTR_FMT_IB },
[CACHE] = { true, INSTR_FMT_IX },
[RESERVED] = { true, INSTR_FMT_IX },
};
#endif

View file

@ -4,17 +4,19 @@ static void *opcode_targets[256] = {
&&TARGET_PUSH_NULL,
&&TARGET_INTERPRETER_EXIT,
&&TARGET_END_FOR,
&&TARGET_END_SEND,
&&TARGET_BINARY_OP_ADD_FLOAT,
&&TARGET_BINARY_OP_ADD_INT,
&&TARGET_BINARY_OP_ADD_UNICODE,
&&TARGET_BINARY_OP_INPLACE_ADD_UNICODE,
&&TARGET_NOP,
&&TARGET_BINARY_OP_MULTIPLY_FLOAT,
&&TARGET_BINARY_OP_INPLACE_ADD_UNICODE,
&&TARGET_UNARY_NEGATIVE,
&&TARGET_UNARY_NOT,
&&TARGET_BINARY_OP_MULTIPLY_FLOAT,
&&TARGET_BINARY_OP_MULTIPLY_INT,
&&TARGET_BINARY_OP_SUBTRACT_FLOAT,
&&TARGET_UNARY_INVERT,
&&TARGET_BINARY_OP_SUBTRACT_FLOAT,
&&TARGET_RESERVED,
&&TARGET_BINARY_OP_SUBTRACT_INT,
&&TARGET_BINARY_SUBSCR_DICT,
&&TARGET_BINARY_SUBSCR_GETITEM,
@ -22,21 +24,21 @@ static void *opcode_targets[256] = {
&&TARGET_BINARY_SUBSCR_TUPLE_INT,
&&TARGET_CALL_PY_EXACT_ARGS,
&&TARGET_CALL_PY_WITH_DEFAULTS,
&&TARGET_CALL_BOUND_METHOD_EXACT_ARGS,
&&TARGET_CALL_BUILTIN_CLASS,
&&TARGET_BINARY_SUBSCR,
&&TARGET_BINARY_SLICE,
&&TARGET_STORE_SLICE,
&&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS,
&&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS,
&&TARGET_CALL_BOUND_METHOD_EXACT_ARGS,
&&TARGET_CALL_BUILTIN_CLASS,
&&TARGET_GET_LEN,
&&TARGET_MATCH_MAPPING,
&&TARGET_MATCH_SEQUENCE,
&&TARGET_MATCH_KEYS,
&&TARGET_CALL_NO_KW_BUILTIN_FAST,
&&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS,
&&TARGET_PUSH_EXC_INFO,
&&TARGET_CHECK_EXC_MATCH,
&&TARGET_CHECK_EG_MATCH,
&&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS,
&&TARGET_CALL_NO_KW_BUILTIN_FAST,
&&TARGET_CALL_NO_KW_BUILTIN_O,
&&TARGET_CALL_NO_KW_ISINSTANCE,
&&TARGET_CALL_NO_KW_LEN,
@ -46,8 +48,6 @@ static void *opcode_targets[256] = {
&&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_O,
&&TARGET_CALL_NO_KW_STR_1,
&&TARGET_CALL_NO_KW_TUPLE_1,
&&TARGET_CALL_NO_KW_TYPE_1,
&&TARGET_COMPARE_OP_FLOAT,
&&TARGET_WITH_EXCEPT_START,
&&TARGET_GET_AITER,
&&TARGET_GET_ANEXT,
@ -55,39 +55,39 @@ static void *opcode_targets[256] = {
&&TARGET_BEFORE_WITH,
&&TARGET_END_ASYNC_FOR,
&&TARGET_CLEANUP_THROW,
&&TARGET_CALL_NO_KW_TYPE_1,
&&TARGET_COMPARE_OP_FLOAT,
&&TARGET_COMPARE_OP_INT,
&&TARGET_COMPARE_OP_STR,
&&TARGET_FOR_ITER_LIST,
&&TARGET_FOR_ITER_TUPLE,
&&TARGET_STORE_SUBSCR,
&&TARGET_DELETE_SUBSCR,
&&TARGET_FOR_ITER_LIST,
&&TARGET_FOR_ITER_TUPLE,
&&TARGET_FOR_ITER_RANGE,
&&TARGET_FOR_ITER_GEN,
&&TARGET_LOAD_ATTR_CLASS,
&&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
&&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_MODULE,
&&TARGET_GET_ITER,
&&TARGET_GET_YIELD_FROM_ITER,
&&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_BUILD_CLASS,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_MODULE,
&&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ASSERTION_ERROR,
&&TARGET_RETURN_GENERATOR,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_METHOD_LAZY_DICT,
&&TARGET_LOAD_ATTR_METHOD_NO_DICT,
&&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_RETURN_VALUE,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_SETUP_ANNOTATIONS,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_POP_EXCEPT,
&&TARGET_STORE_NAME,
&&TARGET_DELETE_NAME,
@ -110,9 +110,9 @@ static void *opcode_targets[256] = {
&&TARGET_IMPORT_NAME,
&&TARGET_IMPORT_FROM,
&&TARGET_JUMP_FORWARD,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_STORE_FAST__STORE_FAST,
&&TARGET_STORE_SUBSCR_DICT,
&&TARGET_POP_JUMP_IF_FALSE,
&&TARGET_POP_JUMP_IF_TRUE,
&&TARGET_LOAD_GLOBAL,
@ -140,9 +140,9 @@ static void *opcode_targets[256] = {
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
&&TARGET_JUMP_BACKWARD,
&&TARGET_STORE_SUBSCR_LIST_INT,
&&TARGET_STORE_FAST__STORE_FAST,
&&TARGET_CALL_FUNCTION_EX,
&&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_STORE_SUBSCR_DICT,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
@ -152,15 +152,15 @@ static void *opcode_targets[256] = {
&&TARGET_YIELD_VALUE,
&&TARGET_RESUME,
&&TARGET_MATCH_CLASS,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
&&TARGET_STORE_SUBSCR_LIST_INT,
&&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_FORMAT_VALUE,
&&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_STRING,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
&&TARGET_SEND_GEN,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_LIST_EXTEND,
&&TARGET_SET_UPDATE,
&&TARGET_DICT_MERGE,
@ -237,22 +237,22 @@ static void *opcode_targets[256] = {
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_DO_TRACING
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NONE,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE,
&&TARGET_INSTRUMENTED_RESUME,
&&TARGET_INSTRUMENTED_CALL,
&&TARGET_INSTRUMENTED_RETURN_VALUE,
&&TARGET_INSTRUMENTED_YIELD_VALUE,
&&TARGET_INSTRUMENTED_CALL_FUNCTION_EX,
&&TARGET_INSTRUMENTED_JUMP_FORWARD,
&&TARGET_INSTRUMENTED_JUMP_BACKWARD,
&&TARGET_INSTRUMENTED_RETURN_CONST,
&&TARGET_INSTRUMENTED_FOR_ITER,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_FALSE,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_TRUE,
&&TARGET_INSTRUMENTED_END_FOR,
&&TARGET_INSTRUMENTED_END_SEND,
&&TARGET_INSTRUMENTED_INSTRUCTION,
&&TARGET_INSTRUMENTED_LINE,
&&_unknown_opcode
};

View file

@ -625,7 +625,6 @@ free_interpreter(PyInterpreterState *interp)
main interpreter. We fix those fields here, in addition
to the other dynamically initialized fields.
*/
static void
init_interpreter(PyInterpreterState *interp,
_PyRuntimeState *runtime, int64_t id,
@ -650,12 +649,22 @@ init_interpreter(PyInterpreterState *interp,
_PyGC_InitState(&interp->gc);
PyConfig_InitPythonConfig(&interp->config);
_PyType_InitCache(interp);
for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
interp->monitors.tools[i] = 0;
}
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
for (int e = 0; e < PY_MONITORING_EVENTS; e++) {
interp->monitoring_callables[t][e] = NULL;
}
}
interp->sys_profile_initialized = false;
interp->sys_trace_initialized = false;
if (interp != &runtime->_main_interpreter) {
/* Fix the self-referential, statically initialized fields. */
interp->dtoa = (struct _dtoa_state)_dtoa_state_INIT(interp);
}
interp->f_opcode_trace_set = false;
interp->_initialized = 1;
}
@ -788,6 +797,20 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate)
Py_CLEAR(interp->audit_hooks);
for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
interp->monitors.tools[i] = 0;
}
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
for (int e = 0; e < PY_MONITORING_EVENTS; e++) {
Py_CLEAR(interp->monitoring_callables[t][e]);
}
}
interp->sys_profile_initialized = false;
interp->sys_trace_initialized = false;
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
Py_CLEAR(interp->monitoring_tool_names[t]);
}
PyConfig_Clear(&interp->config);
Py_CLEAR(interp->codec_search_path);
Py_CLEAR(interp->codec_search_cache);
@ -845,7 +868,7 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate)
interp->code_watchers[i] = NULL;
}
interp->active_code_watchers = 0;
interp->f_opcode_trace_set = false;
// XXX Once we have one allocator per interpreter (i.e.
// per-interpreter GC) we must ensure that all of the interpreter's
// objects have been cleaned up at the point.
@ -1237,6 +1260,7 @@ init_threadstate(PyThreadState *tstate,
tstate->datastack_chunk = NULL;
tstate->datastack_top = NULL;
tstate->datastack_limit = NULL;
tstate->what_event = -1;
tstate->_status.initialized = 1;
}
@ -1412,8 +1436,14 @@ PyThreadState_Clear(PyThreadState *tstate)
"PyThreadState_Clear: warning: thread still has a generator\n");
}
tstate->c_profilefunc = NULL;
tstate->c_tracefunc = NULL;
if (tstate->c_profilefunc != NULL) {
tstate->interp->sys_profiling_threads--;
tstate->c_profilefunc = NULL;
}
if (tstate->c_tracefunc != NULL) {
tstate->interp->sys_tracing_threads--;
tstate->c_tracefunc = NULL;
}
Py_CLEAR(tstate->c_profileobj);
Py_CLEAR(tstate->c_traceobj);

View file

@ -273,7 +273,8 @@ _PyCode_Quicken(PyCodeObject *code)
_Py_CODEUNIT *instructions = _PyCode_CODE(code);
for (int i = 0; i < Py_SIZE(code); i++) {
int previous_opcode = opcode;
opcode = _PyOpcode_Deopt[instructions[i].op.code];
opcode = _Py_GetBaseOpcode(code, i);
assert(opcode < MIN_INSTRUMENTED_OPCODE);
int caches = _PyOpcode_Caches[opcode];
if (caches) {
instructions[i + 1].cache = adaptive_counter_warmup();
@ -1737,6 +1738,7 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
{
assert(ENABLE_SPECIALIZATION);
assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL);
assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL);
_PyCallCache *cache = (_PyCallCache *)(instr + 1);
int fail;
if (PyCFunction_CheckExact(callable)) {
@ -2149,7 +2151,9 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
goto success;
}
else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR);
assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
);
instr->op.code = FOR_ITER_GEN;
goto success;
}

View file

@ -3409,6 +3409,7 @@ error:
return _PyStatus_ERR("can't set preliminary stderr");
}
PyObject *_Py_CreateMonitoringObject(void);
/* Create sys module without all attributes.
_PySys_UpdateConfig() should be called later to add remaining attributes. */
@ -3458,6 +3459,16 @@ _PySys_Create(PyThreadState *tstate, PyObject **sysmod_p)
goto error;
}
PyObject *monitoring = _Py_CreateMonitoringObject();
if (monitoring == NULL) {
goto error;
}
int err = PyDict_SetItemString(sysdict, "monitoring", monitoring);
Py_DECREF(monitoring);
if (err < 0) {
goto error;
}
assert(!_PyErr_Occurred(tstate));
*sysmod_p = sysmod;
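With this hook in place, the new namespace is reachable directly from sys once the interpreter is up:

import sys
assert hasattr(sys, "monitoring")
print(sys.monitoring)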