GH-103082: Implementation of PEP 669: Low Impact Monitoring for CPython (GH-103083)

* The majority of the monitoring code is in instrumentation.c

* The new instrumentation bytecodes are in bytecodes.c

* legacy_tracing.c adapts the new API to the old sys.settrace and sys.setprofile APIs
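
Below is a minimal Python sketch of the two surfaces this commit connects: the legacy global trace function and the per-tool, per-event registration of PEP 669. The sys.monitoring names follow the PEP and the new Lib/test/test_monitoring.py, but the exact Python-level API shown here should be read as an assumption of the sketch, not as part of the diff.

import sys

def demo():
    x = 1
    y = x + 1
    return y

# Legacy API: one global trace function, invoked for every event in every frame.
def trace(frame, event, arg):
    if event == "line":
        print("line", frame.f_lineno, "in", frame.f_code.co_name)
    return trace

sys.settrace(trace)
demo()
sys.settrace(None)

# PEP 669 API: a tool claims an ID, registers one callback per event, and only
# the requested events are instrumented. legacy_tracing.c rebuilds the old
# sys.settrace/sys.setprofile behaviour on top of this machinery.
mon = sys.monitoring
TOOL = mon.PROFILER_ID                       # one of the IDs reserved by PEP 669
mon.use_tool_id(TOOL, "demo-tool")
mon.register_callback(TOOL, mon.events.LINE,
                      lambda code, line: print("line", line, "in", code.co_name))
mon.set_events(TOOL, mon.events.LINE)
demo()
mon.set_events(TOOL, 0)                      # clear events; instrumentation is removed
mon.free_tool_id(TOOL)
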
Mark Shannon 2023-04-12 12:04:55 +01:00 committed by GitHub
parent dce2d38cb0
commit 411b169281
44 changed files with 6029 additions and 1625 deletions


@ -3,10 +3,22 @@
#ifndef Py_LIMITED_API
#ifndef Py_CODE_H
#define Py_CODE_H
#ifdef __cplusplus
extern "C" {
#endif
/* Count of all "real" monitoring events (not derived from other events) */
#define PY_MONITORING_UNGROUPED_EVENTS 14
/* Count of all monitoring events */
#define PY_MONITORING_EVENTS 16
/* Table of which tools are active for each monitored event. */
typedef struct _Py_Monitors {
uint8_t tools[PY_MONITORING_UNGROUPED_EVENTS];
} _Py_Monitors;
/* Each instruction in a code object is a fixed-width value,
* currently 2 bytes: 1-byte opcode + 1-byte oparg. The EXTENDED_ARG
* opcode allows for larger values but the current limit is 3 uses
@ -56,6 +68,35 @@ typedef struct {
PyObject *_co_freevars;
} _PyCoCached;
/* Ancillary data structure used for instrumentation.
Line instrumentation creates an array of
these. One entry per code unit. */
typedef struct {
uint8_t original_opcode;
int8_t line_delta;
} _PyCoLineInstrumentationData;
/* Main data structure used for instrumentation.
* This is allocated when needed for instrumentation
*/
typedef struct {
/* Monitoring specific to this code object */
_Py_Monitors local_monitors;
/* Monitoring that is active on this code object */
_Py_Monitors active_monitors;
/* The tools that are to be notified for events for the matching code unit */
uint8_t *tools;
/* Information to support line events */
_PyCoLineInstrumentationData *lines;
/* The tools that are to be notified for line events for the matching code unit */
uint8_t *line_tools;
/* Information to support instruction events */
/* The underlying instructions, which can themselves be instrumented */
uint8_t *per_instruction_opcodes;
/* The tools that are to be notified for instruction events for the matching code unit */
uint8_t *per_instruction_tools;
} _PyCoMonitoringData;
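
This struct is the per-code-object backing store for monitoring: which tools are active, which tools want line or instruction events at each code unit, and the original opcodes that the instrumented ones replace. At the Python level this corresponds to per-code-object ("local") events. A hedged sketch, assuming the sys.monitoring surface defined by PEP 669:

import sys

def hot_loop(n):
    total = 0
    for i in range(n):
        total += i
    return total

mon = sys.monitoring
TOOL = mon.COVERAGE_ID
mon.use_tool_id(TOOL, "line-demo")
mon.register_callback(TOOL, mon.events.LINE,
                      lambda code, line: print(code.co_name, "line", line))
# Local events instrument only this code object; its monitoring data records,
# per code unit, which tools want LINE events.
mon.set_local_events(TOOL, hot_loop.__code__, mon.events.LINE)
hot_loop(3)
mon.set_local_events(TOOL, hot_loop.__code__, 0)
mon.free_tool_id(TOOL)
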
// To avoid repeating ourselves in deepfreeze.py, all PyCodeObject members are
// defined in this macro:
#define _PyCode_DEF(SIZE) { \
@ -87,7 +128,6 @@ typedef struct {
PyObject *co_exceptiontable; /* Byte string encoding exception handling \
table */ \
int co_flags; /* CO_..., see below */ \
short _co_linearray_entry_size; /* Size of each entry in _co_linearray */ \
\
/* The rest are not so impactful on performance. */ \
int co_argcount; /* #arguments, except *args */ \
@ -114,8 +154,9 @@ typedef struct {
PyObject *co_linetable; /* bytes object that holds location info */ \
PyObject *co_weakreflist; /* to support weakrefs to code objects */ \
_PyCoCached *_co_cached; /* cached co_* attributes */ \
uint64_t _co_instrumentation_version; /* current instrumentation version */ \
_PyCoMonitoringData *_co_monitoring; /* Monitoring data */ \
int _co_firsttraceable; /* index of first traceable instruction */ \
char *_co_linearray; /* array of line offsets */ \
/* Scratch space for extra data relating to the code object. \
Type is a void* to keep the format private in codeobject.c to force \
people to go through the proper APIs. */ \


@ -58,12 +58,6 @@ typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *);
#define PyTrace_C_RETURN 6
#define PyTrace_OPCODE 7
typedef struct {
PyCodeObject *code; // The code object for the bounds. May be NULL.
PyCodeAddressRange bounds; // Only valid if code != NULL.
} PyTraceInfo;
// Internal structure: you should not use it directly, but use public functions
// like PyThreadState_EnterTracing() and PyThreadState_LeaveTracing().
typedef struct _PyCFrame {
@ -77,7 +71,6 @@ typedef struct _PyCFrame {
* discipline and make sure that instances of this struct cannot
* accessed outside of their lifetime.
*/
uint8_t use_tracing; // 0 or 255 (or'ed into opcode, hence 8-bit type)
/* Pointer to the currently executing frame (it can be NULL) */
struct _PyInterpreterFrame *current_frame;
struct _PyCFrame *previous;
@ -157,7 +150,7 @@ struct _ts {
This is to prevent the actual trace/profile code from being recorded in
the trace/profile. */
int tracing;
int tracing_what; /* The event currently being traced, if any. */
int what_event; /* The event currently being monitored, if any. */
/* Pointer to current _PyCFrame in the C stack frame of the currently,
* or most recently, executing _PyEval_EvalFrameDefault. */
@ -228,8 +221,6 @@ struct _ts {
/* Unique thread state id. */
uint64_t id;
PyTraceInfo trace_info;
_PyStackChunk *datastack_chunk;
PyObject **datastack_top;
PyObject **datastack_limit;


@ -441,32 +441,6 @@ adaptive_counter_backoff(uint16_t counter) {
/* Line array cache for tracing */
extern int _PyCode_CreateLineArray(PyCodeObject *co);
static inline int
_PyCode_InitLineArray(PyCodeObject *co)
{
if (co->_co_linearray) {
return 0;
}
return _PyCode_CreateLineArray(co);
}
static inline int
_PyCode_LineNumberFromArray(PyCodeObject *co, int index)
{
assert(co->_co_linearray != NULL);
assert(index >= 0);
assert(index < Py_SIZE(co));
if (co->_co_linearray_entry_size == 2) {
return ((int16_t *)co->_co_linearray)[index];
}
else {
assert(co->_co_linearray_entry_size == 4);
return ((int32_t *)co->_co_linearray)[index];
}
}
typedef struct _PyShimCodeDef {
const uint8_t *code;
int codelen;
@ -500,6 +474,10 @@ extern uint32_t _Py_next_func_version;
#define COMPARISON_NOT_EQUALS (COMPARISON_UNORDERED | COMPARISON_LESS_THAN | COMPARISON_GREATER_THAN)
extern int _Py_Instrument(PyCodeObject *co, PyInterpreterState *interp);
extern int _Py_GetBaseOpcode(PyCodeObject *code, int offset);
#ifdef __cplusplus
}


@ -19,6 +19,7 @@ struct _frame {
struct _PyInterpreterFrame *f_frame; /* points to the frame data */
PyObject *f_trace; /* Trace function */
int f_lineno; /* Current line number. Only valid if non-zero */
int f_last_traced_line; /* The last line traced for this frame */
char f_trace_lines; /* Emit per-line trace events? */
char f_trace_opcodes; /* Emit per-opcode trace events? */
char f_fast_as_locals; /* Have the fast locals of this frame been converted to a dict? */
@ -137,10 +138,16 @@ _PyFrame_GetLocalsArray(_PyInterpreterFrame *frame)
return frame->localsplus;
}
/* Fetches the stack pointer, and sets stacktop to -1.
Having stacktop <= 0 ensures that invalid
values are not visible to the cycle GC.
We choose -1 rather than 0 to assist debugging. */
static inline PyObject**
_PyFrame_GetStackPointer(_PyInterpreterFrame *frame)
{
return frame->localsplus+frame->stacktop;
PyObject **sp = frame->localsplus + frame->stacktop;
frame->stacktop = -1;
return sp;
}
static inline void


@ -0,0 +1,107 @@
#ifndef Py_INTERNAL_INSTRUMENT_H
#define Py_INTERNAL_INSTRUMENT_H
#include "pycore_bitutils.h" // _Py_popcount32
#include "pycore_frame.h"
#include "cpython/code.h"
#ifdef __cplusplus
extern "C" {
#endif
#define PY_MONITORING_TOOL_IDS 8
/* Local events.
* These require bytecode instrumentation */
#define PY_MONITORING_EVENT_PY_START 0
#define PY_MONITORING_EVENT_PY_RESUME 1
#define PY_MONITORING_EVENT_PY_RETURN 2
#define PY_MONITORING_EVENT_PY_YIELD 3
#define PY_MONITORING_EVENT_CALL 4
#define PY_MONITORING_EVENT_LINE 5
#define PY_MONITORING_EVENT_INSTRUCTION 6
#define PY_MONITORING_EVENT_JUMP 7
#define PY_MONITORING_EVENT_BRANCH 8
#define PY_MONITORING_EVENT_STOP_ITERATION 9
#define PY_MONITORING_INSTRUMENTED_EVENTS 10
/* Other events, mainly exceptions */
#define PY_MONITORING_EVENT_RAISE 10
#define PY_MONITORING_EVENT_EXCEPTION_HANDLED 11
#define PY_MONITORING_EVENT_PY_UNWIND 12
#define PY_MONITORING_EVENT_PY_THROW 13
/* Ancillary events */
#define PY_MONITORING_EVENT_C_RETURN 14
#define PY_MONITORING_EVENT_C_RAISE 15
typedef uint32_t _PyMonitoringEventSet;
/* Tool IDs */
/* These are defined in PEP 669 for convenience to avoid clashes */
#define PY_MONITORING_DEBUGGER_ID 0
#define PY_MONITORING_COVERAGE_ID 1
#define PY_MONITORING_PROFILER_ID 2
#define PY_MONITORING_OPTIMIZER_ID 5
/* Internal IDs used to support sys.setprofile() and sys.settrace() */
#define PY_MONITORING_SYS_PROFILE_ID 6
#define PY_MONITORING_SYS_TRACE_ID 7
PyObject *_PyMonitoring_RegisterCallback(int tool_id, int event_id, PyObject *obj);
int _PyMonitoring_SetEvents(int tool_id, _PyMonitoringEventSet events);
extern int
_Py_call_instrumentation(PyThreadState *tstate, int event,
_PyInterpreterFrame *frame, _Py_CODEUNIT *instr);
extern int
_Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame,
_Py_CODEUNIT *instr);
extern int
_Py_call_instrumentation_instruction(
PyThreadState *tstate, _PyInterpreterFrame* frame, _Py_CODEUNIT *instr);
int
_Py_call_instrumentation_jump(
PyThreadState *tstate, int event,
_PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target);
extern int
_Py_call_instrumentation_arg(PyThreadState *tstate, int event,
_PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg);
extern int
_Py_call_instrumentation_2args(PyThreadState *tstate, int event,
_PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1);
extern void
_Py_call_instrumentation_exc0(PyThreadState *tstate, int event,
_PyInterpreterFrame *frame, _Py_CODEUNIT *instr);
extern void
_Py_call_instrumentation_exc2(PyThreadState *tstate, int event,
_PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1);
extern int
_Py_Instrumentation_GetLine(PyCodeObject *code, int index);
extern PyObject _PyInstrumentation_MISSING;
#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_INSTRUMENT_H */
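
The event numbers and tool IDs in this header surface in Python as sys.monitoring.events and the *_ID constants, with an event set being a bitmask (matching _PyMonitoringEventSet). A hedged sketch registering several events with their differing callback signatures; the signatures are taken from PEP 669 and are an assumption of this example:

import sys

mon = sys.monitoring
TOOL = mon.DEBUGGER_ID                       # IDs 0-5 mirror PY_MONITORING_*_ID above;
mon.use_tool_id(TOOL, "demo-debugger")       # 6 and 7 back sys.setprofile()/sys.settrace()

mon.register_callback(TOOL, mon.events.PY_START,
                      lambda code, offset: print("enter", code.co_name))
mon.register_callback(TOOL, mon.events.PY_RETURN,
                      lambda code, offset, retval: print("return", retval))
mon.register_callback(TOOL, mon.events.RAISE,
                      lambda code, offset, exc: print("raise", exc))

# An event set is a bitmask, one bit per event.
mon.set_events(TOOL, mon.events.PY_START | mon.events.PY_RETURN | mon.events.RAISE)

def f(x):
    if x < 0:
        raise ValueError(x)
    return x * 2

f(3)
mon.set_events(TOOL, 0)
mon.free_tool_id(TOOL)
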


@ -24,6 +24,7 @@ extern "C" {
#include "pycore_genobject.h" // struct _Py_async_gen_state
#include "pycore_gc.h" // struct _gc_runtime_state
#include "pycore_import.h" // struct _import_state
#include "pycore_instruments.h" // PY_MONITORING_EVENTS
#include "pycore_list.h" // struct _Py_list_state
#include "pycore_global_objects.h" // struct _Py_interp_static_objects
#include "pycore_object_state.h" // struct _py_object_state
@ -37,7 +38,6 @@ struct _Py_long_state {
int max_str_digits;
};
/* interpreter state */
/* PyInterpreterState holds the global state for one of the runtime's
@ -49,6 +49,9 @@ struct _is {
PyInterpreterState *next;
uint64_t monitoring_version;
uint64_t last_restart_version;
struct pythreads {
uint64_t next_unique_id;
/* The linked list of threads, newest first. */
@ -148,6 +151,15 @@ struct _is {
struct callable_cache callable_cache;
PyCodeObject *interpreter_trampoline;
_Py_Monitors monitors;
bool f_opcode_trace_set;
bool sys_profile_initialized;
bool sys_trace_initialized;
Py_ssize_t sys_profiling_threads; /* Count of threads with c_profilefunc set */
Py_ssize_t sys_tracing_threads; /* Count of threads with c_tracefunc set */
PyObject *monitoring_callables[PY_MONITORING_TOOL_IDS][PY_MONITORING_EVENTS];
PyObject *monitoring_tool_names[PY_MONITORING_TOOL_IDS];
struct _Py_interp_cached_objects cached_objects;
struct _Py_interp_static_objects static_objects;


@ -112,6 +112,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
[DICT_UPDATE] = DICT_UPDATE,
[END_ASYNC_FOR] = END_ASYNC_FOR,
[END_FOR] = END_FOR,
[END_SEND] = END_SEND,
[EXTENDED_ARG] = EXTENDED_ARG,
[FORMAT_VALUE] = FORMAT_VALUE,
[FOR_ITER] = FOR_ITER,
@ -127,6 +128,23 @@ const uint8_t _PyOpcode_Deopt[256] = {
[GET_YIELD_FROM_ITER] = GET_YIELD_FROM_ITER,
[IMPORT_FROM] = IMPORT_FROM,
[IMPORT_NAME] = IMPORT_NAME,
[INSTRUMENTED_CALL] = INSTRUMENTED_CALL,
[INSTRUMENTED_CALL_FUNCTION_EX] = INSTRUMENTED_CALL_FUNCTION_EX,
[INSTRUMENTED_END_FOR] = INSTRUMENTED_END_FOR,
[INSTRUMENTED_END_SEND] = INSTRUMENTED_END_SEND,
[INSTRUMENTED_FOR_ITER] = INSTRUMENTED_FOR_ITER,
[INSTRUMENTED_INSTRUCTION] = INSTRUMENTED_INSTRUCTION,
[INSTRUMENTED_JUMP_BACKWARD] = INSTRUMENTED_JUMP_BACKWARD,
[INSTRUMENTED_JUMP_FORWARD] = INSTRUMENTED_JUMP_FORWARD,
[INSTRUMENTED_LINE] = INSTRUMENTED_LINE,
[INSTRUMENTED_POP_JUMP_IF_FALSE] = INSTRUMENTED_POP_JUMP_IF_FALSE,
[INSTRUMENTED_POP_JUMP_IF_NONE] = INSTRUMENTED_POP_JUMP_IF_NONE,
[INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = INSTRUMENTED_POP_JUMP_IF_NOT_NONE,
[INSTRUMENTED_POP_JUMP_IF_TRUE] = INSTRUMENTED_POP_JUMP_IF_TRUE,
[INSTRUMENTED_RESUME] = INSTRUMENTED_RESUME,
[INSTRUMENTED_RETURN_CONST] = INSTRUMENTED_RETURN_CONST,
[INSTRUMENTED_RETURN_VALUE] = INSTRUMENTED_RETURN_VALUE,
[INSTRUMENTED_YIELD_VALUE] = INSTRUMENTED_YIELD_VALUE,
[INTERPRETER_EXIT] = INTERPRETER_EXIT,
[IS_OP] = IS_OP,
[JUMP_BACKWARD] = JUMP_BACKWARD,
@ -179,6 +197,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
[PUSH_NULL] = PUSH_NULL,
[RAISE_VARARGS] = RAISE_VARARGS,
[RERAISE] = RERAISE,
[RESERVED] = RESERVED,
[RESUME] = RESUME,
[RETURN_CONST] = RETURN_CONST,
[RETURN_GENERATOR] = RETURN_GENERATOR,
@ -223,17 +242,19 @@ static const char *const _PyOpcode_OpName[263] = {
[PUSH_NULL] = "PUSH_NULL",
[INTERPRETER_EXIT] = "INTERPRETER_EXIT",
[END_FOR] = "END_FOR",
[END_SEND] = "END_SEND",
[BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT",
[BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT",
[BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE",
[BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE",
[NOP] = "NOP",
[BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT",
[BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE",
[UNARY_NEGATIVE] = "UNARY_NEGATIVE",
[UNARY_NOT] = "UNARY_NOT",
[BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT",
[BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT",
[BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT",
[UNARY_INVERT] = "UNARY_INVERT",
[BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT",
[RESERVED] = "RESERVED",
[BINARY_OP_SUBTRACT_INT] = "BINARY_OP_SUBTRACT_INT",
[BINARY_SUBSCR_DICT] = "BINARY_SUBSCR_DICT",
[BINARY_SUBSCR_GETITEM] = "BINARY_SUBSCR_GETITEM",
@ -241,21 +262,21 @@ static const char *const _PyOpcode_OpName[263] = {
[BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT",
[CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS",
[CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS",
[CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS",
[CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS",
[BINARY_SUBSCR] = "BINARY_SUBSCR",
[BINARY_SLICE] = "BINARY_SLICE",
[STORE_SLICE] = "STORE_SLICE",
[CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS",
[CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS",
[CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS",
[CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS",
[GET_LEN] = "GET_LEN",
[MATCH_MAPPING] = "MATCH_MAPPING",
[MATCH_SEQUENCE] = "MATCH_SEQUENCE",
[MATCH_KEYS] = "MATCH_KEYS",
[CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST",
[CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS",
[PUSH_EXC_INFO] = "PUSH_EXC_INFO",
[CHECK_EXC_MATCH] = "CHECK_EXC_MATCH",
[CHECK_EG_MATCH] = "CHECK_EG_MATCH",
[CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS",
[CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST",
[CALL_NO_KW_BUILTIN_O] = "CALL_NO_KW_BUILTIN_O",
[CALL_NO_KW_ISINSTANCE] = "CALL_NO_KW_ISINSTANCE",
[CALL_NO_KW_LEN] = "CALL_NO_KW_LEN",
@ -265,8 +286,6 @@ static const char *const _PyOpcode_OpName[263] = {
[CALL_NO_KW_METHOD_DESCRIPTOR_O] = "CALL_NO_KW_METHOD_DESCRIPTOR_O",
[CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1",
[CALL_NO_KW_TUPLE_1] = "CALL_NO_KW_TUPLE_1",
[CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1",
[COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT",
[WITH_EXCEPT_START] = "WITH_EXCEPT_START",
[GET_AITER] = "GET_AITER",
[GET_ANEXT] = "GET_ANEXT",
@ -274,39 +293,39 @@ static const char *const _PyOpcode_OpName[263] = {
[BEFORE_WITH] = "BEFORE_WITH",
[END_ASYNC_FOR] = "END_ASYNC_FOR",
[CLEANUP_THROW] = "CLEANUP_THROW",
[CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1",
[COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT",
[COMPARE_OP_INT] = "COMPARE_OP_INT",
[COMPARE_OP_STR] = "COMPARE_OP_STR",
[FOR_ITER_LIST] = "FOR_ITER_LIST",
[FOR_ITER_TUPLE] = "FOR_ITER_TUPLE",
[STORE_SUBSCR] = "STORE_SUBSCR",
[DELETE_SUBSCR] = "DELETE_SUBSCR",
[FOR_ITER_LIST] = "FOR_ITER_LIST",
[FOR_ITER_TUPLE] = "FOR_ITER_TUPLE",
[FOR_ITER_RANGE] = "FOR_ITER_RANGE",
[FOR_ITER_GEN] = "FOR_ITER_GEN",
[LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS",
[LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN",
[LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE",
[GET_ITER] = "GET_ITER",
[GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER",
[LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS",
[LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT",
[LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT",
[LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE",
[LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR",
[RETURN_GENERATOR] = "RETURN_GENERATOR",
[LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT",
[LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT",
[LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT",
[LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT",
[LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES",
[LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST",
[LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
[LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
[LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
[RETURN_VALUE] = "RETURN_VALUE",
[LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
[LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
[SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS",
[LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
[LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
[STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE",
[STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT",
[POP_EXCEPT] = "POP_EXCEPT",
[STORE_NAME] = "STORE_NAME",
[DELETE_NAME] = "DELETE_NAME",
@ -329,9 +348,9 @@ static const char *const _PyOpcode_OpName[263] = {
[IMPORT_NAME] = "IMPORT_NAME",
[IMPORT_FROM] = "IMPORT_FROM",
[JUMP_FORWARD] = "JUMP_FORWARD",
[STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT",
[STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST",
[STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
[STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE",
[POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE",
[LOAD_GLOBAL] = "LOAD_GLOBAL",
@ -359,9 +378,9 @@ static const char *const _PyOpcode_OpName[263] = {
[STORE_DEREF] = "STORE_DEREF",
[DELETE_DEREF] = "DELETE_DEREF",
[JUMP_BACKWARD] = "JUMP_BACKWARD",
[STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
[STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
[CALL_FUNCTION_EX] = "CALL_FUNCTION_EX",
[UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[EXTENDED_ARG] = "EXTENDED_ARG",
[LIST_APPEND] = "LIST_APPEND",
[SET_ADD] = "SET_ADD",
@ -371,14 +390,14 @@ static const char *const _PyOpcode_OpName[263] = {
[YIELD_VALUE] = "YIELD_VALUE",
[RESUME] = "RESUME",
[MATCH_CLASS] = "MATCH_CLASS",
[UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE",
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
[STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
[UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[FORMAT_VALUE] = "FORMAT_VALUE",
[BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP",
[BUILD_STRING] = "BUILD_STRING",
[UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE",
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
[SEND_GEN] = "SEND_GEN",
[159] = "<159>",
[160] = "<160>",
[161] = "<161>",
[LIST_EXTEND] = "LIST_EXTEND",
[SET_UPDATE] = "SET_UPDATE",
@ -456,24 +475,24 @@ static const char *const _PyOpcode_OpName[263] = {
[235] = "<235>",
[236] = "<236>",
[237] = "<237>",
[238] = "<238>",
[239] = "<239>",
[240] = "<240>",
[241] = "<241>",
[242] = "<242>",
[243] = "<243>",
[244] = "<244>",
[245] = "<245>",
[246] = "<246>",
[247] = "<247>",
[248] = "<248>",
[249] = "<249>",
[250] = "<250>",
[251] = "<251>",
[252] = "<252>",
[253] = "<253>",
[254] = "<254>",
[DO_TRACING] = "DO_TRACING",
[INSTRUMENTED_POP_JUMP_IF_NONE] = "INSTRUMENTED_POP_JUMP_IF_NONE",
[INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = "INSTRUMENTED_POP_JUMP_IF_NOT_NONE",
[INSTRUMENTED_RESUME] = "INSTRUMENTED_RESUME",
[INSTRUMENTED_CALL] = "INSTRUMENTED_CALL",
[INSTRUMENTED_RETURN_VALUE] = "INSTRUMENTED_RETURN_VALUE",
[INSTRUMENTED_YIELD_VALUE] = "INSTRUMENTED_YIELD_VALUE",
[INSTRUMENTED_CALL_FUNCTION_EX] = "INSTRUMENTED_CALL_FUNCTION_EX",
[INSTRUMENTED_JUMP_FORWARD] = "INSTRUMENTED_JUMP_FORWARD",
[INSTRUMENTED_JUMP_BACKWARD] = "INSTRUMENTED_JUMP_BACKWARD",
[INSTRUMENTED_RETURN_CONST] = "INSTRUMENTED_RETURN_CONST",
[INSTRUMENTED_FOR_ITER] = "INSTRUMENTED_FOR_ITER",
[INSTRUMENTED_POP_JUMP_IF_FALSE] = "INSTRUMENTED_POP_JUMP_IF_FALSE",
[INSTRUMENTED_POP_JUMP_IF_TRUE] = "INSTRUMENTED_POP_JUMP_IF_TRUE",
[INSTRUMENTED_END_FOR] = "INSTRUMENTED_END_FOR",
[INSTRUMENTED_END_SEND] = "INSTRUMENTED_END_SEND",
[INSTRUMENTED_INSTRUCTION] = "INSTRUMENTED_INSTRUCTION",
[INSTRUMENTED_LINE] = "INSTRUMENTED_LINE",
[255] = "<255>",
[SETUP_FINALLY] = "SETUP_FINALLY",
[SETUP_CLEANUP] = "SETUP_CLEANUP",
[SETUP_WITH] = "SETUP_WITH",
@ -485,8 +504,6 @@ static const char *const _PyOpcode_OpName[263] = {
#endif
#define EXTRA_CASES \
case 159: \
case 160: \
case 161: \
case 166: \
case 167: \
@ -556,23 +573,7 @@ static const char *const _PyOpcode_OpName[263] = {
case 235: \
case 236: \
case 237: \
case 238: \
case 239: \
case 240: \
case 241: \
case 242: \
case 243: \
case 244: \
case 245: \
case 246: \
case 247: \
case 248: \
case 249: \
case 250: \
case 251: \
case 252: \
case 253: \
case 254: \
case 255: \
;
#ifdef __cplusplus


@ -133,16 +133,6 @@ extern void _PyThreadState_BindDetached(PyThreadState *);
extern void _PyThreadState_UnbindDetached(PyThreadState *);
static inline void
_PyThreadState_UpdateTracingState(PyThreadState *tstate)
{
bool use_tracing =
(tstate->tracing == 0) &&
(tstate->c_tracefunc != NULL || tstate->c_profilefunc != NULL);
tstate->cframe->use_tracing = (use_tracing ? 255 : 0);
}
/* Other */
PyAPI_FUNC(PyThreadState *) _PyThreadState_Swap(

Include/opcode.h (generated, 145 changed lines)

@ -13,10 +13,12 @@ extern "C" {
#define PUSH_NULL 2
#define INTERPRETER_EXIT 3
#define END_FOR 4
#define END_SEND 5
#define NOP 9
#define UNARY_NEGATIVE 11
#define UNARY_NOT 12
#define UNARY_INVERT 15
#define RESERVED 17
#define BINARY_SUBSCR 25
#define BINARY_SLICE 26
#define STORE_SLICE 27
@ -114,6 +116,24 @@ extern "C" {
#define KW_NAMES 172
#define CALL_INTRINSIC_1 173
#define CALL_INTRINSIC_2 174
#define MIN_INSTRUMENTED_OPCODE 238
#define INSTRUMENTED_POP_JUMP_IF_NONE 238
#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 239
#define INSTRUMENTED_RESUME 240
#define INSTRUMENTED_CALL 241
#define INSTRUMENTED_RETURN_VALUE 242
#define INSTRUMENTED_YIELD_VALUE 243
#define INSTRUMENTED_CALL_FUNCTION_EX 244
#define INSTRUMENTED_JUMP_FORWARD 245
#define INSTRUMENTED_JUMP_BACKWARD 246
#define INSTRUMENTED_RETURN_CONST 247
#define INSTRUMENTED_FOR_ITER 248
#define INSTRUMENTED_POP_JUMP_IF_FALSE 249
#define INSTRUMENTED_POP_JUMP_IF_TRUE 250
#define INSTRUMENTED_END_FOR 251
#define INSTRUMENTED_END_SEND 252
#define INSTRUMENTED_INSTRUCTION 253
#define INSTRUMENTED_LINE 254
#define MIN_PSEUDO_OPCODE 256
#define SETUP_FINALLY 256
#define SETUP_CLEANUP 257
@ -123,69 +143,68 @@ extern "C" {
#define JUMP_NO_INTERRUPT 261
#define LOAD_METHOD 262
#define MAX_PSEUDO_OPCODE 262
#define BINARY_OP_ADD_FLOAT 5
#define BINARY_OP_ADD_INT 6
#define BINARY_OP_ADD_UNICODE 7
#define BINARY_OP_INPLACE_ADD_UNICODE 8
#define BINARY_OP_MULTIPLY_FLOAT 10
#define BINARY_OP_MULTIPLY_INT 13
#define BINARY_OP_SUBTRACT_FLOAT 14
#define BINARY_OP_SUBTRACT_INT 16
#define BINARY_SUBSCR_DICT 17
#define BINARY_SUBSCR_GETITEM 18
#define BINARY_SUBSCR_LIST_INT 19
#define BINARY_SUBSCR_TUPLE_INT 20
#define CALL_PY_EXACT_ARGS 21
#define CALL_PY_WITH_DEFAULTS 22
#define CALL_BOUND_METHOD_EXACT_ARGS 23
#define CALL_BUILTIN_CLASS 24
#define CALL_BUILTIN_FAST_WITH_KEYWORDS 28
#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 29
#define CALL_NO_KW_BUILTIN_FAST 34
#define CALL_NO_KW_BUILTIN_O 38
#define CALL_NO_KW_ISINSTANCE 39
#define CALL_NO_KW_LEN 40
#define CALL_NO_KW_LIST_APPEND 41
#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 42
#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 43
#define CALL_NO_KW_METHOD_DESCRIPTOR_O 44
#define CALL_NO_KW_STR_1 45
#define CALL_NO_KW_TUPLE_1 46
#define CALL_NO_KW_TYPE_1 47
#define COMPARE_OP_FLOAT 48
#define COMPARE_OP_INT 56
#define COMPARE_OP_STR 57
#define FOR_ITER_LIST 58
#define FOR_ITER_TUPLE 59
#define FOR_ITER_RANGE 62
#define FOR_ITER_GEN 63
#define LOAD_ATTR_CLASS 64
#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 65
#define LOAD_ATTR_INSTANCE_VALUE 66
#define LOAD_ATTR_MODULE 67
#define LOAD_ATTR_PROPERTY 70
#define LOAD_ATTR_SLOT 72
#define LOAD_ATTR_WITH_HINT 73
#define LOAD_ATTR_METHOD_LAZY_DICT 76
#define LOAD_ATTR_METHOD_NO_DICT 77
#define LOAD_ATTR_METHOD_WITH_VALUES 78
#define LOAD_CONST__LOAD_FAST 79
#define LOAD_FAST__LOAD_CONST 80
#define LOAD_FAST__LOAD_FAST 81
#define LOAD_GLOBAL_BUILTIN 82
#define LOAD_GLOBAL_MODULE 84
#define STORE_ATTR_INSTANCE_VALUE 86
#define STORE_ATTR_SLOT 87
#define STORE_ATTR_WITH_HINT 88
#define STORE_FAST__LOAD_FAST 111
#define STORE_FAST__STORE_FAST 112
#define STORE_SUBSCR_DICT 113
#define STORE_SUBSCR_LIST_INT 141
#define UNPACK_SEQUENCE_LIST 143
#define UNPACK_SEQUENCE_TUPLE 153
#define UNPACK_SEQUENCE_TWO_TUPLE 154
#define SEND_GEN 158
#define DO_TRACING 255
#define BINARY_OP_ADD_FLOAT 6
#define BINARY_OP_ADD_INT 7
#define BINARY_OP_ADD_UNICODE 8
#define BINARY_OP_INPLACE_ADD_UNICODE 10
#define BINARY_OP_MULTIPLY_FLOAT 13
#define BINARY_OP_MULTIPLY_INT 14
#define BINARY_OP_SUBTRACT_FLOAT 16
#define BINARY_OP_SUBTRACT_INT 18
#define BINARY_SUBSCR_DICT 19
#define BINARY_SUBSCR_GETITEM 20
#define BINARY_SUBSCR_LIST_INT 21
#define BINARY_SUBSCR_TUPLE_INT 22
#define CALL_PY_EXACT_ARGS 23
#define CALL_PY_WITH_DEFAULTS 24
#define CALL_BOUND_METHOD_EXACT_ARGS 28
#define CALL_BUILTIN_CLASS 29
#define CALL_BUILTIN_FAST_WITH_KEYWORDS 34
#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 38
#define CALL_NO_KW_BUILTIN_FAST 39
#define CALL_NO_KW_BUILTIN_O 40
#define CALL_NO_KW_ISINSTANCE 41
#define CALL_NO_KW_LEN 42
#define CALL_NO_KW_LIST_APPEND 43
#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 44
#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 45
#define CALL_NO_KW_METHOD_DESCRIPTOR_O 46
#define CALL_NO_KW_STR_1 47
#define CALL_NO_KW_TUPLE_1 48
#define CALL_NO_KW_TYPE_1 56
#define COMPARE_OP_FLOAT 57
#define COMPARE_OP_INT 58
#define COMPARE_OP_STR 59
#define FOR_ITER_LIST 62
#define FOR_ITER_TUPLE 63
#define FOR_ITER_RANGE 64
#define FOR_ITER_GEN 65
#define LOAD_ATTR_CLASS 66
#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 67
#define LOAD_ATTR_INSTANCE_VALUE 70
#define LOAD_ATTR_MODULE 72
#define LOAD_ATTR_PROPERTY 73
#define LOAD_ATTR_SLOT 76
#define LOAD_ATTR_WITH_HINT 77
#define LOAD_ATTR_METHOD_LAZY_DICT 78
#define LOAD_ATTR_METHOD_NO_DICT 79
#define LOAD_ATTR_METHOD_WITH_VALUES 80
#define LOAD_CONST__LOAD_FAST 81
#define LOAD_FAST__LOAD_CONST 82
#define LOAD_FAST__LOAD_FAST 84
#define LOAD_GLOBAL_BUILTIN 86
#define LOAD_GLOBAL_MODULE 87
#define STORE_ATTR_INSTANCE_VALUE 88
#define STORE_ATTR_SLOT 111
#define STORE_ATTR_WITH_HINT 112
#define STORE_FAST__LOAD_FAST 113
#define STORE_FAST__STORE_FAST 141
#define STORE_SUBSCR_DICT 143
#define STORE_SUBSCR_LIST_INT 153
#define UNPACK_SEQUENCE_LIST 154
#define UNPACK_SEQUENCE_TUPLE 158
#define UNPACK_SEQUENCE_TWO_TUPLE 159
#define SEND_GEN 160
#define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\
|| ((op) == JUMP) \


@ -439,6 +439,7 @@ _code_type = type(_write_atomic.__code__)
# Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP)
# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches)
# Python 3.12b1 3525 (Shrink the CALL caches)
# Python 3.12a7 3526 (Add instrumentation support)
# Python 3.13 will start with 3550
@ -455,7 +456,7 @@ _code_type = type(_write_atomic.__code__)
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
MAGIC_NUMBER = (3525).to_bytes(2, 'little') + b'\r\n'
MAGIC_NUMBER = (3526).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
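
The bump from 3525 to 3526 changes the four-byte header of cached bytecode, so every existing .pyc is recompiled rather than loaded with the pre-instrumentation format. A small sketch of that relationship, using only standard-library APIs:

import importlib.util, pathlib, py_compile, tempfile

src = pathlib.Path(tempfile.mkdtemp()) / "mod.py"
src.write_text("x = 1\n")
pyc = py_compile.compile(str(src))            # path of the cached bytecode file

with open(pyc, "rb") as f:
    magic = f.read(4)                         # the first four bytes are MAGIC_NUMBER
assert magic == importlib.util.MAGIC_NUMBER   # a stale magic forces recompilation
print(int.from_bytes(magic[:2], "little"))    # 3526 on an interpreter with this change
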


@ -83,6 +83,7 @@ def_op('PUSH_NULL', 2)
def_op('INTERPRETER_EXIT', 3)
def_op('END_FOR', 4)
def_op('END_SEND', 5)
def_op('NOP', 9)
@ -91,6 +92,10 @@ def_op('UNARY_NOT', 12)
def_op('UNARY_INVERT', 15)
# We reserve 17 as it is the initial value for the specializing counter
# This helps us catch cases where we attempt to execute a cache.
def_op('RESERVED', 17)
def_op('BINARY_SUBSCR', 25)
def_op('BINARY_SLICE', 26)
def_op('STORE_SLICE', 27)
@ -221,6 +226,28 @@ hasconst.append(172)
def_op('CALL_INTRINSIC_1', 173)
def_op('CALL_INTRINSIC_2', 174)
# Instrumented instructions
MIN_INSTRUMENTED_OPCODE = 238
def_op('INSTRUMENTED_POP_JUMP_IF_NONE', 238)
def_op('INSTRUMENTED_POP_JUMP_IF_NOT_NONE', 239)
def_op('INSTRUMENTED_RESUME', 240)
def_op('INSTRUMENTED_CALL', 241)
def_op('INSTRUMENTED_RETURN_VALUE', 242)
def_op('INSTRUMENTED_YIELD_VALUE', 243)
def_op('INSTRUMENTED_CALL_FUNCTION_EX', 244)
def_op('INSTRUMENTED_JUMP_FORWARD', 245)
def_op('INSTRUMENTED_JUMP_BACKWARD', 246)
def_op('INSTRUMENTED_RETURN_CONST', 247)
def_op('INSTRUMENTED_FOR_ITER', 248)
def_op('INSTRUMENTED_POP_JUMP_IF_FALSE', 249)
def_op('INSTRUMENTED_POP_JUMP_IF_TRUE', 250)
def_op('INSTRUMENTED_END_FOR', 251)
def_op('INSTRUMENTED_END_SEND', 252)
def_op('INSTRUMENTED_INSTRUCTION', 253)
def_op('INSTRUMENTED_LINE', 254)
# 255 is reserved
hasarg.extend([op for op in opmap.values() if op >= HAVE_ARGUMENT])
MIN_PSEUDO_OPCODE = 256
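
With these additions, the instrumented variants occupy the top of the real opcode space (238-254, with 255 still reserved), below the pseudo-opcodes that start at 256. They can be enumerated from Python via the opcode module changed in this diff:

import opcode

print(opcode.MIN_INSTRUMENTED_OPCODE)        # 238
instrumented = sorted(name for name, op in opcode.opmap.items()
                      if op >= opcode.MIN_INSTRUMENTED_OPCODE)
print(instrumented)                          # INSTRUMENTED_CALL, INSTRUMENTED_LINE, ...
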


@ -20,6 +20,8 @@ class OpcodeTests(unittest.TestCase):
# All defined opcodes
has_arg = dis.hasarg
for name, code in filter(lambda item: item[0] not in dis.deoptmap, dis.opmap.items()):
if code >= opcode.MIN_INSTRUMENTED_OPCODE:
continue
with self.subTest(opname=name):
if code not in has_arg:
stack_effect(code)
@ -47,6 +49,8 @@ class OpcodeTests(unittest.TestCase):
has_exc = dis.hasexc
has_jump = dis.hasjabs + dis.hasjrel
for name, code in filter(lambda item: item[0] not in dis.deoptmap, dis.opmap.items()):
if code >= opcode.MIN_INSTRUMENTED_OPCODE:
continue
with self.subTest(opname=name):
if code not in has_arg:
common = stack_effect(code)


@ -433,8 +433,9 @@ class TracerRun():
not_empty = ''
if self.tracer.set_list:
not_empty += 'All paired tuples have not been processed, '
not_empty += ('the last one was number %d' %
not_empty += ('the last one was number %d\n' %
self.tracer.expect_set_no)
not_empty += repr(self.tracer.set_list)
# Make a BdbNotExpectedError a unittest failure.
if type_ is not None and issubclass(BdbNotExpectedError, type_):


@ -349,14 +349,14 @@ class CodeTest(unittest.TestCase):
def foo():
pass
# assert that opcode 238 is invalid
self.assertEqual(opname[238], '<238>')
# assert that opcode 229 is invalid
self.assertEqual(opname[229], '<229>')
# change first opcode to 0xee (=238)
# change first opcode to 0xeb (=229)
foo.__code__ = foo.__code__.replace(
co_code=b'\xee' + foo.__code__.co_code[1:])
co_code=b'\xe5' + foo.__code__.co_code[1:])
msg = f"unknown opcode 238"
msg = f"unknown opcode 229"
with self.assertRaisesRegex(SystemError, msg):
foo()


@ -479,8 +479,7 @@ dis_asyncwith = """\
YIELD_VALUE 2
RESUME 3
JUMP_BACKWARD_NO_INTERRUPT 5 (to 14)
>> SWAP 2
POP_TOP
>> END_SEND
POP_TOP
%3d LOAD_CONST 1 (1)
@ -492,11 +491,11 @@ dis_asyncwith = """\
CALL 2
GET_AWAITABLE 2
LOAD_CONST 0 (None)
>> SEND 3 (to 62)
>> SEND 3 (to 60)
YIELD_VALUE 2
RESUME 3
JUMP_BACKWARD_NO_INTERRUPT 5 (to 52)
>> POP_TOP
JUMP_BACKWARD_NO_INTERRUPT 5 (to 50)
>> END_SEND
POP_TOP
%3d LOAD_CONST 2 (2)
@ -504,21 +503,20 @@ dis_asyncwith = """\
RETURN_CONST 0 (None)
%3d >> CLEANUP_THROW
JUMP_BACKWARD 26 (to 24)
JUMP_BACKWARD 25 (to 24)
>> CLEANUP_THROW
JUMP_BACKWARD 9 (to 62)
JUMP_BACKWARD 9 (to 60)
>> PUSH_EXC_INFO
WITH_EXCEPT_START
GET_AWAITABLE 2
LOAD_CONST 0 (None)
>> SEND 4 (to 100)
>> SEND 4 (to 98)
YIELD_VALUE 3
RESUME 3
JUMP_BACKWARD_NO_INTERRUPT 5 (to 88)
JUMP_BACKWARD_NO_INTERRUPT 5 (to 86)
>> CLEANUP_THROW
>> SWAP 2
POP_TOP
POP_JUMP_IF_TRUE 1 (to 108)
>> END_SEND
POP_JUMP_IF_TRUE 1 (to 104)
RERAISE 2
>> POP_TOP
POP_EXCEPT
@ -878,9 +876,9 @@ class DisTests(DisTestBase):
def test_widths(self):
long_opcodes = set(['JUMP_BACKWARD_NO_INTERRUPT',
])
'INSTRUMENTED_CALL_FUNCTION_EX'])
for opcode, opname in enumerate(dis.opname):
if opname in long_opcodes:
if opname in long_opcodes or opname.startswith("INSTRUMENTED"):
continue
with self.subTest(opname=opname):
width = dis._OPNAME_WIDTH

Lib/test/test_monitoring.py (new file, 1044 added lines)

File diff suppressed because it is too large.


@ -1445,7 +1445,7 @@ class SizeofTest(unittest.TestCase):
def func():
return sys._getframe()
x = func()
check(x, size('3Pi3c7P2ic??2P'))
check(x, size('3Pii3c7P2ic??2P'))
# function
def func(): pass
check(func, size('14Pi'))


@ -2808,5 +2808,65 @@ class TestEdgeCases(unittest.TestCase):
sys.settrace(sys.gettrace())
class TestLinesAfterTraceStarted(TraceTestCase):
def test_events(self):
tracer = Tracer()
sys._getframe().f_trace = tracer.trace
sys.settrace(tracer.trace)
line = 4
line = 5
sys.settrace(None)
self.compare_events(
TestLinesAfterTraceStarted.test_events.__code__.co_firstlineno,
tracer.events, [
(4, 'line'),
(5, 'line'),
(6, 'line')])
class TestSetLocalTrace(TraceTestCase):
def test_with_branches(self):
def tracefunc(frame, event, arg):
if frame.f_code.co_name == "func":
frame.f_trace = tracefunc
line = frame.f_lineno - frame.f_code.co_firstlineno
events.append((line, event))
return tracefunc
def func(arg = 1):
N = 1
if arg >= 2:
not_reached = 3
else:
reached = 5
if arg >= 3:
not_reached = 7
else:
reached = 9
the_end = 10
EXPECTED_EVENTS = [
(0, 'call'),
(1, 'line'),
(2, 'line'),
(5, 'line'),
(6, 'line'),
(9, 'line'),
(10, 'line'),
(10, 'return'),
]
events = []
sys.settrace(tracefunc)
sys._getframe().f_trace = tracefunc
func()
self.assertEqual(events, EXPECTED_EVENTS)
sys.settrace(None)
if __name__ == "__main__":
unittest.main()


@ -395,7 +395,9 @@ PYTHON_OBJS= \
Python/import.o \
Python/importdl.o \
Python/initconfig.o \
Python/instrumentation.o \
Python/intrinsics.o \
Python/legacy_tracing.o \
Python/marshal.o \
Python/modsupport.o \
Python/mysnprintf.o \


@ -0,0 +1 @@
Implement :pep:`669` Low Impact Monitoring for CPython.


@ -431,13 +431,13 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con)
if (_Py_next_func_version != 0) {
_Py_next_func_version++;
}
co->_co_monitoring = NULL;
co->_co_instrumentation_version = 0;
/* not set */
co->co_weakreflist = NULL;
co->co_extra = NULL;
co->_co_cached = NULL;
co->_co_linearray_entry_size = 0;
co->_co_linearray = NULL;
memcpy(_PyCode_CODE(co), PyBytes_AS_STRING(con->code),
PyBytes_GET_SIZE(con->code));
int entry_point = 0;
@ -816,54 +816,6 @@ failed:
* source location tracking (co_lines/co_positions)
******************/
/* Use co_linetable to compute the line number from a bytecode index, addrq. See
lnotab_notes.txt for the details of the lnotab representation.
*/
int
_PyCode_CreateLineArray(PyCodeObject *co)
{
assert(co->_co_linearray == NULL);
PyCodeAddressRange bounds;
int size;
int max_line = 0;
_PyCode_InitAddressRange(co, &bounds);
while(_PyLineTable_NextAddressRange(&bounds)) {
if (bounds.ar_line > max_line) {
max_line = bounds.ar_line;
}
}
if (max_line < (1 << 15)) {
size = 2;
}
else {
size = 4;
}
co->_co_linearray = PyMem_Malloc(Py_SIZE(co)*size);
if (co->_co_linearray == NULL) {
PyErr_NoMemory();
return -1;
}
co->_co_linearray_entry_size = size;
_PyCode_InitAddressRange(co, &bounds);
while(_PyLineTable_NextAddressRange(&bounds)) {
int start = bounds.ar_start / sizeof(_Py_CODEUNIT);
int end = bounds.ar_end / sizeof(_Py_CODEUNIT);
for (int index = start; index < end; index++) {
assert(index < (int)Py_SIZE(co));
if (size == 2) {
assert(((int16_t)bounds.ar_line) == bounds.ar_line);
((int16_t *)co->_co_linearray)[index] = bounds.ar_line;
}
else {
assert(size == 4);
((int32_t *)co->_co_linearray)[index] = bounds.ar_line;
}
}
}
return 0;
}
int
PyCode_Addr2Line(PyCodeObject *co, int addrq)
{
@ -871,9 +823,6 @@ PyCode_Addr2Line(PyCodeObject *co, int addrq)
return co->co_firstlineno;
}
assert(addrq >= 0 && addrq < _PyCode_NBYTES(co));
if (co->_co_linearray) {
return _PyCode_LineNumberFromArray(co, addrq / sizeof(_Py_CODEUNIT));
}
PyCodeAddressRange bounds;
_PyCode_InitAddressRange(co, &bounds);
return _PyCode_CheckLineNumber(addrq, &bounds);
@ -1531,17 +1480,17 @@ PyCode_GetFreevars(PyCodeObject *code)
}
static void
deopt_code(_Py_CODEUNIT *instructions, Py_ssize_t len)
deopt_code(PyCodeObject *code, _Py_CODEUNIT *instructions)
{
Py_ssize_t len = Py_SIZE(code);
for (int i = 0; i < len; i++) {
_Py_CODEUNIT instruction = instructions[i];
int opcode = _PyOpcode_Deopt[instruction.op.code];
int opcode = _Py_GetBaseOpcode(code, i);
int caches = _PyOpcode_Caches[opcode];
instructions[i].op.code = opcode;
while (caches--) {
instructions[++i].op.code = CACHE;
instructions[i].op.arg = 0;
for (int j = 1; j <= caches; j++) {
instructions[i+j].cache = 0;
}
i += caches;
}
}
@ -1559,7 +1508,7 @@ _PyCode_GetCode(PyCodeObject *co)
if (code == NULL) {
return NULL;
}
deopt_code((_Py_CODEUNIT *)PyBytes_AS_STRING(code), Py_SIZE(co));
deopt_code(co, (_Py_CODEUNIT *)PyBytes_AS_STRING(code));
assert(co->_co_cached->_co_code == NULL);
co->_co_cached->_co_code = Py_NewRef(code);
return code;
@ -1693,6 +1642,30 @@ code_new_impl(PyTypeObject *type, int argcount, int posonlyargcount,
return co;
}
static void
free_monitoring_data(_PyCoMonitoringData *data)
{
if (data == NULL) {
return;
}
if (data->tools) {
PyMem_Free(data->tools);
}
if (data->lines) {
PyMem_Free(data->lines);
}
if (data->line_tools) {
PyMem_Free(data->line_tools);
}
if (data->per_instruction_opcodes) {
PyMem_Free(data->per_instruction_opcodes);
}
if (data->per_instruction_tools) {
PyMem_Free(data->per_instruction_tools);
}
PyMem_Free(data);
}
static void
code_dealloc(PyCodeObject *co)
{
@ -1739,9 +1712,7 @@ code_dealloc(PyCodeObject *co)
if (co->co_weakreflist != NULL) {
PyObject_ClearWeakRefs((PyObject*)co);
}
if (co->_co_linearray) {
PyMem_Free(co->_co_linearray);
}
free_monitoring_data(co->_co_monitoring);
PyObject_Free(co);
}
@ -1885,7 +1856,7 @@ code_hash(PyCodeObject *co)
SCRAMBLE_IN(co->co_firstlineno);
SCRAMBLE_IN(Py_SIZE(co));
for (int i = 0; i < Py_SIZE(co); i++) {
int deop = _PyOpcode_Deopt[_PyCode_CODE(co)[i].op.code];
int deop = _Py_GetBaseOpcode(co, i);
SCRAMBLE_IN(deop);
SCRAMBLE_IN(_PyCode_CODE(co)[i].op.arg);
i += _PyOpcode_Caches[deop];
@ -2314,7 +2285,7 @@ _PyCode_ConstantKey(PyObject *op)
void
_PyStaticCode_Fini(PyCodeObject *co)
{
deopt_code(_PyCode_CODE(co), Py_SIZE(co));
deopt_code(co, _PyCode_CODE(co));
PyMem_Free(co->co_extra);
if (co->_co_cached != NULL) {
Py_CLEAR(co->_co_cached->_co_code);
@ -2329,10 +2300,8 @@ _PyStaticCode_Fini(PyCodeObject *co)
PyObject_ClearWeakRefs((PyObject *)co);
co->co_weakreflist = NULL;
}
if (co->_co_linearray) {
PyMem_Free(co->_co_linearray);
co->_co_linearray = NULL;
}
free_monitoring_data(co->_co_monitoring);
co->_co_monitoring = NULL;
}
int


@ -17,7 +17,6 @@
static PyMemberDef frame_memberlist[] = {
{"f_trace_lines", T_BOOL, OFF(f_trace_lines), 0},
{"f_trace_opcodes", T_BOOL, OFF(f_trace_opcodes), 0},
{NULL} /* Sentinel */
};
@ -104,24 +103,29 @@ frame_getback(PyFrameObject *f, void *closure)
return res;
}
// Given the index of the effective opcode, scan back to construct the oparg
// with EXTENDED_ARG. This only works correctly with *unquickened* code,
// obtained via a call to _PyCode_GetCode!
static unsigned int
get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
static PyObject *
frame_gettrace_opcodes(PyFrameObject *f, void *closure)
{
_Py_CODEUNIT word;
unsigned int oparg = codestr[i].op.arg;
if (i >= 1 && (word = codestr[i-1]).op.code == EXTENDED_ARG) {
oparg |= word.op.arg << 8;
if (i >= 2 && (word = codestr[i-2]).op.code == EXTENDED_ARG) {
oparg |= word.op.arg << 16;
if (i >= 3 && (word = codestr[i-3]).op.code == EXTENDED_ARG) {
oparg |= word.op.arg << 24;
}
}
PyObject *result = f->f_trace_opcodes ? Py_True : Py_False;
return Py_NewRef(result);
}
static int
frame_settrace_opcodes(PyFrameObject *f, PyObject* value, void *Py_UNUSED(ignored))
{
if (!PyBool_Check(value)) {
PyErr_SetString(PyExc_TypeError,
"attribute value type must be bool");
return -1;
}
return oparg;
if (value == Py_True) {
f->f_trace_opcodes = 1;
_PyInterpreterState_GET()->f_opcode_trace_set = true;
}
else {
f->f_trace_opcodes = 0;
}
return 0;
}
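
frame_settrace_opcodes above replaces the old T_BOOL member with a validated property and records in the interpreter state that opcode tracing has been requested. A short sketch of the Python-visible behaviour, using the legacy trace API:

import sys

def tracer(frame, event, arg):
    if event == "call":
        frame.f_trace_opcodes = True         # request per-opcode events for this frame
    elif event == "opcode":
        print("opcode at offset", frame.f_lasti)
    return tracer

def f():
    return 1 + 1

sys.settrace(tracer)
f()
sys.settrace(None)

# The new setter only accepts real bools:
try:
    sys._getframe().f_trace_opcodes = 1      # not a bool -> TypeError
except TypeError as exc:
    print(exc)                               # "attribute value type must be bool"
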
/* Model the evaluation stack, to determine which jumps
@ -299,46 +303,52 @@ mark_stacks(PyCodeObject *code_obj, int len)
while (todo) {
todo = 0;
/* Scan instructions */
for (i = 0; i < len; i++) {
for (i = 0; i < len;) {
int64_t next_stack = stacks[i];
opcode = _Py_GetBaseOpcode(code_obj, i);
int oparg = 0;
while (opcode == EXTENDED_ARG) {
oparg = (oparg << 8) | code[i].op.arg;
i++;
opcode = _Py_GetBaseOpcode(code_obj, i);
stacks[i] = next_stack;
}
int next_i = i + _PyOpcode_Caches[opcode] + 1;
if (next_stack == UNINITIALIZED) {
i = next_i;
continue;
}
opcode = code[i].op.code;
oparg = (oparg << 8) | code[i].op.arg;
switch (opcode) {
case POP_JUMP_IF_FALSE:
case POP_JUMP_IF_TRUE:
{
int64_t target_stack;
int j = get_arg(code, i);
j += i + 1;
int j = next_i + oparg;
assert(j < len);
if (stacks[j] == UNINITIALIZED && j < i) {
todo = 1;
}
next_stack = pop_value(next_stack);
target_stack = next_stack;
assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack);
stacks[j] = target_stack;
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
}
case SEND:
j = get_arg(code, i) + i + INLINE_CACHE_ENTRIES_SEND + 1;
j = oparg + i + INLINE_CACHE_ENTRIES_SEND + 1;
assert(j < len);
assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack);
stacks[j] = next_stack;
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
case JUMP_FORWARD:
j = get_arg(code, i) + i + 1;
j = oparg + i + 1;
assert(j < len);
assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack);
stacks[j] = next_stack;
break;
case JUMP_BACKWARD:
case JUMP_BACKWARD_NO_INTERRUPT:
j = i + 1 - get_arg(code, i);
j = i + 1 - oparg;
assert(j >= 0);
assert(j < len);
if (stacks[j] == UNINITIALIZED && j < i) {
@ -350,13 +360,13 @@ mark_stacks(PyCodeObject *code_obj, int len)
case GET_ITER:
case GET_AITER:
next_stack = push_value(pop_value(next_stack), Iterator);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
case FOR_ITER:
{
int64_t target_stack = push_value(next_stack, Object);
stacks[i+1] = target_stack;
j = get_arg(code, i) + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + i;
stacks[next_i] = target_stack;
j = oparg + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + i;
assert(j < len);
assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack);
stacks[j] = target_stack;
@ -364,16 +374,16 @@ mark_stacks(PyCodeObject *code_obj, int len)
}
case END_ASYNC_FOR:
next_stack = pop_value(pop_value(next_stack));
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
case PUSH_EXC_INFO:
next_stack = push_value(next_stack, Except);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
case POP_EXCEPT:
assert(top_of_stack(next_stack) == Except);
next_stack = pop_value(next_stack);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
case RETURN_VALUE:
assert(pop_value(next_stack) == EMPTY_STACK);
@ -389,57 +399,62 @@ mark_stacks(PyCodeObject *code_obj, int len)
break;
case PUSH_NULL:
next_stack = push_value(next_stack, Null);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
case LOAD_GLOBAL:
{
int j = get_arg(code, i);
int j = oparg;
if (j & 1) {
next_stack = push_value(next_stack, Null);
}
next_stack = push_value(next_stack, Object);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
}
case LOAD_ATTR:
{
assert(top_of_stack(next_stack) == Object);
int j = get_arg(code, i);
int j = oparg;
if (j & 1) {
next_stack = pop_value(next_stack);
next_stack = push_value(next_stack, Null);
next_stack = push_value(next_stack, Object);
}
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
}
case CALL:
{
int args = get_arg(code, i);
int args = oparg;
for (int j = 0; j < args+2; j++) {
next_stack = pop_value(next_stack);
}
next_stack = push_value(next_stack, Object);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
}
case SWAP:
{
int n = get_arg(code, i);
int n = oparg;
next_stack = stack_swap(next_stack, n);
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
}
case COPY:
{
int n = get_arg(code, i);
int n = oparg;
next_stack = push_value(next_stack, peek(next_stack, n));
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
break;
}
case CACHE:
case RESERVED:
{
assert(0);
}
default:
{
int delta = PyCompile_OpcodeStackEffect(opcode, get_arg(code, i));
int delta = PyCompile_OpcodeStackEffect(opcode, oparg);
assert(delta != PY_INVALID_STACK_EFFECT);
while (delta < 0) {
next_stack = pop_value(next_stack);
@ -449,9 +464,10 @@ mark_stacks(PyCodeObject *code_obj, int len)
next_stack = push_value(next_stack, Object);
delta--;
}
stacks[i+1] = next_stack;
stacks[next_i] = next_stack;
}
}
i = next_i;
}
/* Scan exception table */
unsigned char *start = (unsigned char *)PyBytes_AS_STRING(code_obj->co_exceptiontable);
@ -646,31 +662,43 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore
* In addition, jumps are forbidden when not tracing,
* as this is a debugging feature.
*/
switch(PyThreadState_GET()->tracing_what) {
case PyTrace_EXCEPTION:
PyErr_SetString(PyExc_ValueError,
"can only jump from a 'line' trace event");
return -1;
case PyTrace_CALL:
int what_event = PyThreadState_GET()->what_event;
if (what_event < 0) {
PyErr_Format(PyExc_ValueError,
"f_lineno can only be set in a trace function");
return -1;
}
switch (what_event) {
case PY_MONITORING_EVENT_PY_RESUME:
case PY_MONITORING_EVENT_JUMP:
case PY_MONITORING_EVENT_BRANCH:
case PY_MONITORING_EVENT_LINE:
case PY_MONITORING_EVENT_PY_YIELD:
/* Setting f_lineno is allowed for the above events */
break;
case PY_MONITORING_EVENT_PY_START:
PyErr_Format(PyExc_ValueError,
"can't jump from the 'call' trace event of a new frame");
return -1;
case PyTrace_LINE:
break;
case PyTrace_RETURN:
if (state == FRAME_SUSPENDED) {
break;
}
/* fall through */
default:
case PY_MONITORING_EVENT_CALL:
case PY_MONITORING_EVENT_C_RETURN:
PyErr_SetString(PyExc_ValueError,
"can't jump during a call");
return -1;
case PY_MONITORING_EVENT_PY_RETURN:
case PY_MONITORING_EVENT_PY_UNWIND:
case PY_MONITORING_EVENT_PY_THROW:
case PY_MONITORING_EVENT_RAISE:
case PY_MONITORING_EVENT_C_RAISE:
case PY_MONITORING_EVENT_INSTRUCTION:
case PY_MONITORING_EVENT_EXCEPTION_HANDLED:
PyErr_Format(PyExc_ValueError,
"can only jump from a 'line' trace event");
return -1;
}
if (!f->f_trace) {
PyErr_Format(PyExc_ValueError,
"f_lineno can only be set by a trace function");
return -1;
default:
PyErr_SetString(PyExc_SystemError,
"unexpected event type");
return -1;
}
int new_lineno;
@ -803,6 +831,7 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore
start_stack = pop_value(start_stack);
}
/* Finally set the new lasti and return OK. */
f->f_last_traced_line = new_lineno;
f->f_lineno = 0;
f->f_frame->prev_instr = _PyCode_CODE(f->f_frame->f_code) + best_addr;
return 0;
@ -823,7 +852,10 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
if (v == Py_None) {
v = NULL;
}
Py_XSETREF(f->f_trace, Py_XNewRef(v));
if (v != f->f_trace) {
Py_XSETREF(f->f_trace, Py_XNewRef(v));
f->f_last_traced_line = -1;
}
return 0;
}
@ -838,6 +870,7 @@ static PyGetSetDef frame_getsetlist[] = {
{"f_globals", (getter)frame_getglobals, NULL, NULL},
{"f_builtins", (getter)frame_getbuiltins, NULL, NULL},
{"f_code", (getter)frame_getcode, NULL, NULL},
{"f_trace_opcodes", (getter)frame_gettrace_opcodes, (setter)frame_settrace_opcodes, NULL},
{0}
};
@ -1023,6 +1056,7 @@ _PyFrame_New_NoTrack(PyCodeObject *code)
f->f_trace_opcodes = 0;
f->f_fast_as_locals = 0;
f->f_lineno = 0;
f->f_last_traced_line = -1;
return f;
}


@ -1972,6 +1972,7 @@ extern PyTypeObject _Py_GenericAliasIterType;
extern PyTypeObject _PyMemoryIter_Type;
extern PyTypeObject _PyLineIterator;
extern PyTypeObject _PyPositionsIterator;
extern PyTypeObject _PyLegacyEventHandler_Type;
static PyTypeObject* static_types[] = {
// The two most important base types: must be initialized first and
@ -2069,6 +2070,7 @@ static PyTypeObject* static_types[] = {
&_PyHamt_BitmapNode_Type,
&_PyHamt_CollisionNode_Type,
&_PyHamt_Type,
&_PyLegacyEventHandler_Type,
&_PyInterpreterID_Type,
&_PyLineIterator,
&_PyManagedBuffer_Type,


@ -209,6 +209,8 @@
<ClCompile Include="..\Python\importdl.c" />
<ClCompile Include="..\Python\initconfig.c" />
<ClCompile Include="..\Python\intrinsics.c" />
<ClCompile Include="..\Python\instrumentation.c" />
<ClCompile Include="..\Python\legacy_tracing.c" />
<ClCompile Include="..\Python\marshal.c" />
<ClCompile Include="..\Python\modsupport.c" />
<ClCompile Include="..\Python\mysnprintf.c" />


@ -214,6 +214,12 @@
<ClCompile Include="..\Python\intrinsics.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Python\instrumentation.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Python\legacy_tracing.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Objects\interpreteridobject.c">
<Filter>Source Files</Filter>
</ClCompile>


@ -532,6 +532,8 @@
<ClCompile Include="..\Python\importdl.c" />
<ClCompile Include="..\Python\initconfig.c" />
<ClCompile Include="..\Python\intrinsics.c" />
<ClCompile Include="..\Python\instrumentation.c" />
<ClCompile Include="..\Python\legacy_tracing.c" />
<ClCompile Include="..\Python\marshal.c" />
<ClCompile Include="..\Python\modsupport.c" />
<ClCompile Include="..\Python\mysnprintf.c" />


@ -1178,6 +1178,12 @@
<ClCompile Include="..\Python\intrinsics.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Python\instrumentation.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Python\legacy_tracing.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Python\marshal.c">
<Filter>Python</Filter>
</ClCompile>


@ -14,6 +14,7 @@
#include "pycore_function.h"
#include "pycore_intrinsics.h"
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_instruments.h"
#include "pycore_object.h" // _PyObject_GC_TRACK()
#include "pycore_moduleobject.h" // PyModuleObject
#include "pycore_opcode.h" // EXTRA_CASES
@ -134,11 +135,45 @@ dummy_func(
inst(RESUME, (--)) {
assert(tstate->cframe == &cframe);
assert(frame == cframe.current_frame);
if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) {
/* Possibly combine this with eval breaker */
if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) {
int err = _Py_Instrument(frame->f_code, tstate->interp);
ERROR_IF(err, error);
next_instr--;
}
else if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) {
goto handle_eval_breaker;
}
}
inst(INSTRUMENTED_RESUME, (--)) {
/* Possible performance enhancement:
* We need to check the eval breaker anyway, can we
* combine the instrumentation version check and the eval breaker test?
*/
if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) {
if (_Py_Instrument(frame->f_code, tstate->interp)) {
goto error;
}
next_instr--;
}
else {
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_call_instrumentation(
tstate, oparg > 0, frame, next_instr-1);
stack_pointer = _PyFrame_GetStackPointer(frame);
ERROR_IF(err, error);
if (frame->prev_instr != next_instr-1) {
/* Instrumentation has jumped */
next_instr = frame->prev_instr;
DISPATCH();
}
if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) {
goto handle_eval_breaker;
}
}
}
inst(LOAD_CLOSURE, (-- value)) {
/* We keep LOAD_CLOSURE so that the bytecode stays more readable. */
value = GETLOCAL(oparg);
@ -183,6 +218,34 @@ dummy_func(
macro(END_FOR) = POP_TOP + POP_TOP;
inst(INSTRUMENTED_END_FOR, (receiver, value --)) {
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
if (PyGen_Check(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
goto error;
}
PyErr_SetRaisedException(NULL);
}
DECREF_INPUTS();
}
inst(END_SEND, (receiver, value -- value)) {
Py_DECREF(receiver);
}
inst(INSTRUMENTED_END_SEND, (receiver, value -- value)) {
if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
goto error;
}
PyErr_SetRaisedException(NULL);
}
Py_DECREF(receiver);
}
inst(UNARY_NEGATIVE, (value -- res)) {
res = PyNumber_Negative(value);
DECREF_INPUTS();
@ -222,7 +285,6 @@ dummy_func(
inst(BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -233,7 +295,6 @@ dummy_func(
}
inst(BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -243,7 +304,6 @@ dummy_func(
}
inst(BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -254,7 +314,6 @@ dummy_func(
}
inst(BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -263,7 +322,6 @@ dummy_func(
}
inst(BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -280,7 +338,6 @@ dummy_func(
// specializations, but there is no output.
// At the end we just skip over the STORE_FAST.
inst(BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
_Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP];
@ -310,7 +367,6 @@ dummy_func(
}
inst(BINARY_OP_ADD_FLOAT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -320,7 +376,6 @@ dummy_func(
}
inst(BINARY_OP_ADD_INT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@ -342,7 +397,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_BinarySubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
@ -386,7 +440,6 @@ dummy_func(
}
inst(BINARY_SUBSCR_LIST_INT, (unused/1, list, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR);
@ -403,7 +456,6 @@ dummy_func(
}
inst(BINARY_SUBSCR_TUPLE_INT, (unused/1, tuple, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR);
@ -420,7 +472,6 @@ dummy_func(
}
inst(BINARY_SUBSCR_DICT, (unused/1, dict, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR);
STAT_INC(BINARY_SUBSCR, hit);
res = PyDict_GetItemWithError(dict, sub);
@ -479,7 +530,6 @@ dummy_func(
inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) {
#if ENABLE_SPECIALIZATION
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
@ -497,7 +547,6 @@ dummy_func(
}
inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@ -517,7 +566,6 @@ dummy_func(
}
inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
@ -573,7 +621,6 @@ dummy_func(
assert(EMPTY());
/* Restore previous cframe and return. */
tstate->cframe = cframe.previous;
tstate->cframe->use_tracing = cframe.use_tracing;
assert(tstate->cframe->current_frame == frame->previous);
assert(!_PyErr_Occurred(tstate));
_Py_LeaveRecursiveCallTstate(tstate);
@ -584,8 +631,24 @@ dummy_func(
STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
_PyInterpreterFrame *dying = frame;
frame = cframe.current_frame = dying->previous;
_PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
inst(INSTRUMENTED_RETURN_VALUE, (retval --)) {
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
if (err) goto error;
STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
@ -601,8 +664,25 @@ dummy_func(
Py_INCREF(retval);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
_PyInterpreterFrame *dying = frame;
frame = cframe.current_frame = dying->previous;
_PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
inst(INSTRUMENTED_RETURN_CONST, (--)) {
PyObject *retval = GETITEM(frame->f_code->co_consts, oparg);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
if (err) goto error;
Py_INCREF(retval);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
// GH-99729: We need to unlink the frame *before* clearing it:
@ -730,7 +810,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PySendCache *cache = (_PySendCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_Send(receiver, next_instr);
DISPATCH_SAME_OPARG();
@ -739,6 +818,20 @@ dummy_func(
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
#endif /* ENABLE_SPECIALIZATION */
assert(frame != &entry_frame);
if ((Py_TYPE(receiver) == &PyGen_Type ||
Py_TYPE(receiver) == &PyCoro_Type) && ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
{
PyGenObject *gen = (PyGenObject *)receiver;
_PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
frame->yield_offset = oparg;
STACK_SHRINK(1);
_PyFrame_StackPush(gen_frame, v);
gen->gi_frame_state = FRAME_EXECUTING;
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
JUMPBY(INLINE_CACHE_ENTRIES_SEND + oparg);
DISPATCH_INLINED(gen_frame);
}
if (Py_IsNone(v) && PyIter_Check(receiver)) {
retval = Py_TYPE(receiver)->tp_iternext(receiver);
}
@ -746,26 +839,22 @@ dummy_func(
retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
}
if (retval == NULL) {
if (tstate->c_tracefunc != NULL
&& _PyErr_ExceptionMatches(tstate, PyExc_StopIteration))
call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame);
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
) {
monitor_raise(tstate, frame, next_instr-1);
}
if (_PyGen_FetchStopIterationValue(&retval) == 0) {
assert(retval != NULL);
JUMPBY(oparg);
}
else {
assert(retval == NULL);
goto error;
}
}
else {
assert(retval != NULL);
}
Py_DECREF(v);
}
inst(SEND_GEN, (unused/1, receiver, v -- receiver)) {
assert(cframe.use_tracing == 0);
PyGenObject *gen = (PyGenObject *)receiver;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type &&
Py_TYPE(gen) != &PyCoro_Type, SEND);
@ -782,6 +871,26 @@ dummy_func(
DISPATCH_INLINED(gen_frame);
}
inst(INSTRUMENTED_YIELD_VALUE, (retval -- unused)) {
assert(frame != &entry_frame);
PyGenObject *gen = _PyFrame_GetGenerator(frame);
gen->gi_frame_state = FRAME_SUSPENDED;
_PyFrame_SetStackPointer(frame, stack_pointer - 1);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_YIELD,
frame, next_instr-1, retval);
if (err) goto error;
tstate->exc_info = gen->gi_exc_state.previous_item;
gen->gi_exc_state.previous_item = NULL;
_Py_LeaveRecursiveCallPy(tstate);
_PyInterpreterFrame *gen_frame = frame;
frame = cframe.current_frame = frame->previous;
gen_frame->previous = NULL;
frame->prev_instr -= frame->yield_offset;
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
inst(YIELD_VALUE, (retval -- unused)) {
// NOTE: It's important that YIELD_VALUE never raises an exception!
// The compiler treats any exception raised here as a failed close()
@ -790,8 +899,6 @@ dummy_func(
PyGenObject *gen = _PyFrame_GetGenerator(frame);
gen->gi_frame_state = FRAME_SUSPENDED;
_PyFrame_SetStackPointer(frame, stack_pointer - 1);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
tstate->exc_info = gen->gi_exc_state.previous_item;
gen->gi_exc_state.previous_item = NULL;
_Py_LeaveRecursiveCallPy(tstate);
@ -930,7 +1037,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
DISPATCH_SAME_OPARG();
@ -994,7 +1100,6 @@ dummy_func(
inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) {
#if ENABLE_SPECIALIZATION
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
@ -1111,7 +1216,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1);
next_instr--;
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
@ -1163,7 +1267,6 @@ dummy_func(
}
inst(LOAD_GLOBAL_MODULE, (unused/1, index/1, version/1, unused/1 -- null if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL);
PyDictObject *dict = (PyDictObject *)GLOBALS();
DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL);
@ -1177,11 +1280,11 @@ dummy_func(
}
inst(LOAD_GLOBAL_BUILTIN, (unused/1, index/1, mod_version/1, bltn_version/1 -- null if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL);
DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL);
PyDictObject *mdict = (PyDictObject *)GLOBALS();
PyDictObject *bdict = (PyDictObject *)BUILTINS();
assert(opcode == LOAD_GLOBAL_BUILTIN);
DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL);
DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL);
assert(DK_IS_UNICODE(bdict->ma_keys));
@ -1465,7 +1568,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1);
next_instr--;
_Py_Specialize_LoadAttr(owner, next_instr, name);
@ -1511,7 +1613,6 @@ dummy_func(
}
inst(LOAD_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@ -1528,7 +1629,6 @@ dummy_func(
}
inst(LOAD_ATTR_MODULE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR);
PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
assert(dict != NULL);
@ -1545,7 +1645,6 @@ dummy_func(
}
inst(LOAD_ATTR_WITH_HINT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@ -1576,7 +1675,6 @@ dummy_func(
}
inst(LOAD_ATTR_SLOT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@ -1590,7 +1688,6 @@ dummy_func(
}
inst(LOAD_ATTR_CLASS, (unused/1, type_version/2, unused/2, descr/4, cls -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyType_Check(cls), LOAD_ATTR);
DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version,
@ -1606,7 +1703,6 @@ dummy_func(
}
inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused if (oparg & 1), unused)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
@ -1632,7 +1728,6 @@ dummy_func(
}
inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused if (oparg & 1), unused)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
@ -1660,7 +1755,6 @@ dummy_func(
}
inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@ -1681,7 +1775,6 @@ dummy_func(
}
inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@ -1723,7 +1816,6 @@ dummy_func(
}
inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@ -1746,7 +1838,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
@ -1761,7 +1852,6 @@ dummy_func(
}
inst(COMPARE_OP_FLOAT, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@ -1777,7 +1867,6 @@ dummy_func(
// Similar to COMPARE_OP_FLOAT
inst(COMPARE_OP_INT, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP);
@ -1797,7 +1886,6 @@ dummy_func(
// Similar to COMPARE_OP_FLOAT, but for ==, != only
inst(COMPARE_OP_STR, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@ -2044,7 +2132,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyForIterCache *cache = (_PyForIterCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_ForIter(iter, next_instr, oparg);
DISPATCH_SAME_OPARG();
@ -2059,13 +2146,12 @@ dummy_func(
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
else if (tstate->c_tracefunc != NULL) {
call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame);
}
monitor_raise(tstate, frame, next_instr-1);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR);
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR ||
next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR);
Py_DECREF(iter);
STACK_SHRINK(1);
/* Jump forward oparg, then skip following END_FOR instruction */
@ -2075,8 +2161,35 @@ dummy_func(
// Common case: no jump, leave it to the code generator
}
inst(INSTRUMENTED_FOR_ITER, ( -- )) {
_Py_CODEUNIT *here = next_instr-1;
_Py_CODEUNIT *target;
PyObject *iter = TOP();
PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter);
if (next != NULL) {
PUSH(next);
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER;
}
else {
if (_PyErr_Occurred(tstate)) {
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
monitor_raise(tstate, frame, here);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR ||
next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR);
STACK_SHRINK(1);
Py_DECREF(iter);
/* Skip END_FOR */
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1;
}
INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH);
}
inst(FOR_ITER_LIST, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER);
_PyListIterObject *it = (_PyListIterObject *)iter;
STAT_INC(FOR_ITER, hit);
@ -2099,7 +2212,6 @@ dummy_func(
}
inst(FOR_ITER_TUPLE, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
_PyTupleIterObject *it = (_PyTupleIterObject *)iter;
DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@ -2122,7 +2234,6 @@ dummy_func(
}
inst(FOR_ITER_RANGE, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
_PyRangeIterObject *r = (_PyRangeIterObject *)iter;
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@ -2143,7 +2254,6 @@ dummy_func(
}
inst(FOR_ITER_GEN, (unused/1, iter -- iter, unused)) {
assert(cframe.use_tracing == 0);
PyGenObject *gen = (PyGenObject *)iter;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER);
DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER);
@ -2155,7 +2265,8 @@ dummy_func(
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg);
assert(next_instr->op.code == END_FOR);
assert(next_instr->op.code == END_FOR ||
next_instr->op.code == INSTRUMENTED_END_FOR);
DISPATCH_INLINED(gen_frame);
}
@ -2264,7 +2375,6 @@ dummy_func(
inst(LOAD_ATTR_METHOD_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, self -- res2 if (oparg & 1), res)) {
/* Cached method object */
assert(cframe.use_tracing == 0);
PyTypeObject *self_cls = Py_TYPE(self);
assert(type_version != 0);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
@ -2283,7 +2393,6 @@ dummy_func(
}
inst(LOAD_ATTR_METHOD_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
assert(self_cls->tp_dictoffset == 0);
@ -2296,7 +2405,6 @@ dummy_func(
}
inst(LOAD_ATTR_METHOD_LAZY_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (oparg & 1), res)) {
assert(cframe.use_tracing == 0);
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
Py_ssize_t dictoffset = self_cls->tp_dictoffset;
@ -2318,6 +2426,21 @@ dummy_func(
kwnames = GETITEM(frame->f_code->co_consts, oparg);
}
inst(INSTRUMENTED_CALL, ( -- )) {
int is_meth = PEEK(oparg+2) != NULL;
int total_args = oparg + is_meth;
PyObject *function = PEEK(total_args + 1);
PyObject *arg = total_args == 0 ?
&_PyInstrumentation_MISSING : PEEK(total_args);
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, function, arg);
ERROR_IF(err, error);
_PyCallCache *cache = (_PyCallCache *)next_instr;
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
GO_TO_INSTRUCTION(CALL);
}
// Cache layout: counter/1, func_version/2
// Neither CALL_INTRINSIC_1/2 nor CALL_FUNCTION_EX are members!
family(call, INLINE_CACHE_ENTRIES_CALL) = {
@ -2359,7 +2482,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyCallCache *cache = (_PyCallCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_Call(callable, next_instr, total_args, kwnames);
DISPATCH_SAME_OPARG();
@ -2402,16 +2524,26 @@ dummy_func(
DISPATCH_INLINED(new_frame);
}
/* Callable is not a normal Python function */
if (cframe.use_tracing) {
res = trace_call_function(
tstate, callable, args,
positional_args, kwnames);
}
else {
res = PyObject_Vectorcall(
callable, args,
positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
kwnames);
res = PyObject_Vectorcall(
callable, args,
positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
kwnames);
if (opcode == INSTRUMENTED_CALL) {
PyObject *arg = total_args == 0 ?
&_PyInstrumentation_MISSING : PEEK(total_args);
if (res == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, callable, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, callable, arg);
if (err < 0) {
Py_CLEAR(res);
}
}
}
kwnames = NULL;
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
@ -2504,7 +2636,6 @@ dummy_func(
inst(CALL_NO_KW_TYPE_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) {
assert(kwnames == NULL);
assert(cframe.use_tracing == 0);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
PyObject *obj = args[0];
@ -2517,7 +2648,6 @@ dummy_func(
inst(CALL_NO_KW_STR_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) {
assert(kwnames == NULL);
assert(cframe.use_tracing == 0);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL);
@ -2570,7 +2700,6 @@ dummy_func(
}
inst(CALL_NO_KW_BUILTIN_O, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
/* Builtin METH_O functions */
assert(kwnames == NULL);
int is_meth = method != NULL;
@ -2602,7 +2731,6 @@ dummy_func(
}
inst(CALL_NO_KW_BUILTIN_FAST, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
/* Builtin METH_FASTCALL functions, without keywords */
assert(kwnames == NULL);
int is_meth = method != NULL;
@ -2638,7 +2766,6 @@ dummy_func(
}
inst(CALL_BUILTIN_FAST_WITH_KEYWORDS, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
/* Builtin METH_FASTCALL | METH_KEYWORDS functions */
int is_meth = method != NULL;
int total_args = oparg;
@ -2674,7 +2801,6 @@ dummy_func(
}
inst(CALL_NO_KW_LEN, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
assert(kwnames == NULL);
/* len(o) */
int is_meth = method != NULL;
@ -2702,7 +2828,6 @@ dummy_func(
}
inst(CALL_NO_KW_ISINSTANCE, (unused/1, unused/2, method, callable, args[oparg] -- res)) {
assert(cframe.use_tracing == 0);
assert(kwnames == NULL);
/* isinstance(o, o2) */
int is_meth = method != NULL;
@ -2733,7 +2858,6 @@ dummy_func(
// This is secretly a super-instruction
inst(CALL_NO_KW_LIST_APPEND, (unused/1, unused/2, method, self, args[oparg] -- unused)) {
assert(cframe.use_tracing == 0);
assert(kwnames == NULL);
assert(oparg == 1);
assert(method != NULL);
@ -2882,12 +3006,14 @@ dummy_func(
CHECK_EVAL_BREAKER();
}
inst(INSTRUMENTED_CALL_FUNCTION_EX, ( -- )) {
GO_TO_INSTRUCTION(CALL_FUNCTION_EX);
}
inst(CALL_FUNCTION_EX, (unused, func, callargs, kwargs if (oparg & 1) -- result)) {
if (oparg & 1) {
// DICT_MERGE is called before this opcode if there are kwargs.
// It converts all dict subtypes in kwargs into regular dicts.
assert(PyDict_CheckExact(kwargs));
}
// DICT_MERGE is called before this opcode if there are kwargs.
// It converts all dict subtypes in kwargs into regular dicts.
assert(kwargs == NULL || PyDict_CheckExact(kwargs));
if (!PyTuple_CheckExact(callargs)) {
if (check_args_iterable(tstate, func, callargs) < 0) {
goto error;
@ -2899,10 +3025,35 @@ dummy_func(
Py_SETREF(callargs, tuple);
}
assert(PyTuple_CheckExact(callargs));
result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing);
EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
if (opcode == INSTRUMENTED_CALL_FUNCTION_EX &&
!PyFunction_Check(func) && !PyMethod_Check(func)
) {
PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ?
PyTuple_GET_ITEM(callargs, 0) : Py_None;
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, func, arg);
if (err) goto error;
result = PyObject_Call(func, callargs, kwargs);
if (result == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, func, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, func, arg);
if (err < 0) {
Py_CLEAR(result);
}
}
}
else {
result = PyObject_Call(func, callargs, kwargs);
}
DECREF_INPUTS();
assert(PEEK(3 + (oparg & 1)) == NULL);
ERROR_IF(result == NULL, error);
CHECK_EVAL_BREAKER();
@ -3018,7 +3169,6 @@ dummy_func(
#if ENABLE_SPECIALIZATION
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0));
DISPATCH_SAME_OPARG();
@ -3039,9 +3189,105 @@ dummy_func(
assert(oparg >= 2);
}
inst(EXTENDED_ARG, (--)) {
inst(INSTRUMENTED_LINE, ( -- )) {
_Py_CODEUNIT *here = next_instr-1;
_PyFrame_SetStackPointer(frame, stack_pointer);
int original_opcode = _Py_call_instrumentation_line(
tstate, frame, here);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (original_opcode < 0) {
next_instr = here+1;
goto error;
}
next_instr = frame->prev_instr;
if (next_instr != here) {
DISPATCH();
}
if (_PyOpcode_Caches[original_opcode]) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
}
opcode = original_opcode;
DISPATCH_GOTO();
}
inst(INSTRUMENTED_INSTRUCTION, ( -- )) {
int next_opcode = _Py_call_instrumentation_instruction(
tstate, frame, next_instr-1);
ERROR_IF(next_opcode < 0, error);
next_instr--;
if (_PyOpcode_Caches[next_opcode]) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
}
assert(next_opcode > 0 && next_opcode < 256);
opcode = next_opcode;
DISPATCH_GOTO();
}
inst(INSTRUMENTED_JUMP_FORWARD, ( -- )) {
INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP);
}
inst(INSTRUMENTED_JUMP_BACKWARD, ( -- )) {
INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP);
CHECK_EVAL_BREAKER();
}
inst(INSTRUMENTED_POP_JUMP_IF_TRUE, ( -- )) {
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
ERROR_IF(err < 0, error);
_Py_CODEUNIT *here = next_instr-1;
assert(err == 0 || err == 1);
int offset = err*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_FALSE, ( -- )) {
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
ERROR_IF(err < 0, error);
_Py_CODEUNIT *here = next_instr-1;
assert(err == 0 || err == 1);
int offset = (1-err)*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_NONE, ( -- )) {
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
if (Py_IsNone(value)) {
_Py_DECREF_NO_DEALLOC(value);
offset = oparg;
}
else {
Py_DECREF(value);
offset = 0;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_NOT_NONE, ( -- )) {
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
if (Py_IsNone(value)) {
_Py_DECREF_NO_DEALLOC(value);
offset = 0;
}
else {
Py_DECREF(value);
offset = oparg;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(EXTENDED_ARG, ( -- )) {
assert(oparg);
assert(cframe.use_tracing == 0);
opcode = next_instr->op.code;
oparg = oparg << 8 | next_instr->op.arg;
PRE_DISPATCH_GOTO();
@ -3049,6 +3295,12 @@ dummy_func(
}
inst(CACHE, (--)) {
assert(0 && "Executing a cache.");
Py_UNREACHABLE();
}
inst(RESERVED, (--)) {
assert(0 && "Executing RESERVED instruction.");
Py_UNREACHABLE();
}
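The instrumented bytecodes above (INSTRUMENTED_RETURN_VALUE, INSTRUMENTED_CALL, INSTRUMENTED_LINE, the INSTRUMENTED_POP_JUMP_* family, and so on) only fire when a tool has asked for the corresponding event. A minimal sketch of how a tool might drive them from Python, assuming the module is exposed as sys.monitoring as described in PEP 669; the tool name and callback bodies are purely illustrative:

import sys
mon = sys.monitoring                      # PEP 669 module (assumed name)

TOOL = mon.PROFILER_ID                    # any unused tool id would do
mon.use_tool_id(TOOL, "example-counter")

counts = {"calls": 0, "returns": 0}

def on_call(code, instruction_offset, callable_obj, arg0):
    # Reached via INSTRUMENTED_CALL before the call proceeds.
    counts["calls"] += 1

def on_return(code, instruction_offset, retval):
    # Reached via INSTRUMENTED_RETURN_VALUE / INSTRUMENTED_RETURN_CONST.
    counts["returns"] += 1

mon.register_callback(TOOL, mon.events.CALL, on_call)
mon.register_callback(TOOL, mon.events.PY_RETURN, on_return)
mon.set_events(TOOL, mon.events.CALL | mon.events.PY_RETURN)

sum(range(10))                            # triggers CALL and PY_RETURN events

mon.set_events(TOOL, 0)                   # de-instrument again
mon.free_tool_id(TOOL)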

Python/ceval.c
View file

@ -10,6 +10,7 @@
#include "pycore_function.h"
#include "pycore_intrinsics.h"
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_instruments.h"
#include "pycore_object.h" // _PyObject_GC_TRACK()
#include "pycore_moduleobject.h" // PyModuleObject
#include "pycore_opcode.h" // EXTRA_CASES
@ -92,13 +93,6 @@
#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) _Py_atomic_load_relaxed(ATOMIC_VAL)
#endif
/* Forward declarations */
static PyObject *trace_call_function(
PyThreadState *tstate, PyObject *callable, PyObject **stack,
Py_ssize_t oparg, PyObject *kwnames);
static PyObject * do_call_core(
PyThreadState *tstate, PyObject *func,
PyObject *callargs, PyObject *kwdict, int use_tracing);
#ifdef LLTRACE
static void
@ -179,19 +173,22 @@ lltrace_resume_frame(_PyInterpreterFrame *frame)
PyErr_SetRaisedException(exc);
}
#endif
static int call_trace(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *,
int, PyObject *);
static int call_trace_protected(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *,
int, PyObject *);
static void call_exc_trace(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *);
static int maybe_call_line_trace(Py_tracefunc, PyObject *,
PyThreadState *, _PyInterpreterFrame *, int);
static void maybe_dtrace_line(_PyInterpreterFrame *, PyTraceInfo *, int);
static void dtrace_function_entry(_PyInterpreterFrame *);
static void dtrace_function_return(_PyInterpreterFrame *);
static void monitor_raise(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static int monitor_stop_iteration(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static void monitor_unwind(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static void monitor_handled(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr, PyObject *exc);
static void monitor_throw(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static PyObject * import_name(PyThreadState *, _PyInterpreterFrame *,
PyObject *, PyObject *, PyObject *);
@ -217,21 +214,6 @@ _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame);
"cannot access free variable '%s' where it is not associated with a" \
" value in enclosing scope"
#ifndef NDEBUG
/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and
PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen
when a thread continues to run after Python finalization, especially
daemon threads. */
static int
is_tstate_valid(PyThreadState *tstate)
{
assert(!_PyMem_IsPtrFreed(tstate));
assert(!_PyMem_IsPtrFreed(tstate->interp));
return 1;
}
#endif
#ifdef HAVE_ERRNO_H
#include <errno.h>
#endif
@ -596,63 +578,6 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
#include "ceval_macros.h"
static int
trace_function_entry(PyThreadState *tstate, _PyInterpreterFrame *frame)
{
if (tstate->c_tracefunc != NULL) {
/* tstate->c_tracefunc, if defined, is a
function that will be called on *every* entry
to a code block. Its return value, if not
None, is a function that will be called at
the start of each executed line of code.
(Actually, the function must return itself
in order to continue tracing.) The trace
functions are called with three arguments:
a pointer to the current frame, a string
indicating why the function is called, and
an argument which depends on the situation.
The global trace function is also called
whenever an exception is detected. */
if (call_trace_protected(tstate->c_tracefunc,
tstate->c_traceobj,
tstate, frame,
PyTrace_CALL, Py_None)) {
/* Trace function raised an error */
return -1;
}
}
if (tstate->c_profilefunc != NULL) {
/* Similar for c_profilefunc, except it needn't
return itself and isn't called for "line" events */
if (call_trace_protected(tstate->c_profilefunc,
tstate->c_profileobj,
tstate, frame,
PyTrace_CALL, Py_None)) {
/* Profile function raised an error */
return -1;
}
}
return 0;
}
static int
trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *retval)
{
if (tstate->c_tracefunc) {
if (call_trace_protected(tstate->c_tracefunc, tstate->c_traceobj,
tstate, frame, PyTrace_RETURN, retval)) {
return -1;
}
}
if (tstate->c_profilefunc) {
if (call_trace_protected(tstate->c_profilefunc, tstate->c_profileobj,
tstate, frame, PyTrace_RETURN, retval)) {
return -1;
}
}
return 0;
}
int _Py_CheckRecursiveCallPy(
PyThreadState *tstate)
@ -730,7 +655,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
* strict stack discipline must be maintained.
*/
_PyCFrame *prev_cframe = tstate->cframe;
cframe.use_tracing = prev_cframe->use_tracing;
cframe.previous = prev_cframe;
tstate->cframe = &cframe;
@ -765,8 +689,11 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
if (_Py_EnterRecursivePy(tstate)) {
goto exit_unwind;
}
TRACE_FUNCTION_THROW_ENTRY();
DTRACE_FUNCTION_ENTRY();
/* Because this avoids the RESUME,
* we need to update instrumentation */
_Py_Instrument(frame->f_code, tstate->interp);
monitor_throw(tstate, frame, frame->prev_instr);
/* TO DO -- Monitor throw entry. */
goto resume_with_error;
}
@ -781,15 +708,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
assert(_PyInterpreterFrame_LASTI(frame) >= -1); \
/* Jump back to the last instruction executed... */ \
next_instr = frame->prev_instr + 1; \
stack_pointer = _PyFrame_GetStackPointer(frame); \
/* Set stackdepth to -1. \
Update when returning or calling trace function. \
Having stackdepth <= 0 ensures that invalid \
values are not visible to the cycle GC. \
We choose -1 rather than 0 to assist debugging. \
*/ \
frame->stacktop = -1;
stack_pointer = _PyFrame_GetStackPointer(frame);
start_frame:
if (_Py_EnterRecursivePy(tstate)) {
@ -845,91 +764,6 @@ handle_eval_breaker:
#include "generated_cases.c.h"
#if USE_COMPUTED_GOTOS
TARGET_DO_TRACING:
#else
case DO_TRACING:
#endif
{
assert(cframe.use_tracing);
assert(tstate->tracing == 0);
if (INSTR_OFFSET() >= frame->f_code->_co_firsttraceable) {
int instr_prev = _PyInterpreterFrame_LASTI(frame);
frame->prev_instr = next_instr;
NEXTOPARG();
// No _PyOpcode_Deopt here, since RESUME has no optimized forms:
if (opcode == RESUME) {
if (oparg < 2) {
CHECK_EVAL_BREAKER();
}
/* Call tracing */
TRACE_FUNCTION_ENTRY();
DTRACE_FUNCTION_ENTRY();
}
else {
/* line-by-line tracing support */
if (PyDTrace_LINE_ENABLED()) {
maybe_dtrace_line(frame, &tstate->trace_info, instr_prev);
}
if (cframe.use_tracing &&
tstate->c_tracefunc != NULL && !tstate->tracing) {
int err;
/* see maybe_call_line_trace()
for expository comments */
_PyFrame_SetStackPointer(frame, stack_pointer);
err = maybe_call_line_trace(tstate->c_tracefunc,
tstate->c_traceobj,
tstate, frame, instr_prev);
// Reload possibly changed frame fields:
stack_pointer = _PyFrame_GetStackPointer(frame);
frame->stacktop = -1;
// next_instr is only reloaded if tracing *does not* raise.
// This is consistent with the behavior of older Python
// versions. If a trace function sets a new f_lineno and
// *then* raises, we use the *old* location when searching
// for an exception handler, displaying the traceback, and
// so on:
if (err) {
// next_instr wasn't incremented at the start of this
// instruction. Increment it before handling the error,
// so that it looks the same as a "normal" instruction:
next_instr++;
goto error;
}
// Reload next_instr. Don't increment it, though, since
// we're going to re-dispatch to the "true" instruction now:
next_instr = frame->prev_instr;
}
}
}
NEXTOPARG();
PRE_DISPATCH_GOTO();
// No _PyOpcode_Deopt here, since EXTENDED_ARG has no optimized forms:
while (opcode == EXTENDED_ARG) {
// CPython hasn't ever traced the instruction after an EXTENDED_ARG.
// Inline the EXTENDED_ARG here, so we can avoid branching there:
INSTRUCTION_START(EXTENDED_ARG);
opcode = next_instr->op.code;
oparg = oparg << 8 | next_instr->op.arg;
// Make sure the next instruction isn't a RESUME, since that needs
// to trace properly (and shouldn't have an EXTENDED_ARG, anyways):
assert(opcode != RESUME);
PRE_DISPATCH_GOTO();
}
opcode = _PyOpcode_Deopt[opcode];
if (_PyOpcode_Caches[opcode]) {
uint16_t *counter = &next_instr[1].cache;
// The instruction is going to decrement the counter, so we need to
// increment it here to make sure it doesn't try to specialize:
if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) {
INCREMENT_ADAPTIVE_COUNTER(*counter);
}
}
DISPATCH_GOTO();
}
#if USE_COMPUTED_GOTOS
_unknown_opcode:
#else
@ -988,12 +822,7 @@ error:
PyTraceBack_Here(f);
}
}
if (tstate->c_tracefunc != NULL) {
/* Make sure state is set to FRAME_UNWINDING for tracing */
call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj,
tstate, frame);
}
monitor_raise(tstate, frame, next_instr-1);
exception_unwind:
{
@ -1012,8 +841,7 @@ exception_unwind:
}
assert(STACK_LEVEL() == 0);
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_UNWIND();
DTRACE_FUNCTION_EXIT();
monitor_unwind(tstate, frame, next_instr-1);
goto exit_unwind;
}
@ -1036,8 +864,10 @@ exception_unwind:
available to the handler,
so a program can emulate the
Python main loop. */
PUSH(_PyErr_GetRaisedException(tstate));
PyObject *exc = _PyErr_GetRaisedException(tstate);
PUSH(exc);
JUMPTO(handler);
monitor_handled(tstate, frame, next_instr, exc);
/* Resume normal execution */
DISPATCH();
}
@ -1054,7 +884,6 @@ exit_unwind:
if (frame == &entry_frame) {
/* Restore previous cframe and exit */
tstate->cframe = cframe.previous;
tstate->cframe->use_tracing = cframe.use_tracing;
assert(tstate->cframe->current_frame == frame->previous);
_Py_LeaveRecursiveCallTstate(tstate);
return NULL;
@ -2020,105 +1849,108 @@ Error:
return 0;
}
static void
call_exc_trace(Py_tracefunc func, PyObject *self,
PyThreadState *tstate,
_PyInterpreterFrame *f)
static int
do_monitor_exc(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr, int event)
{
PyObject *exc = _PyErr_GetRaisedException(tstate);
assert(exc && PyExceptionInstance_Check(exc));
PyObject *type = PyExceptionInstance_Class(exc);
PyObject *traceback = PyException_GetTraceback(exc);
if (traceback == NULL) {
traceback = Py_NewRef(Py_None);
}
PyObject *arg = PyTuple_Pack(3, type, exc, traceback);
Py_XDECREF(traceback);
if (arg == NULL) {
_PyErr_SetRaisedException(tstate, exc);
return;
}
int err = call_trace(func, self, tstate, f, PyTrace_EXCEPTION, arg);
Py_DECREF(arg);
assert(event < PY_MONITORING_UNGROUPED_EVENTS);
PyObject *exc = PyErr_GetRaisedException();
assert(exc != NULL);
int err = _Py_call_instrumentation_arg(tstate, event, frame, instr, exc);
if (err == 0) {
_PyErr_SetRaisedException(tstate, exc);
PyErr_SetRaisedException(exc);
}
else {
Py_XDECREF(exc);
Py_DECREF(exc);
}
return err;
}
static inline int
no_tools_for_event(PyThreadState *tstate, _PyInterpreterFrame *frame, int event)
{
_PyCoMonitoringData *data = frame->f_code->_co_monitoring;
if (data) {
if (data->active_monitors.tools[event] == 0) {
return 1;
}
}
else {
if (tstate->interp->monitors.tools[event] == 0) {
return 1;
}
}
return 0;
}
static void
monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_RAISE)) {
return;
}
do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_RAISE);
}
static int
call_trace_protected(Py_tracefunc func, PyObject *obj,
PyThreadState *tstate, _PyInterpreterFrame *frame,
int what, PyObject *arg)
monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
PyObject *exc = _PyErr_GetRaisedException(tstate);
int err = call_trace(func, obj, tstate, frame, what, arg);
if (err == 0)
{
_PyErr_SetRaisedException(tstate, exc);
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_STOP_ITERATION)) {
return 0;
}
else {
Py_XDECREF(exc);
return -1;
}
return do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_STOP_ITERATION);
}
static void
initialize_trace_info(PyTraceInfo *trace_info, _PyInterpreterFrame *frame)
monitor_unwind(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
PyCodeObject *code = frame->f_code;
if (trace_info->code != code) {
trace_info->code = code;
_PyCode_InitAddressRange(code, &trace_info->bounds);
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_UNWIND)) {
return;
}
_Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_UNWIND, frame, instr);
}
static void
monitor_handled(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr, PyObject *exc)
{
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_EXCEPTION_HANDLED)) {
return;
}
_Py_call_instrumentation_arg(tstate, PY_MONITORING_EVENT_EXCEPTION_HANDLED, frame, instr, exc);
}
static void
monitor_throw(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_THROW)) {
return;
}
_Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_THROW, frame, instr);
}
void
PyThreadState_EnterTracing(PyThreadState *tstate)
{
assert(tstate->tracing >= 0);
tstate->tracing++;
tstate->cframe->use_tracing = 0;
}
void
PyThreadState_LeaveTracing(PyThreadState *tstate)
{
assert(tstate->tracing > 0 && tstate->cframe->use_tracing == 0);
assert(tstate->tracing > 0);
tstate->tracing--;
_PyThreadState_UpdateTracingState(tstate);
}
static int
call_trace(Py_tracefunc func, PyObject *obj,
PyThreadState *tstate, _PyInterpreterFrame *frame,
int what, PyObject *arg)
{
int result;
if (tstate->tracing) {
return 0;
}
PyFrameObject *f = _PyFrame_GetFrameObject(frame);
if (f == NULL) {
return -1;
}
int old_what = tstate->tracing_what;
tstate->tracing_what = what;
PyThreadState_EnterTracing(tstate);
assert(_PyInterpreterFrame_LASTI(frame) >= 0);
if (_PyCode_InitLineArray(frame->f_code)) {
return -1;
}
f->f_lineno = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame));
result = func(obj, f, what, arg);
f->f_lineno = 0;
PyThreadState_LeaveTracing(tstate);
tstate->tracing_what = old_what;
return result;
}
PyObject*
_PyEval_CallTracing(PyObject *func, PyObject *args)
@ -2126,7 +1958,6 @@ _PyEval_CallTracing(PyObject *func, PyObject *args)
// Save and disable tracing
PyThreadState *tstate = _PyThreadState_GET();
int save_tracing = tstate->tracing;
int save_use_tracing = tstate->cframe->use_tracing;
tstate->tracing = 0;
// Call the tracing function
@ -2134,81 +1965,9 @@ _PyEval_CallTracing(PyObject *func, PyObject *args)
// Restore tracing
tstate->tracing = save_tracing;
tstate->cframe->use_tracing = save_use_tracing;
return result;
}
/* See Objects/lnotab_notes.txt for a description of how tracing works. */
static int
maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
PyThreadState *tstate, _PyInterpreterFrame *frame, int instr_prev)
{
int result = 0;
/* If the last instruction falls at the start of a line or if it
represents a jump backwards, update the frame's line number and
then call the trace function if we're tracing source lines.
*/
if (_PyCode_InitLineArray(frame->f_code)) {
return -1;
}
int lastline;
if (instr_prev <= frame->f_code->_co_firsttraceable) {
lastline = -1;
}
else {
lastline = _PyCode_LineNumberFromArray(frame->f_code, instr_prev);
}
int line = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame));
PyFrameObject *f = _PyFrame_GetFrameObject(frame);
if (f == NULL) {
return -1;
}
if (line != -1 && f->f_trace_lines) {
/* Trace backward edges (except in 'yield from') or if line number has changed */
int trace = line != lastline ||
(_PyInterpreterFrame_LASTI(frame) < instr_prev &&
// SEND has no quickened forms, so no need to use _PyOpcode_Deopt
// here:
frame->prev_instr->op.code != SEND);
if (trace) {
result = call_trace(func, obj, tstate, frame, PyTrace_LINE, Py_None);
}
}
/* Always emit an opcode event if we're tracing all opcodes. */
if (f->f_trace_opcodes && result == 0) {
result = call_trace(func, obj, tstate, frame, PyTrace_OPCODE, Py_None);
}
return result;
}
int
_PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) {
return -1;
}
tstate->c_profilefunc = func;
PyObject *old_profileobj = tstate->c_profileobj;
tstate->c_profileobj = Py_XNewRef(arg);
/* Flag that tracing or profiling is turned on */
_PyThreadState_UpdateTracingState(tstate);
// gh-98257: Only call Py_XDECREF() once the new profile function is fully
// set, so it's safe to call sys.setprofile() again (reentrant call).
Py_XDECREF(old_profileobj);
return 0;
}
void
PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
{
@ -2240,33 +1999,6 @@ PyEval_SetProfileAllThreads(Py_tracefunc func, PyObject *arg)
}
}
int
_PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) {
return -1;
}
tstate->c_tracefunc = func;
PyObject *old_traceobj = tstate->c_traceobj;
tstate->c_traceobj = Py_XNewRef(arg);
/* Flag that tracing or profiling is turned on */
_PyThreadState_UpdateTracingState(tstate);
// gh-98257: Only call Py_XDECREF() once the new trace function is fully
// set, so it's safe to call sys.settrace() again (reentrant call).
Py_XDECREF(old_traceobj);
return 0;
}
void
PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
{
@ -2492,114 +2224,6 @@ PyEval_GetFuncDesc(PyObject *func)
return " object";
}
#define C_TRACE(x, call) \
if (use_tracing && tstate->c_profilefunc) { \
if (call_trace(tstate->c_profilefunc, tstate->c_profileobj, \
tstate, tstate->cframe->current_frame, \
PyTrace_C_CALL, func)) { \
x = NULL; \
} \
else { \
x = call; \
if (tstate->c_profilefunc != NULL) { \
if (x == NULL) { \
call_trace_protected(tstate->c_profilefunc, \
tstate->c_profileobj, \
tstate, tstate->cframe->current_frame, \
PyTrace_C_EXCEPTION, func); \
/* XXX should pass (type, value, tb) */ \
} else { \
if (call_trace(tstate->c_profilefunc, \
tstate->c_profileobj, \
tstate, tstate->cframe->current_frame, \
PyTrace_C_RETURN, func)) { \
Py_DECREF(x); \
x = NULL; \
} \
} \
} \
} \
} else { \
x = call; \
}
static PyObject *
trace_call_function(PyThreadState *tstate,
PyObject *func,
PyObject **args, Py_ssize_t nargs,
PyObject *kwnames)
{
int use_tracing = 1;
PyObject *x;
if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) {
C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames));
return x;
}
else if (Py_IS_TYPE(func, &PyMethodDescr_Type) && nargs > 0) {
/* We need to create a temporary bound method as argument
for profiling.
If nargs == 0, then this cannot work because we have no
"self". In any case, the call itself would raise
TypeError (foo needs an argument), so we just skip
profiling. */
PyObject *self = args[0];
func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self));
if (func == NULL) {
return NULL;
}
C_TRACE(x, PyObject_Vectorcall(func,
args+1, nargs-1,
kwnames));
Py_DECREF(func);
return x;
}
return PyObject_Vectorcall(func, args, nargs | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames);
}
static PyObject *
do_call_core(PyThreadState *tstate,
PyObject *func,
PyObject *callargs,
PyObject *kwdict,
int use_tracing
)
{
PyObject *result;
if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) {
C_TRACE(result, PyObject_Call(func, callargs, kwdict));
return result;
}
else if (Py_IS_TYPE(func, &PyMethodDescr_Type)) {
Py_ssize_t nargs = PyTuple_GET_SIZE(callargs);
if (nargs > 0 && use_tracing) {
/* We need to create a temporary bound method as argument
for profiling.
If nargs == 0, then this cannot work because we have no
"self". In any case, the call itself would raise
TypeError (foo needs an argument), so we just skip
profiling. */
PyObject *self = PyTuple_GET_ITEM(callargs, 0);
func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self));
if (func == NULL) {
return NULL;
}
C_TRACE(result, _PyObject_FastCallDictTstate(
tstate, func,
&_PyTuple_ITEMS(callargs)[1],
nargs - 1,
kwdict));
Py_DECREF(func);
return result;
}
}
EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
return PyObject_Call(func, callargs, kwdict);
}
/* Extract a slice index from a PyLong or an object with the
nb_index slot defined, and store in *pi.
Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
@ -2973,69 +2597,6 @@ PyUnstable_Eval_RequestCodeExtraIndex(freefunc free)
return new_index;
}
static void
dtrace_function_entry(_PyInterpreterFrame *frame)
{
const char *filename;
const char *funcname;
int lineno;
PyCodeObject *code = frame->f_code;
filename = PyUnicode_AsUTF8(code->co_filename);
funcname = PyUnicode_AsUTF8(code->co_name);
lineno = _PyInterpreterFrame_GetLine(frame);
PyDTrace_FUNCTION_ENTRY(filename, funcname, lineno);
}
static void
dtrace_function_return(_PyInterpreterFrame *frame)
{
const char *filename;
const char *funcname;
int lineno;
PyCodeObject *code = frame->f_code;
filename = PyUnicode_AsUTF8(code->co_filename);
funcname = PyUnicode_AsUTF8(code->co_name);
lineno = _PyInterpreterFrame_GetLine(frame);
PyDTrace_FUNCTION_RETURN(filename, funcname, lineno);
}
/* DTrace equivalent of maybe_call_line_trace. */
static void
maybe_dtrace_line(_PyInterpreterFrame *frame,
PyTraceInfo *trace_info,
int instr_prev)
{
const char *co_filename, *co_name;
/* If the last instruction executed isn't in the current
instruction window, reset the window.
*/
initialize_trace_info(trace_info, frame);
int lastline = _PyCode_CheckLineNumber(instr_prev*sizeof(_Py_CODEUNIT), &trace_info->bounds);
int addr = _PyInterpreterFrame_LASTI(frame) * sizeof(_Py_CODEUNIT);
int line = _PyCode_CheckLineNumber(addr, &trace_info->bounds);
if (line != -1) {
/* Trace backward edges or first instruction of a new line */
if (_PyInterpreterFrame_LASTI(frame) < instr_prev ||
(line != lastline && addr == trace_info->bounds.ar_start))
{
co_filename = PyUnicode_AsUTF8(frame->f_code->co_filename);
if (!co_filename) {
co_filename = "?";
}
co_name = PyUnicode_AsUTF8(frame->f_code->co_name);
if (!co_name) {
co_name = "?";
}
PyDTrace_LINE(co_filename, co_name, line);
}
}
}
/* Implement Py_EnterRecursiveCall() and Py_LeaveRecursiveCall() as functions
for the limited API. */
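The monitor_raise(), monitor_handled() and related helpers above only call into the instrumentation machinery when some tool has actually requested the event (no_tools_for_event() short-circuits otherwise), so code that is not being monitored pays essentially nothing. A hedged sketch of a tool that would exercise those paths, again assuming the PEP 669 sys.monitoring module; names are illustrative:

import sys
mon = sys.monitoring

TOOL = mon.DEBUGGER_ID
mon.use_tool_id(TOOL, "exc-watcher")

def on_raise(code, instruction_offset, exception):
    # Reached via monitor_raise() -> _Py_call_instrumentation_arg().
    print("raised", type(exception).__name__, "in", code.co_name)

def on_handled(code, instruction_offset, exception):
    # Reached via monitor_handled() once a handler has been found.
    print("handled", type(exception).__name__, "in", code.co_name)

mon.register_callback(TOOL, mon.events.RAISE, on_raise)
mon.register_callback(TOOL, mon.events.EXCEPTION_HANDLED, on_handled)
mon.set_events(TOOL, mon.events.RAISE | mon.events.EXCEPTION_HANDLED)

try:
    int("not a number")                   # raises and handles a ValueError
except ValueError:
    pass

mon.set_events(TOOL, 0)
mon.free_tool_id(TOOL)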

Python/ceval_macros.h
View file

@ -93,8 +93,6 @@
{ \
NEXTOPARG(); \
PRE_DISPATCH_GOTO(); \
assert(cframe.use_tracing == 0 || cframe.use_tracing == 255); \
opcode |= cframe.use_tracing OR_DTRACE_LINE; \
DISPATCH_GOTO(); \
}
@ -102,7 +100,6 @@
{ \
opcode = next_instr->op.code; \
PRE_DISPATCH_GOTO(); \
opcode |= cframe.use_tracing OR_DTRACE_LINE; \
DISPATCH_GOTO(); \
}
@ -183,7 +180,7 @@ GETITEM(PyObject *v, Py_ssize_t i) {
#define PREDICT(next_op) \
do { \
_Py_CODEUNIT word = *next_instr; \
opcode = word.op.code | cframe.use_tracing OR_DTRACE_LINE; \
opcode = word.op.code; \
if (opcode == next_op) { \
oparg = word.op.arg; \
INSTRUCTION_START(next_op); \
@ -283,47 +280,6 @@ GETITEM(PyObject *v, Py_ssize_t i) {
#define BUILTINS() frame->f_builtins
#define LOCALS() frame->f_locals
/* Shared opcode macros */
#define TRACE_FUNCTION_EXIT() \
if (cframe.use_tracing) { \
if (trace_function_exit(tstate, frame, retval)) { \
Py_DECREF(retval); \
goto exit_unwind; \
} \
}
#define DTRACE_FUNCTION_EXIT() \
if (PyDTrace_FUNCTION_RETURN_ENABLED()) { \
dtrace_function_return(frame); \
}
#define TRACE_FUNCTION_UNWIND() \
if (cframe.use_tracing) { \
/* Since we are already unwinding, \
* we don't care if this raises */ \
trace_function_exit(tstate, frame, NULL); \
}
#define TRACE_FUNCTION_ENTRY() \
if (cframe.use_tracing) { \
_PyFrame_SetStackPointer(frame, stack_pointer); \
int err = trace_function_entry(tstate, frame); \
stack_pointer = _PyFrame_GetStackPointer(frame); \
frame->stacktop = -1; \
if (err) { \
goto error; \
} \
}
#define TRACE_FUNCTION_THROW_ENTRY() \
if (cframe.use_tracing) { \
assert(frame->stacktop >= 0); \
if (trace_function_entry(tstate, frame)) { \
goto exit_unwind; \
} \
}
#define DTRACE_FUNCTION_ENTRY() \
if (PyDTrace_FUNCTION_ENTRY_ENABLED()) { \
dtrace_function_entry(frame); \
@ -371,3 +327,18 @@ do { \
_Py_DECREF_NO_DEALLOC(right); \
} \
} while (0)
// If a trace function sets a new f_lineno and
// *then* raises, we use the destination when searching
// for an exception handler, displaying the traceback, and so on
#define INSTRUMENTED_JUMP(src, dest, event) \
do { \
_PyFrame_SetStackPointer(frame, stack_pointer); \
int err = _Py_call_instrumentation_jump(tstate, event, frame, src, dest); \
stack_pointer = _PyFrame_GetStackPointer(frame); \
if (err) { \
next_instr = (dest)+1; \
goto error; \
} \
next_instr = frame->prev_instr; \
} while (0);
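INSTRUMENTED_JUMP is what the INSTRUMENTED_POP_JUMP_* and INSTRUMENTED_FOR_ITER handlers use to report a BRANCH (or JUMP) event for a source/destination pair. A sketch of a branch watcher built on that, assuming the PEP 669 sys.monitoring API; returning DISABLE from a local-event callback is how a tool switches the event back off at that location:

import sys
mon = sys.monitoring

TOOL = mon.COVERAGE_ID
mon.use_tool_id(TOOL, "branch-watch")

seen = set()

def on_branch(code, instruction_offset, destination_offset):
    seen.add((code.co_name, instruction_offset, destination_offset))
    return mon.DISABLE        # stop reporting this particular branch location

mon.register_callback(TOOL, mon.events.BRANCH, on_branch)
mon.set_events(TOOL, mon.events.BRANCH)

for i in range(4):
    if i % 2:                 # each monitored branch location is reported at most once
        pass

mon.set_events(TOOL, 0)
mon.restart_events()          # re-arm anything disabled via DISABLE
mon.free_tool_id(TOOL)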

Python/clinic/instrumentation.c.h (new generated file, 311 lines)
View file

@ -0,0 +1,311 @@
/*[clinic input]
preserve
[clinic start generated code]*/
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
# include "pycore_gc.h" // PyGC_Head
# include "pycore_runtime.h" // _Py_ID()
#endif
PyDoc_STRVAR(monitoring_use_tool_id__doc__,
"use_tool_id($module, tool_id, name, /)\n"
"--\n"
"\n");
#define MONITORING_USE_TOOL_ID_METHODDEF \
{"use_tool_id", _PyCFunction_CAST(monitoring_use_tool_id), METH_FASTCALL, monitoring_use_tool_id__doc__},
static PyObject *
monitoring_use_tool_id_impl(PyObject *module, int tool_id, PyObject *name);
static PyObject *
monitoring_use_tool_id(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
PyObject *name;
if (!_PyArg_CheckPositional("use_tool_id", nargs, 2, 2)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
name = args[1];
return_value = monitoring_use_tool_id_impl(module, tool_id, name);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_free_tool_id__doc__,
"free_tool_id($module, tool_id, /)\n"
"--\n"
"\n");
#define MONITORING_FREE_TOOL_ID_METHODDEF \
{"free_tool_id", (PyCFunction)monitoring_free_tool_id, METH_O, monitoring_free_tool_id__doc__},
static PyObject *
monitoring_free_tool_id_impl(PyObject *module, int tool_id);
static PyObject *
monitoring_free_tool_id(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
int tool_id;
tool_id = _PyLong_AsInt(arg);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_free_tool_id_impl(module, tool_id);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_get_tool__doc__,
"get_tool($module, tool_id, /)\n"
"--\n"
"\n");
#define MONITORING_GET_TOOL_METHODDEF \
{"get_tool", (PyCFunction)monitoring_get_tool, METH_O, monitoring_get_tool__doc__},
static PyObject *
monitoring_get_tool_impl(PyObject *module, int tool_id);
static PyObject *
monitoring_get_tool(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
int tool_id;
tool_id = _PyLong_AsInt(arg);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_get_tool_impl(module, tool_id);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_register_callback__doc__,
"register_callback($module, tool_id, event, func, /)\n"
"--\n"
"\n");
#define MONITORING_REGISTER_CALLBACK_METHODDEF \
{"register_callback", _PyCFunction_CAST(monitoring_register_callback), METH_FASTCALL, monitoring_register_callback__doc__},
static PyObject *
monitoring_register_callback_impl(PyObject *module, int tool_id, int event,
PyObject *func);
static PyObject *
monitoring_register_callback(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
int event;
PyObject *func;
if (!_PyArg_CheckPositional("register_callback", nargs, 3, 3)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
event = _PyLong_AsInt(args[1]);
if (event == -1 && PyErr_Occurred()) {
goto exit;
}
func = args[2];
return_value = monitoring_register_callback_impl(module, tool_id, event, func);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_get_events__doc__,
"get_events($module, tool_id, /)\n"
"--\n"
"\n");
#define MONITORING_GET_EVENTS_METHODDEF \
{"get_events", (PyCFunction)monitoring_get_events, METH_O, monitoring_get_events__doc__},
static int
monitoring_get_events_impl(PyObject *module, int tool_id);
static PyObject *
monitoring_get_events(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
int tool_id;
int _return_value;
tool_id = _PyLong_AsInt(arg);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
_return_value = monitoring_get_events_impl(module, tool_id);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
return_value = PyLong_FromLong((long)_return_value);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_set_events__doc__,
"set_events($module, tool_id, event_set, /)\n"
"--\n"
"\n");
#define MONITORING_SET_EVENTS_METHODDEF \
{"set_events", _PyCFunction_CAST(monitoring_set_events), METH_FASTCALL, monitoring_set_events__doc__},
static PyObject *
monitoring_set_events_impl(PyObject *module, int tool_id, int event_set);
static PyObject *
monitoring_set_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
int event_set;
if (!_PyArg_CheckPositional("set_events", nargs, 2, 2)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
event_set = _PyLong_AsInt(args[1]);
if (event_set == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_set_events_impl(module, tool_id, event_set);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_get_local_events__doc__,
"get_local_events($module, tool_id, code, /)\n"
"--\n"
"\n");
#define MONITORING_GET_LOCAL_EVENTS_METHODDEF \
{"get_local_events", _PyCFunction_CAST(monitoring_get_local_events), METH_FASTCALL, monitoring_get_local_events__doc__},
static int
monitoring_get_local_events_impl(PyObject *module, int tool_id,
PyObject *code);
static PyObject *
monitoring_get_local_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
PyObject *code;
int _return_value;
if (!_PyArg_CheckPositional("get_local_events", nargs, 2, 2)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
code = args[1];
_return_value = monitoring_get_local_events_impl(module, tool_id, code);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
return_value = PyLong_FromLong((long)_return_value);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_set_local_events__doc__,
"set_local_events($module, tool_id, code, event_set, /)\n"
"--\n"
"\n");
#define MONITORING_SET_LOCAL_EVENTS_METHODDEF \
{"set_local_events", _PyCFunction_CAST(monitoring_set_local_events), METH_FASTCALL, monitoring_set_local_events__doc__},
static PyObject *
monitoring_set_local_events_impl(PyObject *module, int tool_id,
PyObject *code, int event_set);
static PyObject *
monitoring_set_local_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int tool_id;
PyObject *code;
int event_set;
if (!_PyArg_CheckPositional("set_local_events", nargs, 3, 3)) {
goto exit;
}
tool_id = _PyLong_AsInt(args[0]);
if (tool_id == -1 && PyErr_Occurred()) {
goto exit;
}
code = args[1];
event_set = _PyLong_AsInt(args[2]);
if (event_set == -1 && PyErr_Occurred()) {
goto exit;
}
return_value = monitoring_set_local_events_impl(module, tool_id, code, event_set);
exit:
return return_value;
}
PyDoc_STRVAR(monitoring_restart_events__doc__,
"restart_events($module, /)\n"
"--\n"
"\n");
#define MONITORING_RESTART_EVENTS_METHODDEF \
{"restart_events", (PyCFunction)monitoring_restart_events, METH_NOARGS, monitoring_restart_events__doc__},
static PyObject *
monitoring_restart_events_impl(PyObject *module);
static PyObject *
monitoring_restart_events(PyObject *module, PyObject *Py_UNUSED(ignored))
{
return monitoring_restart_events_impl(module);
}
PyDoc_STRVAR(monitoring__all_events__doc__,
"_all_events($module, /)\n"
"--\n"
"\n");
#define MONITORING__ALL_EVENTS_METHODDEF \
{"_all_events", (PyCFunction)monitoring__all_events, METH_NOARGS, monitoring__all_events__doc__},
static PyObject *
monitoring__all_events_impl(PyObject *module);
static PyObject *
monitoring__all_events(PyObject *module, PyObject *Py_UNUSED(ignored))
{
return monitoring__all_events_impl(module);
}
/*[clinic end generated code: output=11cc0803875b3ffa input=a9049054013a1b77]*/
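These clinic-generated wrappers back the Python-level functions of the new sys.monitoring namespace. A minimal usage sketch, assuming the sys.monitoring API as exposed by this commit; the tool id choice, tool name and sample code object are illustrative, not part of the diff:

    import sys

    mon = sys.monitoring
    E = mon.events

    TOOL = mon.PROFILER_ID                  # any free tool id works; PROFILER_ID is just an example
    mon.use_tool_id(TOOL, "example-tool")   # claim the id before configuring it

    # Global events for this tool, then read the mask back.
    mon.set_events(TOOL, E.PY_START | E.PY_RETURN)
    assert mon.get_events(TOOL) == E.PY_START | E.PY_RETURN

    # Per-code-object ("local") events use the *_local_events pair.
    def sample():
        pass
    mon.set_local_events(TOOL, sample.__code__, E.LINE)

    mon.set_events(TOOL, E.NO_EVENTS)
    mon.free_tool_id(TOOL)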


@ -1427,8 +1427,7 @@ compiler_add_yield_from(struct compiler *c, location loc, int await)
ADDOP(c, loc, CLEANUP_THROW);
USE_LABEL(c, exit);
ADDOP_I(c, loc, SWAP, 2);
ADDOP(c, loc, POP_TOP);
ADDOP(c, loc, END_SEND);
return SUCCESS;
}
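The compile.c hunk above switches the yield-from/await cleanup from SWAP/POP_TOP to the new END_SEND instruction, which pops the receiver from under the result in one step. A quick way to see the result in generated bytecode, assuming an interpreter built from this branch:

    import dis

    async def demo(x):
        return await x

    # The await sequence now ends with END_SEND rather than SWAP 2 / POP_TOP.
    dis.dis(demo)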

(File diff suppressed because it is too large.)

Python/instrumentation.c (new file, 2021 lines): diff suppressed because it is too large.

Python/legacy_tracing.c (new file, 528 lines):
@ -0,0 +1,528 @@
/* Support for legacy tracing on top of PEP 669 instrumentation
* Provides callables to forward PEP 669 events to legacy events.
*/
#include <stddef.h>
#include "Python.h"
#include "pycore_ceval.h"
#include "pycore_object.h"
#include "pycore_sysmodule.h"
typedef struct _PyLegacyEventHandler {
PyObject_HEAD
vectorcallfunc vectorcall;
int event;
} _PyLegacyEventHandler;
/* The Py_tracefunc function expects the following arguments:
* obj: the trace object (PyObject *)
* frame: the current frame (PyFrameObject *)
* kind: the kind of event, see PyTrace_XXX #defines (int)
* arg: The arg (a PyObject *)
*/
static PyObject *
call_profile_func(_PyLegacyEventHandler *self, PyObject *arg)
{
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_profilefunc == NULL) {
Py_RETURN_NONE;
}
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling profile function.");
return NULL;
}
Py_INCREF(frame);
int err = tstate->c_profilefunc(tstate->c_profileobj, frame, self->event, arg);
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
sys_profile_func2(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 2);
return call_profile_func(self, Py_None);
}
static PyObject *
sys_profile_func3(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
return call_profile_func(self, args[2]);
}
static PyObject *
sys_profile_call_or_return(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 4);
PyObject *callable = args[2];
if (PyCFunction_Check(callable)) {
return call_profile_func(self, callable);
}
if (Py_TYPE(callable) == &PyMethodDescr_Type) {
PyObject *self_arg = args[3];
/* For backwards compatibility need to
* convert to builtin method */
/* If no arg, skip */
if (self_arg == &_PyInstrumentation_MISSING) {
Py_RETURN_NONE;
}
PyObject *meth = Py_TYPE(callable)->tp_descr_get(
callable, self_arg, (PyObject*)Py_TYPE(self_arg));
if (meth == NULL) {
return NULL;
}
PyObject *res = call_profile_func(self, meth);
Py_DECREF(meth);
return res;
}
Py_RETURN_NONE;
}
static PyObject *
call_trace_func(_PyLegacyEventHandler *self, PyObject *arg)
{
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
Py_INCREF(frame);
int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, arg);
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
sys_trace_exception_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
PyObject *exc = args[2];
assert(PyExceptionInstance_Check(exc));
PyObject *type = (PyObject *)Py_TYPE(exc);
PyObject *tb = PyException_GetTraceback(exc);
if (tb == NULL) {
tb = Py_NewRef(Py_None);
}
PyObject *tuple = PyTuple_Pack(3, type, exc, tb);
Py_DECREF(tb);
if (tuple == NULL) {
return NULL;
}
PyObject *res = call_trace_func(self, tuple);
Py_DECREF(tuple);
return res;
}
static PyObject *
sys_trace_func2(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 2);
return call_trace_func(self, Py_None);
}
static PyObject *
sys_trace_return(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(!PyErr_Occurred());
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
assert(PyCode_Check(args[0]));
PyObject *val = args[2];
PyObject *res = call_trace_func(self, val);
return res;
}
static PyObject *
sys_trace_yield(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 3);
return call_trace_func(self, args[2]);
}
static PyObject *
sys_trace_instruction_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
assert(PyVectorcall_NARGS(nargsf) == 2);
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
if (!frame->f_trace_opcodes) {
Py_RETURN_NONE;
}
Py_INCREF(frame);
PyThreadState *tstate = _PyThreadState_GET();
int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, Py_None);
frame->f_lineno = 0;
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
trace_line(
PyThreadState *tstate, _PyLegacyEventHandler *self,
PyFrameObject *frame, int line
) {
if (!frame->f_trace_lines) {
Py_RETURN_NONE;
}
if (line < 0) {
Py_RETURN_NONE;
}
frame->f_last_traced_line = line;
Py_INCREF(frame);
frame->f_lineno = line;
int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, Py_None);
frame->f_lineno = 0;
Py_DECREF(frame);
if (err) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject *
sys_trace_line_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
assert(PyVectorcall_NARGS(nargsf) == 2);
int line = _PyLong_AsInt(args[1]);
assert(line >= 0);
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
assert(args[0] == (PyObject *)frame->f_frame->f_code);
if (frame->f_last_traced_line == line) {
/* Already traced this line */
Py_RETURN_NONE;
}
return trace_line(tstate, self, frame, line);
}
static PyObject *
sys_trace_jump_func(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
assert(PyVectorcall_NARGS(nargsf) == 3);
int from = _PyLong_AsInt(args[1])/sizeof(_Py_CODEUNIT);
assert(from >= 0);
int to = _PyLong_AsInt(args[2])/sizeof(_Py_CODEUNIT);
assert(to >= 0);
PyFrameObject *frame = PyEval_GetFrame();
if (frame == NULL) {
PyErr_SetString(PyExc_SystemError,
"Missing frame when calling trace function.");
return NULL;
}
if (!frame->f_trace_lines) {
Py_RETURN_NONE;
}
PyCodeObject *code = (PyCodeObject *)args[0];
assert(PyCode_Check(code));
assert(code == frame->f_frame->f_code);
/* We can call _Py_Instrumentation_GetLine because we always set
* line events for tracing */
int to_line = _Py_Instrumentation_GetLine(code, to);
/* Backward jump: Always generate event
* Forward jump: Only generate event if jumping to different line. */
if (to > from && frame->f_last_traced_line == to_line) {
/* Already traced this line */
Py_RETURN_NONE;
}
return trace_line(tstate, self, frame, to_line);
}
/* We don't care about the exception here,
* we just treat it as a possible new line
*/
static PyObject *
sys_trace_exception_handled(
_PyLegacyEventHandler *self, PyObject *const *args,
size_t nargsf, PyObject *kwnames
) {
assert(kwnames == NULL);
PyThreadState *tstate = _PyThreadState_GET();
if (tstate->c_tracefunc == NULL) {
Py_RETURN_NONE;
}
assert(PyVectorcall_NARGS(nargsf) == 3);
PyFrameObject *frame = PyEval_GetFrame();
PyCodeObject *code = (PyCodeObject *)args[0];
assert(PyCode_Check(code));
assert(code == frame->f_frame->f_code);
assert(PyLong_Check(args[1]));
int offset = _PyLong_AsInt(args[1])/sizeof(_Py_CODEUNIT);
/* We can call _Py_Instrumentation_GetLine because we always set
* line events for tracing */
int line = _Py_Instrumentation_GetLine(code, offset);
if (frame->f_last_traced_line == line) {
/* Already traced this line */
Py_RETURN_NONE;
}
return trace_line(tstate, self, frame, line);
}
PyTypeObject _PyLegacyEventHandler_Type = {
_PyVarObject_IMMORTAL_INIT(&PyType_Type, 0),
"sys.legacy_event_handler",
sizeof(_PyLegacyEventHandler),
.tp_dealloc = (destructor)PyObject_Free,
.tp_vectorcall_offset = offsetof(_PyLegacyEventHandler, vectorcall),
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
Py_TPFLAGS_HAVE_VECTORCALL | Py_TPFLAGS_DISALLOW_INSTANTIATION,
.tp_call = PyVectorcall_Call,
};
static int
set_callbacks(int tool, vectorcallfunc vectorcall, int legacy_event, int event1, int event2)
{
_PyLegacyEventHandler *callback =
PyObject_NEW(_PyLegacyEventHandler, &_PyLegacyEventHandler_Type);
if (callback == NULL) {
return -1;
}
callback->vectorcall = vectorcall;
callback->event = legacy_event;
Py_XDECREF(_PyMonitoring_RegisterCallback(tool, event1, (PyObject *)callback));
if (event2 >= 0) {
Py_XDECREF(_PyMonitoring_RegisterCallback(tool, event2, (PyObject *)callback));
}
Py_DECREF(callback);
return 0;
}
#ifndef NDEBUG
/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and
PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen
when a thread continues to run after Python finalization, especially
daemon threads. */
static int
is_tstate_valid(PyThreadState *tstate)
{
assert(!_PyMem_IsPtrFreed(tstate));
assert(!_PyMem_IsPtrFreed(tstate->interp));
return 1;
}
#endif
int
_PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) {
return -1;
}
/* Setup PEP 669 monitoring callbacks and events. */
if (!tstate->interp->sys_profile_initialized) {
tstate->interp->sys_profile_initialized = true;
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_func2, PyTrace_CALL,
PY_MONITORING_EVENT_PY_START, PY_MONITORING_EVENT_PY_RESUME)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_func3, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_RETURN, PY_MONITORING_EVENT_PY_YIELD)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_func2, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_UNWIND, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_call_or_return, PyTrace_C_CALL,
PY_MONITORING_EVENT_CALL, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_call_or_return, PyTrace_C_RETURN,
PY_MONITORING_EVENT_C_RETURN, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID,
(vectorcallfunc)sys_profile_call_or_return, PyTrace_C_EXCEPTION,
PY_MONITORING_EVENT_C_RAISE, -1)) {
return -1;
}
}
int delta = (func != NULL) - (tstate->c_profilefunc != NULL);
tstate->c_profilefunc = func;
PyObject *old_profileobj = tstate->c_profileobj;
tstate->c_profileobj = Py_XNewRef(arg);
Py_XDECREF(old_profileobj);
tstate->interp->sys_profiling_threads += delta;
assert(tstate->interp->sys_profiling_threads >= 0);
uint32_t events = 0;
if (tstate->interp->sys_profiling_threads) {
events =
(1 << PY_MONITORING_EVENT_PY_START) | (1 << PY_MONITORING_EVENT_PY_RESUME) |
(1 << PY_MONITORING_EVENT_PY_RETURN) | (1 << PY_MONITORING_EVENT_PY_YIELD) |
(1 << PY_MONITORING_EVENT_CALL) | (1 << PY_MONITORING_EVENT_PY_UNWIND);
}
return _PyMonitoring_SetEvents(PY_MONITORING_SYS_PROFILE_ID, events);
}
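The event mask built in _PyEval_SetProfile above can be expressed with the public flag constants. A sketch, assuming sys.monitoring.events exposes the same PY_START/PY_RESUME/... flags that the PY_MONITORING_EVENT_* defines map to:

    import sys

    E = sys.monitoring.events
    # Mirrors the mask in _PyEval_SetProfile: enabled while at least one thread
    # has a profile function installed, cleared when the count drops to zero.
    PROFILE_EVENTS = (E.PY_START | E.PY_RESUME | E.PY_RETURN
                      | E.PY_YIELD | E.CALL | E.PY_UNWIND)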
int
_PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
{
assert(is_tstate_valid(tstate));
/* The caller must hold the GIL */
assert(PyGILState_Check());
/* Call _PySys_Audit() in the context of the current thread state,
even if tstate is not the current thread state. */
PyThreadState *current_tstate = _PyThreadState_GET();
if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) {
return -1;
}
assert(tstate->interp->sys_tracing_threads >= 0);
/* Setup PEP 669 monitoring callbacks and events. */
if (!tstate->interp->sys_trace_initialized) {
tstate->interp->sys_trace_initialized = true;
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_func2, PyTrace_CALL,
PY_MONITORING_EVENT_PY_START, PY_MONITORING_EVENT_PY_RESUME)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_func2, PyTrace_CALL,
PY_MONITORING_EVENT_PY_THROW, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_return, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_RETURN, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_yield, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_YIELD, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_exception_func, PyTrace_EXCEPTION,
PY_MONITORING_EVENT_RAISE, PY_MONITORING_EVENT_STOP_ITERATION)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_line_func, PyTrace_LINE,
PY_MONITORING_EVENT_LINE, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_func2, PyTrace_RETURN,
PY_MONITORING_EVENT_PY_UNWIND, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_jump_func, PyTrace_LINE,
PY_MONITORING_EVENT_JUMP, PY_MONITORING_EVENT_BRANCH)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_instruction_func, PyTrace_OPCODE,
PY_MONITORING_EVENT_INSTRUCTION, -1)) {
return -1;
}
if (set_callbacks(PY_MONITORING_SYS_TRACE_ID,
(vectorcallfunc)sys_trace_exception_handled, PyTrace_LINE,
PY_MONITORING_EVENT_EXCEPTION_HANDLED, -1)) {
return -1;
}
}
int delta = (func != NULL) - (tstate->c_tracefunc != NULL);
tstate->c_tracefunc = func;
PyObject *old_traceobj = tstate->c_traceobj;
tstate->c_traceobj = Py_XNewRef(arg);
Py_XDECREF(old_traceobj);
tstate->interp->sys_tracing_threads += delta;
assert(tstate->interp->sys_tracing_threads >= 0);
uint32_t events = 0;
if (tstate->interp->sys_tracing_threads) {
events =
(1 << PY_MONITORING_EVENT_PY_START) | (1 << PY_MONITORING_EVENT_PY_RESUME) |
(1 << PY_MONITORING_EVENT_PY_RETURN) | (1 << PY_MONITORING_EVENT_PY_YIELD) |
(1 << PY_MONITORING_EVENT_RAISE) | (1 << PY_MONITORING_EVENT_LINE) |
(1 << PY_MONITORING_EVENT_JUMP) | (1 << PY_MONITORING_EVENT_BRANCH) |
(1 << PY_MONITORING_EVENT_PY_UNWIND) | (1 << PY_MONITORING_EVENT_PY_THROW) |
(1 << PY_MONITORING_EVENT_STOP_ITERATION) |
(1 << PY_MONITORING_EVENT_EXCEPTION_HANDLED);
if (tstate->interp->f_opcode_trace_set) {
events |= (1 << PY_MONITORING_EVENT_INSTRUCTION);
}
}
return _PyMonitoring_SetEvents(PY_MONITORING_SYS_TRACE_ID, events);
}
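_PyEval_SetTrace wires sys.settrace onto the same machinery a tool can use directly. Below is a minimal line logger written against the new API instead of sys.settrace, assuming the public sys.monitoring names (DEBUGGER_ID and the LINE callback signature); the tool name and callback are illustrative:

    import sys

    mon = sys.monitoring
    E = mon.events

    mon.use_tool_id(mon.DEBUGGER_ID, "line-logger")

    def on_line(code, line_number):
        # LINE callbacks receive the code object and the line number.
        print(f"{code.co_filename}:{line_number}")

    mon.register_callback(mon.DEBUGGER_ID, E.LINE, on_line)
    mon.set_events(mon.DEBUGGER_ID, E.LINE)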


@ -32,7 +32,6 @@ def write_contents(f):
"""
opcode = find_module('opcode')
targets = ['_unknown_opcode'] * 256
targets[255] = "TARGET_DO_TRACING"
for opname, op in opcode.opmap.items():
if not opcode.is_pseudo(op):
targets[op] = "TARGET_%s" % opname


@ -13,6 +13,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 0;
case RESUME:
return 0;
case INSTRUMENTED_RESUME:
return 0;
case LOAD_CLOSURE:
return 0;
case LOAD_FAST_CHECK:
@ -39,6 +41,12 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 0;
case END_FOR:
return 1+1;
case INSTRUMENTED_END_FOR:
return 2;
case END_SEND:
return 2;
case INSTRUMENTED_END_SEND:
return 2;
case UNARY_NEGATIVE:
return 1;
case UNARY_NOT:
@ -97,8 +105,12 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case RETURN_VALUE:
return 1;
case INSTRUMENTED_RETURN_VALUE:
return 1;
case RETURN_CONST:
return 0;
case INSTRUMENTED_RETURN_CONST:
return 0;
case GET_AITER:
return 1;
case GET_ANEXT:
@ -109,6 +121,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 2;
case SEND_GEN:
return 2;
case INSTRUMENTED_YIELD_VALUE:
return 1;
case YIELD_VALUE:
return 1;
case POP_EXCEPT:
@ -263,6 +277,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case FOR_ITER:
return 1;
case INSTRUMENTED_FOR_ITER:
return 0;
case FOR_ITER_LIST:
return 1;
case FOR_ITER_TUPLE:
@ -287,6 +303,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case KW_NAMES:
return 0;
case INSTRUMENTED_CALL:
return 0;
case CALL:
return oparg + 2;
case CALL_BOUND_METHOD_EXACT_ARGS:
@ -323,6 +341,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return oparg + 2;
case CALL_NO_KW_METHOD_DESCRIPTOR_FAST:
return oparg + 2;
case INSTRUMENTED_CALL_FUNCTION_EX:
return 0;
case CALL_FUNCTION_EX:
return ((oparg & 1) ? 1 : 0) + 3;
case MAKE_FUNCTION:
@ -339,10 +359,28 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 2;
case SWAP:
return (oparg-2) + 2;
case INSTRUMENTED_LINE:
return 0;
case INSTRUMENTED_INSTRUCTION:
return 0;
case INSTRUMENTED_JUMP_FORWARD:
return 0;
case INSTRUMENTED_JUMP_BACKWARD:
return 0;
case INSTRUMENTED_POP_JUMP_IF_TRUE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_FALSE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NONE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NOT_NONE:
return 0;
case EXTENDED_ARG:
return 0;
case CACHE:
return 0;
case RESERVED:
return 0;
default:
return -1;
}
@ -359,6 +397,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 0;
case RESUME:
return 0;
case INSTRUMENTED_RESUME:
return 0;
case LOAD_CLOSURE:
return 1;
case LOAD_FAST_CHECK:
@ -385,6 +425,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case END_FOR:
return 0+0;
case INSTRUMENTED_END_FOR:
return 0;
case END_SEND:
return 1;
case INSTRUMENTED_END_SEND:
return 1;
case UNARY_NEGATIVE:
return 1;
case UNARY_NOT:
@ -443,8 +489,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 0;
case RETURN_VALUE:
return 0;
case INSTRUMENTED_RETURN_VALUE:
return 0;
case RETURN_CONST:
return 0;
case INSTRUMENTED_RETURN_CONST:
return 0;
case GET_AITER:
return 1;
case GET_ANEXT:
@ -455,6 +505,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 2;
case SEND_GEN:
return 1;
case INSTRUMENTED_YIELD_VALUE:
return 1;
case YIELD_VALUE:
return 1;
case POP_EXCEPT:
@ -609,6 +661,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case FOR_ITER:
return 2;
case INSTRUMENTED_FOR_ITER:
return 0;
case FOR_ITER_LIST:
return 2;
case FOR_ITER_TUPLE:
@ -633,6 +687,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return ((oparg & 1) ? 1 : 0) + 1;
case KW_NAMES:
return 0;
case INSTRUMENTED_CALL:
return 0;
case CALL:
return 1;
case CALL_BOUND_METHOD_EXACT_ARGS:
@ -669,6 +725,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case CALL_NO_KW_METHOD_DESCRIPTOR_FAST:
return 1;
case INSTRUMENTED_CALL_FUNCTION_EX:
return 0;
case CALL_FUNCTION_EX:
return 1;
case MAKE_FUNCTION:
@ -685,10 +743,28 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 1;
case SWAP:
return (oparg-2) + 2;
case INSTRUMENTED_LINE:
return 0;
case INSTRUMENTED_INSTRUCTION:
return 0;
case INSTRUMENTED_JUMP_FORWARD:
return 0;
case INSTRUMENTED_JUMP_BACKWARD:
return 0;
case INSTRUMENTED_POP_JUMP_IF_TRUE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_FALSE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NONE:
return 0;
case INSTRUMENTED_POP_JUMP_IF_NOT_NONE:
return 0;
case EXTENDED_ARG:
return 0;
case CACHE:
return 0;
case RESERVED:
return 0;
default:
return -1;
}
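The two switches above are the popped and pushed halves of the static stack-effect table; an instruction's net effect is pushed minus popped. For the non-instrumented opcodes this is what dis.stack_effect reports, assuming a build of this branch (values follow from the table above):

    import dis, opcode

    # CALL pops oparg + 2 values and pushes 1, so the net effect is -(oparg + 1).
    call = opcode.opmap["CALL"]
    assert dis.stack_effect(call, 2) == -3

    # RETURN_VALUE pops 1 and pushes 0.
    assert dis.stack_effect(opcode.opmap["RETURN_VALUE"]) == -1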
@ -707,6 +783,7 @@ extern const struct opcode_metadata _PyOpcode_opcode_metadata[256];
const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[NOP] = { true, INSTR_FMT_IX },
[RESUME] = { true, INSTR_FMT_IB },
[INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB },
[LOAD_CLOSURE] = { true, INSTR_FMT_IB },
[LOAD_FAST_CHECK] = { true, INSTR_FMT_IB },
[LOAD_FAST] = { true, INSTR_FMT_IB },
@ -720,6 +797,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[POP_TOP] = { true, INSTR_FMT_IX },
[PUSH_NULL] = { true, INSTR_FMT_IX },
[END_FOR] = { true, INSTR_FMT_IB },
[INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX },
[END_SEND] = { true, INSTR_FMT_IX },
[INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX },
[UNARY_NEGATIVE] = { true, INSTR_FMT_IX },
[UNARY_NOT] = { true, INSTR_FMT_IX },
[UNARY_INVERT] = { true, INSTR_FMT_IX },
@ -749,12 +829,15 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[RAISE_VARARGS] = { true, INSTR_FMT_IB },
[INTERPRETER_EXIT] = { true, INSTR_FMT_IX },
[RETURN_VALUE] = { true, INSTR_FMT_IX },
[INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX },
[RETURN_CONST] = { true, INSTR_FMT_IB },
[INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB },
[GET_AITER] = { true, INSTR_FMT_IX },
[GET_ANEXT] = { true, INSTR_FMT_IX },
[GET_AWAITABLE] = { true, INSTR_FMT_IB },
[SEND] = { true, INSTR_FMT_IBC },
[SEND_GEN] = { true, INSTR_FMT_IBC },
[INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IX },
[YIELD_VALUE] = { true, INSTR_FMT_IX },
[POP_EXCEPT] = { true, INSTR_FMT_IX },
[RERAISE] = { true, INSTR_FMT_IB },
@ -832,6 +915,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[GET_ITER] = { true, INSTR_FMT_IX },
[GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX },
[FOR_ITER] = { true, INSTR_FMT_IBC },
[INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IB },
[FOR_ITER_LIST] = { true, INSTR_FMT_IBC },
[FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC },
[FOR_ITER_RANGE] = { true, INSTR_FMT_IBC },
@ -844,6 +928,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000 },
[LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000 },
[KW_NAMES] = { true, INSTR_FMT_IB },
[INSTRUMENTED_CALL] = { true, INSTR_FMT_IB },
[CALL] = { true, INSTR_FMT_IBC00 },
[CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00 },
[CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IBC00 },
@ -862,6 +947,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00 },
[CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00 },
[CALL_NO_KW_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00 },
[INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX },
[CALL_FUNCTION_EX] = { true, INSTR_FMT_IB },
[MAKE_FUNCTION] = { true, INSTR_FMT_IB },
[RETURN_GENERATOR] = { true, INSTR_FMT_IX },
@ -870,7 +956,16 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[COPY] = { true, INSTR_FMT_IB },
[BINARY_OP] = { true, INSTR_FMT_IBC },
[SWAP] = { true, INSTR_FMT_IB },
[INSTRUMENTED_LINE] = { true, INSTR_FMT_IX },
[INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX },
[INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB },
[INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IB },
[INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IB },
[EXTENDED_ARG] = { true, INSTR_FMT_IB },
[CACHE] = { true, INSTR_FMT_IX },
[RESERVED] = { true, INSTR_FMT_IX },
};
#endif


@ -4,17 +4,19 @@ static void *opcode_targets[256] = {
&&TARGET_PUSH_NULL,
&&TARGET_INTERPRETER_EXIT,
&&TARGET_END_FOR,
&&TARGET_END_SEND,
&&TARGET_BINARY_OP_ADD_FLOAT,
&&TARGET_BINARY_OP_ADD_INT,
&&TARGET_BINARY_OP_ADD_UNICODE,
&&TARGET_BINARY_OP_INPLACE_ADD_UNICODE,
&&TARGET_NOP,
&&TARGET_BINARY_OP_MULTIPLY_FLOAT,
&&TARGET_BINARY_OP_INPLACE_ADD_UNICODE,
&&TARGET_UNARY_NEGATIVE,
&&TARGET_UNARY_NOT,
&&TARGET_BINARY_OP_MULTIPLY_FLOAT,
&&TARGET_BINARY_OP_MULTIPLY_INT,
&&TARGET_BINARY_OP_SUBTRACT_FLOAT,
&&TARGET_UNARY_INVERT,
&&TARGET_BINARY_OP_SUBTRACT_FLOAT,
&&TARGET_RESERVED,
&&TARGET_BINARY_OP_SUBTRACT_INT,
&&TARGET_BINARY_SUBSCR_DICT,
&&TARGET_BINARY_SUBSCR_GETITEM,
@ -22,21 +24,21 @@ static void *opcode_targets[256] = {
&&TARGET_BINARY_SUBSCR_TUPLE_INT,
&&TARGET_CALL_PY_EXACT_ARGS,
&&TARGET_CALL_PY_WITH_DEFAULTS,
&&TARGET_CALL_BOUND_METHOD_EXACT_ARGS,
&&TARGET_CALL_BUILTIN_CLASS,
&&TARGET_BINARY_SUBSCR,
&&TARGET_BINARY_SLICE,
&&TARGET_STORE_SLICE,
&&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS,
&&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS,
&&TARGET_CALL_BOUND_METHOD_EXACT_ARGS,
&&TARGET_CALL_BUILTIN_CLASS,
&&TARGET_GET_LEN,
&&TARGET_MATCH_MAPPING,
&&TARGET_MATCH_SEQUENCE,
&&TARGET_MATCH_KEYS,
&&TARGET_CALL_NO_KW_BUILTIN_FAST,
&&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS,
&&TARGET_PUSH_EXC_INFO,
&&TARGET_CHECK_EXC_MATCH,
&&TARGET_CHECK_EG_MATCH,
&&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS,
&&TARGET_CALL_NO_KW_BUILTIN_FAST,
&&TARGET_CALL_NO_KW_BUILTIN_O,
&&TARGET_CALL_NO_KW_ISINSTANCE,
&&TARGET_CALL_NO_KW_LEN,
@ -46,8 +48,6 @@ static void *opcode_targets[256] = {
&&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_O,
&&TARGET_CALL_NO_KW_STR_1,
&&TARGET_CALL_NO_KW_TUPLE_1,
&&TARGET_CALL_NO_KW_TYPE_1,
&&TARGET_COMPARE_OP_FLOAT,
&&TARGET_WITH_EXCEPT_START,
&&TARGET_GET_AITER,
&&TARGET_GET_ANEXT,
@ -55,39 +55,39 @@ static void *opcode_targets[256] = {
&&TARGET_BEFORE_WITH,
&&TARGET_END_ASYNC_FOR,
&&TARGET_CLEANUP_THROW,
&&TARGET_CALL_NO_KW_TYPE_1,
&&TARGET_COMPARE_OP_FLOAT,
&&TARGET_COMPARE_OP_INT,
&&TARGET_COMPARE_OP_STR,
&&TARGET_FOR_ITER_LIST,
&&TARGET_FOR_ITER_TUPLE,
&&TARGET_STORE_SUBSCR,
&&TARGET_DELETE_SUBSCR,
&&TARGET_FOR_ITER_LIST,
&&TARGET_FOR_ITER_TUPLE,
&&TARGET_FOR_ITER_RANGE,
&&TARGET_FOR_ITER_GEN,
&&TARGET_LOAD_ATTR_CLASS,
&&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
&&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_MODULE,
&&TARGET_GET_ITER,
&&TARGET_GET_YIELD_FROM_ITER,
&&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_BUILD_CLASS,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_MODULE,
&&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ASSERTION_ERROR,
&&TARGET_RETURN_GENERATOR,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_METHOD_LAZY_DICT,
&&TARGET_LOAD_ATTR_METHOD_NO_DICT,
&&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_RETURN_VALUE,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_SETUP_ANNOTATIONS,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_POP_EXCEPT,
&&TARGET_STORE_NAME,
&&TARGET_DELETE_NAME,
@ -110,9 +110,9 @@ static void *opcode_targets[256] = {
&&TARGET_IMPORT_NAME,
&&TARGET_IMPORT_FROM,
&&TARGET_JUMP_FORWARD,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_STORE_FAST__STORE_FAST,
&&TARGET_STORE_SUBSCR_DICT,
&&TARGET_POP_JUMP_IF_FALSE,
&&TARGET_POP_JUMP_IF_TRUE,
&&TARGET_LOAD_GLOBAL,
@ -140,9 +140,9 @@ static void *opcode_targets[256] = {
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
&&TARGET_JUMP_BACKWARD,
&&TARGET_STORE_SUBSCR_LIST_INT,
&&TARGET_STORE_FAST__STORE_FAST,
&&TARGET_CALL_FUNCTION_EX,
&&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_STORE_SUBSCR_DICT,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
@ -152,15 +152,15 @@ static void *opcode_targets[256] = {
&&TARGET_YIELD_VALUE,
&&TARGET_RESUME,
&&TARGET_MATCH_CLASS,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
&&TARGET_STORE_SUBSCR_LIST_INT,
&&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_FORMAT_VALUE,
&&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_STRING,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
&&TARGET_SEND_GEN,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_LIST_EXTEND,
&&TARGET_SET_UPDATE,
&&TARGET_DICT_MERGE,
@ -237,22 +237,22 @@ static void *opcode_targets[256] = {
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_DO_TRACING
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NONE,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE,
&&TARGET_INSTRUMENTED_RESUME,
&&TARGET_INSTRUMENTED_CALL,
&&TARGET_INSTRUMENTED_RETURN_VALUE,
&&TARGET_INSTRUMENTED_YIELD_VALUE,
&&TARGET_INSTRUMENTED_CALL_FUNCTION_EX,
&&TARGET_INSTRUMENTED_JUMP_FORWARD,
&&TARGET_INSTRUMENTED_JUMP_BACKWARD,
&&TARGET_INSTRUMENTED_RETURN_CONST,
&&TARGET_INSTRUMENTED_FOR_ITER,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_FALSE,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_TRUE,
&&TARGET_INSTRUMENTED_END_FOR,
&&TARGET_INSTRUMENTED_END_SEND,
&&TARGET_INSTRUMENTED_INSTRUCTION,
&&TARGET_INSTRUMENTED_LINE,
&&_unknown_opcode
};
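With DO_TRACING gone, slot 255 is unused again and the INSTRUMENTED_* opcodes fill the top of the dispatch table. A sketch to confirm the numbering from Python, assuming Lib/opcode.py on this branch exposes MIN_INSTRUMENTED_OPCODE and the INSTRUMENTED_* names:

    import opcode

    instrumented = {name: op for name, op in opcode.opmap.items()
                    if name.startswith("INSTRUMENTED_")}
    # All instrumented opcodes live at or above MIN_INSTRUMENTED_OPCODE,
    # and none of them occupies slot 255.
    assert all(op >= opcode.MIN_INSTRUMENTED_OPCODE for op in instrumented.values())
    assert 255 not in instrumented.values()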


@ -625,7 +625,6 @@ free_interpreter(PyInterpreterState *interp)
main interpreter. We fix those fields here, in addition
to the other dynamically initialized fields.
*/
static void
init_interpreter(PyInterpreterState *interp,
_PyRuntimeState *runtime, int64_t id,
@ -650,12 +649,22 @@ init_interpreter(PyInterpreterState *interp,
_PyGC_InitState(&interp->gc);
PyConfig_InitPythonConfig(&interp->config);
_PyType_InitCache(interp);
for(int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
interp->monitors.tools[i] = 0;
}
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
for(int e = 0; e < PY_MONITORING_EVENTS; e++) {
interp->monitoring_callables[t][e] = NULL;
}
}
interp->sys_profile_initialized = false;
interp->sys_trace_initialized = false;
if (interp != &runtime->_main_interpreter) {
/* Fix the self-referential, statically initialized fields. */
interp->dtoa = (struct _dtoa_state)_dtoa_state_INIT(interp);
}
interp->f_opcode_trace_set = false;
interp->_initialized = 1;
}
@ -788,6 +797,20 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate)
Py_CLEAR(interp->audit_hooks);
for(int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
interp->monitors.tools[i] = 0;
}
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
for(int e = 0; e < PY_MONITORING_EVENTS; e++) {
Py_CLEAR(interp->monitoring_callables[t][e]);
}
}
interp->sys_profile_initialized = false;
interp->sys_trace_initialized = false;
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
Py_CLEAR(interp->monitoring_tool_names[t]);
}
PyConfig_Clear(&interp->config);
Py_CLEAR(interp->codec_search_path);
Py_CLEAR(interp->codec_search_cache);
@ -845,7 +868,7 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate)
interp->code_watchers[i] = NULL;
}
interp->active_code_watchers = 0;
interp->f_opcode_trace_set = false;
// XXX Once we have one allocator per interpreter (i.e.
// per-interpreter GC) we must ensure that all of the interpreter's
// objects have been cleaned up at the point.
@ -1237,6 +1260,7 @@ init_threadstate(PyThreadState *tstate,
tstate->datastack_chunk = NULL;
tstate->datastack_top = NULL;
tstate->datastack_limit = NULL;
tstate->what_event = -1;
tstate->_status.initialized = 1;
}
@ -1412,8 +1436,14 @@ PyThreadState_Clear(PyThreadState *tstate)
"PyThreadState_Clear: warning: thread still has a generator\n");
}
tstate->c_profilefunc = NULL;
tstate->c_tracefunc = NULL;
if (tstate->c_profilefunc != NULL) {
tstate->interp->sys_profiling_threads--;
tstate->c_profilefunc = NULL;
}
if (tstate->c_tracefunc != NULL) {
tstate->interp->sys_tracing_threads--;
tstate->c_tracefunc = NULL;
}
Py_CLEAR(tstate->c_profileobj);
Py_CLEAR(tstate->c_traceobj);
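PyThreadState_Clear now keeps the interpreter-wide sys_profiling_threads / sys_tracing_threads counters honest, since a thread can exit without ever calling sys.settrace(None). The counters exist because tracing is per thread while the monitoring event mask is per interpreter; a sketch of the user-visible behaviour (the worker function is illustrative):

    import sys, threading

    def worker():
        # sys.settrace() only affects the calling thread, but it bumps the
        # interpreter-wide count so the legacy-trace tool's events stay enabled.
        sys.settrace(lambda frame, event, arg: None)
        # ... run traced code ...
        sys.settrace(None)   # last tracing thread: events can be switched off again

    t = threading.Thread(target=worker)
    t.start()
    t.join()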


@ -273,7 +273,8 @@ _PyCode_Quicken(PyCodeObject *code)
_Py_CODEUNIT *instructions = _PyCode_CODE(code);
for (int i = 0; i < Py_SIZE(code); i++) {
int previous_opcode = opcode;
opcode = _PyOpcode_Deopt[instructions[i].op.code];
opcode = _Py_GetBaseOpcode(code, i);
assert(opcode < MIN_INSTRUMENTED_OPCODE);
int caches = _PyOpcode_Caches[opcode];
if (caches) {
instructions[i + 1].cache = adaptive_counter_warmup();
@ -1737,6 +1738,7 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
{
assert(ENABLE_SPECIALIZATION);
assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL);
assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL);
_PyCallCache *cache = (_PyCallCache *)(instr + 1);
int fail;
if (PyCFunction_CheckExact(callable)) {
@ -2149,7 +2151,9 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
goto success;
}
else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR);
assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
);
instr->op.code = FOR_ITER_GEN;
goto success;
}


@ -3409,6 +3409,7 @@ error:
return _PyStatus_ERR("can't set preliminary stderr");
}
PyObject *_Py_CreateMonitoringObject(void);
/* Create sys module without all attributes.
_PySys_UpdateConfig() should be called later to add remaining attributes. */
@ -3458,6 +3459,16 @@ _PySys_Create(PyThreadState *tstate, PyObject **sysmod_p)
goto error;
}
PyObject *monitoring = _Py_CreateMonitoringObject();
if (monitoring == NULL) {
goto error;
}
int err = PyDict_SetItemString(sysdict, "monitoring", monitoring);
Py_DECREF(monitoring);
if (err < 0) {
goto error;
}
assert(!_PyErr_Occurred(tstate));
*sysmod_p = sysmod;
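After this hunk the monitoring namespace is reachable as an attribute of sys. A quick check, assuming an interpreter built from this branch:

    import sys

    # _Py_CreateMonitoringObject() installs the namespace into the sys dict.
    assert hasattr(sys, "monitoring")
    print(sys.monitoring.events.NO_EVENTS)   # 0, the empty event set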


@ -255,7 +255,6 @@ class Printer:
self.write(f".co_names = {co_names},")
self.write(f".co_exceptiontable = {co_exceptiontable},")
self.field(code, "co_flags")
self.write("._co_linearray_entry_size = 0,")
self.field(code, "co_argcount")
self.field(code, "co_posonlyargcount")
self.field(code, "co_kwonlyargcount")
@ -276,7 +275,6 @@ class Printer:
self.write(f".co_qualname = {co_qualname},")
self.write(f".co_linetable = {co_linetable},")
self.write(f"._co_cached = NULL,")
self.write("._co_linearray = NULL,")
self.write(f".co_code_adaptive = {co_code_adaptive},")
for i, op in enumerate(code.co_code[::2]):
if op == RESUME:


@ -89,6 +89,7 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna
HAVE_ARGUMENT = opcode["HAVE_ARGUMENT"]
MIN_PSEUDO_OPCODE = opcode["MIN_PSEUDO_OPCODE"]
MAX_PSEUDO_OPCODE = opcode["MAX_PSEUDO_OPCODE"]
MIN_INSTRUMENTED_OPCODE = opcode["MIN_INSTRUMENTED_OPCODE"]
NUM_OPCODES = len(opname)
used = [ False ] * len(opname)
@ -105,9 +106,6 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna
specialized_opmap[name] = next_op
opname_including_specialized[next_op] = name
used[next_op] = True
specialized_opmap['DO_TRACING'] = 255
opname_including_specialized[255] = 'DO_TRACING'
used[255] = True
with open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj:
fobj.write(header)
@ -120,6 +118,8 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna
fobj.write(DEFINE.format("HAVE_ARGUMENT", HAVE_ARGUMENT))
if op == MIN_PSEUDO_OPCODE:
fobj.write(DEFINE.format("MIN_PSEUDO_OPCODE", MIN_PSEUDO_OPCODE))
if op == MIN_INSTRUMENTED_OPCODE:
fobj.write(DEFINE.format("MIN_INSTRUMENTED_OPCODE", MIN_INSTRUMENTED_OPCODE))
fobj.write(DEFINE.format(name, op))


@ -135,6 +135,9 @@ Objects/stringlib/unicode_format.h - PyFieldNameIter_Type -
Objects/unicodeobject.c - EncodingMapType -
#Objects/unicodeobject.c - PyFieldNameIter_Type -
#Objects/unicodeobject.c - PyFormatterIter_Type -
Python/legacy_tracing.c - _PyLegacyEventHandler_Type -
Objects/object.c - _PyLegacyEventHandler_Type -
##-----------------------
## static builtin structseq
@ -297,6 +300,8 @@ Objects/object.c - _Py_NotImplementedStruct -
Objects/setobject.c - _dummy_struct -
Objects/setobject.c - _PySet_Dummy -
Objects/sliceobject.c - _Py_EllipsisObject -
Python/instrumentation.c - DISABLE -
Python/instrumentation.c - _PyInstrumentation_MISSING -
##################################
