Mirror of https://github.com/python/cpython.git, synced 2025-08-04 00:48:58 +00:00
GH-122029: Break INSTRUMENTED_CALL into micro-ops, so that its behavior is consistent with CALL (GH-122177)
parent afb0aa6ed2
commit 95a73917cd
11 changed files with 341 additions and 153 deletions
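For context, PY_MONITORING_EVENT_CALL is the event surfaced to Python code as sys.monitoring.events.CALL; the arg0 it reports is the bound self for a method call, the first positional argument for a plain call, or the MISSING sentinel when there are no arguments. A minimal consumer sketch (assumes Python 3.12+; the tool id and callback name are illustrative, not part of this change):

import sys

TOOL = sys.monitoring.DEBUGGER_ID  # any unused tool id (0-5) would do
seen = []

def on_call(code, instruction_offset, callable_obj, arg0):
    # CALL callback signature per sys.monitoring; arg0 may be sys.monitoring.MISSING.
    seen.append((callable_obj, arg0))

sys.monitoring.use_tool_id(TOOL, "call-demo")
sys.monitoring.register_callback(TOOL, sys.monitoring.events.CALL, on_call)
sys.monitoring.set_events(TOOL, sys.monitoring.events.CALL)

"abc".upper()        # method call: arg0 is the string "abc"
len([1, 2, 3])       # plain call: arg0 is the list
(lambda: None)()     # no arguments: arg0 is sys.monitoring.MISSING

sys.monitoring.set_events(TOOL, 0)  # turn monitoring back off
sys.monitoring.free_tool_id(TOOL)
print(seen)

The hunks below drop the hand-written INSTRUMENTED_CALL instruction and rebuild both CALL and INSTRUMENTED_CALL from shared micro-ops, so the instrumented path reports its events through the same call logic as the uninstrumented path.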
@@ -3241,20 +3241,6 @@ dummy_func(
            unused/1 +
            _LOAD_ATTR_METHOD_LAZY_DICT;

        inst(INSTRUMENTED_CALL, (unused/3 -- )) {
            int is_meth = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 1)) != NULL;
            int total_args = oparg + is_meth;
            PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 2));
            PyObject *arg = total_args == 0 ?
                &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(PEEK(total_args));
            int err = _Py_call_instrumentation_2args(
                    tstate, PY_MONITORING_EVENT_CALL,
                    frame, this_instr, function, arg);
            ERROR_IF(err, error);
            PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
            GO_TO_INSTRUCTION(CALL);
        }

        // Cache layout: counter/1, func_version/2
        // CALL_INTRINSIC_1/2, CALL_KW, and CALL_FUNCTION_EX aren't members!
        family(CALL, INLINE_CACHE_ENTRIES_CALL) = {
@@ -3292,28 +3278,34 @@
            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_MAYBE_EXPAND_METHOD, (callable, self_or_null, args[oparg] -- func, maybe_self, args[oparg])) {
            if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
                PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                maybe_self = PyStackRef_FromPyObjectNew(self);
                PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                func = PyStackRef_FromPyObjectNew(method);
                /* Make sure that callable and all args are in memory */
                args[-2] = func;
                args[-1] = maybe_self;
                PyStackRef_CLOSE(callable);
            }
            else {
                func = callable;
                maybe_self = self_or_null;
            }
        }

        // When calling Python, inline the call using DISPATCH_INLINED().
        op(_CALL, (callable, self_or_null, args[oparg] -- res)) {
        op(_DO_CALL, (callable, self_or_null, args[oparg] -- res)) {
            PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
            PyObject *self_or_null_o = PyStackRef_AsPyObjectBorrow(self_or_null);

            // oparg counts all of the args, but *not* self:
            int total_args = oparg;
            if (self_or_null_o != NULL) {
            if (!PyStackRef_IsNull(self_or_null)) {
                args--;
                total_args++;
            }
            else if (Py_TYPE(callable_o) == &PyMethod_Type) {
                args--;
                total_args++;
                PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                args[0] = PyStackRef_FromPyObjectNew(self);
                PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                args[-1] = PyStackRef_FromPyObjectNew(method);
                PyStackRef_CLOSE(callable);
                callable_o = method;
                callable = args[-1];
            }
            // Check if the call can be inlined or not
            if (Py_TYPE(callable_o) == &PyFunction_Type &&
                tstate->interp->eval_frame == NULL &&
@@ -3376,7 +3368,28 @@
            CHECK_EVAL_BREAKER();
        }

        macro(CALL) = _SPECIALIZE_CALL + unused/2 + _CALL + _CHECK_PERIODIC;
        op(_MONITOR_CALL, (func, maybe_self, args[oparg] -- func, maybe_self, args[oparg])) {
            int is_meth = !PyStackRef_IsNull(maybe_self);
            PyObject *function = PyStackRef_AsPyObjectBorrow(func);
            PyObject *arg0;
            if (is_meth) {
                arg0 = PyStackRef_AsPyObjectBorrow(maybe_self);
            }
            else if (oparg) {
                arg0 = PyStackRef_AsPyObjectBorrow(args[0]);
            }
            else {
                arg0 = &_PyInstrumentation_MISSING;
            }
            int err = _Py_call_instrumentation_2args(
                    tstate, PY_MONITORING_EVENT_CALL,
                    frame, this_instr, function, arg0
            );
            ERROR_IF(err, error);
        }

        macro(CALL) = _SPECIALIZE_CALL + unused/2 + _MAYBE_EXPAND_METHOD + _DO_CALL + _CHECK_PERIODIC;
        macro(INSTRUMENTED_CALL) = unused/3 + _MAYBE_EXPAND_METHOD + _MONITOR_CALL + _DO_CALL + _CHECK_PERIODIC;

        op(_PY_FRAME_GENERAL, (callable, self_or_null, args[oparg] -- new_frame: _PyInterpreterFrame*)) {
            PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
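A rough Python-level analogy for what the new _MAYBE_EXPAND_METHOD micro-op does (a sketch only; the interpreter works on stack refs, not Python objects): when the callable slot holds a bound method and the self slot is empty, the pair is rewritten into the underlying function plus its __self__, so _MONITOR_CALL and _DO_CALL never have to special-case bound methods. The helper name below is made up for illustration.

from types import MethodType

def maybe_expand_method(callable_obj, self_or_null):
    # Mirrors _MAYBE_EXPAND_METHOD: split a bound method into (func, self),
    # otherwise pass the pair through unchanged.
    if isinstance(callable_obj, MethodType) and self_or_null is None:
        return callable_obj.__func__, callable_obj.__self__
    return callable_obj, self_or_null

class Greeter:
    def hello(self, name):
        return f"hello, {name}"

bound = Greeter().hello
func, maybe_self = maybe_expand_method(bound, None)
assert func is Greeter.hello
assert maybe_self is bound.__self__
# Calling the expanded pair behaves exactly like calling the bound method:
assert func(maybe_self, "world") == bound("world")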
34  Python/executor_cases.c.h  (generated)
@@ -3584,15 +3584,45 @@
            break;
        }

        /* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 because it is instrumented */
        case _MAYBE_EXPAND_METHOD: {
            _PyStackRef *args;
            _PyStackRef self_or_null;
            _PyStackRef callable;
            _PyStackRef func;
            _PyStackRef maybe_self;
            oparg = CURRENT_OPARG();
            args = &stack_pointer[-oparg];
            self_or_null = stack_pointer[-1 - oparg];
            callable = stack_pointer[-2 - oparg];
            if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
                PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                maybe_self = PyStackRef_FromPyObjectNew(self);
                PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                func = PyStackRef_FromPyObjectNew(method);
                /* Make sure that callable and all args are in memory */
                args[-2] = func;
                args[-1] = maybe_self;
                PyStackRef_CLOSE(callable);
            }
            else {
                func = callable;
                maybe_self = self_or_null;
            }
            stack_pointer[-2 - oparg] = func;
            stack_pointer[-1 - oparg] = maybe_self;
            break;
        }

        /* _CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
        /* _DO_CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */

        case _CHECK_PERIODIC: {
            CHECK_EVAL_BREAKER();
            break;
        }

        /* _MONITOR_CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */

        case _PY_FRAME_GENERAL: {
            _PyStackRef *args;
            _PyStackRef self_or_null;
176  Python/generated_cases.c.h  (generated)
@@ -773,6 +773,8 @@
            _PyStackRef callable;
            _PyStackRef self_or_null;
            _PyStackRef *args;
            _PyStackRef func;
            _PyStackRef maybe_self;
            _PyStackRef res;
            // _SPECIALIZE_CALL
            self_or_null = stack_pointer[-1 - oparg];
@@ -792,27 +794,35 @@
                #endif /* ENABLE_SPECIALIZATION */
            }
            /* Skip 2 cache entries */
            // _CALL
            // _MAYBE_EXPAND_METHOD
            {
                if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
                    PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
                    PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                    maybe_self = PyStackRef_FromPyObjectNew(self);
                    PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                    func = PyStackRef_FromPyObjectNew(method);
                    /* Make sure that callable and all args are in memory */
                    args[-2] = func;
                    args[-1] = maybe_self;
                    PyStackRef_CLOSE(callable);
                }
                else {
                    func = callable;
                    maybe_self = self_or_null;
                }
            }
            // _DO_CALL
            self_or_null = maybe_self;
            callable = func;
            {
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
                PyObject *self_or_null_o = PyStackRef_AsPyObjectBorrow(self_or_null);
                // oparg counts all of the args, but *not* self:
                int total_args = oparg;
                if (self_or_null_o != NULL) {
                if (!PyStackRef_IsNull(self_or_null)) {
                    args--;
                    total_args++;
                }
                else if (Py_TYPE(callable_o) == &PyMethod_Type) {
                    args--;
                    total_args++;
                    PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                    args[0] = PyStackRef_FromPyObjectNew(self);
                    PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                    args[-1] = PyStackRef_FromPyObjectNew(method);
                    PyStackRef_CLOSE(callable);
                    callable_o = method;
                    callable = args[-1];
                }
                // Check if the call can be inlined or not
                if (Py_TYPE(callable_o) == &PyFunction_Type &&
                    tstate->interp->eval_frame == NULL &&
@@ -3504,18 +3514,134 @@
            (void)this_instr;
            next_instr += 4;
            INSTRUCTION_STATS(INSTRUMENTED_CALL);
            _PyStackRef callable;
            _PyStackRef self_or_null;
            _PyStackRef *args;
            _PyStackRef func;
            _PyStackRef maybe_self;
            _PyStackRef res;
            /* Skip 3 cache entries */
            int is_meth = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 1)) != NULL;
            int total_args = oparg + is_meth;
            PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 2));
            PyObject *arg = total_args == 0 ?
                &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(PEEK(total_args));
            int err = _Py_call_instrumentation_2args(
                    tstate, PY_MONITORING_EVENT_CALL,
                    frame, this_instr, function, arg);
            if (err) goto error;
            PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
            GO_TO_INSTRUCTION(CALL);
            // _MAYBE_EXPAND_METHOD
            args = &stack_pointer[-oparg];
            self_or_null = stack_pointer[-1 - oparg];
            callable = stack_pointer[-2 - oparg];
            {
                if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
                    PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
                    PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                    maybe_self = PyStackRef_FromPyObjectNew(self);
                    PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                    func = PyStackRef_FromPyObjectNew(method);
                    /* Make sure that callable and all args are in memory */
                    args[-2] = func;
                    args[-1] = maybe_self;
                    PyStackRef_CLOSE(callable);
                }
                else {
                    func = callable;
                    maybe_self = self_or_null;
                }
            }
            // _MONITOR_CALL
            {
                int is_meth = !PyStackRef_IsNull(maybe_self);
                PyObject *function = PyStackRef_AsPyObjectBorrow(func);
                PyObject *arg0;
                if (is_meth) {
                    arg0 = PyStackRef_AsPyObjectBorrow(maybe_self);
                }
                else if (oparg) {
                    arg0 = PyStackRef_AsPyObjectBorrow(args[0]);
                }
                else {
                    arg0 = &_PyInstrumentation_MISSING;
                }
                int err = _Py_call_instrumentation_2args(
                        tstate, PY_MONITORING_EVENT_CALL,
                        frame, this_instr, function, arg0
                );
                if (err) goto error;
            }
            // _DO_CALL
            self_or_null = maybe_self;
            callable = func;
            {
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
                // oparg counts all of the args, but *not* self:
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null)) {
                    args--;
                    total_args++;
                }
                // Check if the call can be inlined or not
                if (Py_TYPE(callable_o) == &PyFunction_Type &&
                    tstate->interp->eval_frame == NULL &&
                    ((PyFunctionObject *)callable_o)->vectorcall == _PyFunction_Vectorcall)
                {
                    int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
                    PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
                    _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
                        tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals,
                        args, total_args, NULL
                    );
                    // Manipulate stack directly since we leave using DISPATCH_INLINED().
                    STACK_SHRINK(oparg + 2);
                    // The frame has stolen all the arguments from the stack,
                    // so there is no need to clean them up.
                    if (new_frame == NULL) {
                        goto error;
                    }
                    frame->return_offset = (uint16_t)(next_instr - this_instr);
                    DISPATCH_INLINED(new_frame);
                }
                /* Callable is not a normal Python function */
                STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
                if (CONVERSION_FAILED(args_o)) {
                    PyStackRef_CLOSE(callable);
                    PyStackRef_CLOSE(self_or_null);
                    for (int _i = oparg; --_i >= 0;) {
                        PyStackRef_CLOSE(args[_i]);
                    }
                    if (true) { stack_pointer += -2 - oparg; goto error; }
                }
                PyObject *res_o = PyObject_Vectorcall(
                    callable_o, args_o,
                    total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
                    NULL);
                STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
                if (opcode == INSTRUMENTED_CALL) {
                    PyObject *arg = total_args == 0 ?
                        &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]);
                    if (res_o == NULL) {
                        _Py_call_instrumentation_exc2(
                            tstate, PY_MONITORING_EVENT_C_RAISE,
                            frame, this_instr, callable_o, arg);
                    }
                    else {
                        int err = _Py_call_instrumentation_2args(
                            tstate, PY_MONITORING_EVENT_C_RETURN,
                            frame, this_instr, callable_o, arg);
                        if (err < 0) {
                            Py_CLEAR(res_o);
                        }
                    }
                }
                assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
                PyStackRef_CLOSE(callable);
                for (int i = 0; i < total_args; i++) {
                    PyStackRef_CLOSE(args[i]);
                }
                if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            // _CHECK_PERIODIC
            {
            }
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            CHECK_EVAL_BREAKER();
            DISPATCH();
        }

        TARGET(INSTRUMENTED_CALL_FUNCTION_EX) {
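The arg0 handed to the CALL event by the _MONITOR_CALL block above follows one rule: the bound self for a method call, otherwise the first positional argument, otherwise the MISSING sentinel (&_PyInstrumentation_MISSING, exposed to Python as sys.monitoring.MISSING). A small sketch of that selection logic; the function name is made up for illustration:

import sys

MISSING = sys.monitoring.MISSING

def pick_arg0(maybe_self, args):
    # Same decision _MONITOR_CALL makes before calling _Py_call_instrumentation_2args().
    if maybe_self is not None:   # method call: report self
        return maybe_self
    if args:                     # plain call with arguments: report the first one
        return args[0]
    return MISSING               # no arguments at all

assert pick_arg0("abc", ()) == "abc"
assert pick_arg0(None, (1, 2)) == 1
assert pick_arg0(None, ()) is MISSING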
2  Python/opcode_targets.h  (generated)
@@ -240,7 +240,6 @@ static void *opcode_targets[256] = {
    &&TARGET_INSTRUMENTED_END_SEND,
    &&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR,
    &&TARGET_INSTRUMENTED_FOR_ITER,
    &&TARGET_INSTRUMENTED_CALL,
    &&TARGET_INSTRUMENTED_CALL_KW,
    &&TARGET_INSTRUMENTED_CALL_FUNCTION_EX,
    &&TARGET_INSTRUMENTED_INSTRUCTION,
@@ -253,6 +252,7 @@ static void *opcode_targets[256] = {
    &&TARGET_INSTRUMENTED_RETURN_VALUE,
    &&TARGET_INSTRUMENTED_RETURN_CONST,
    &&TARGET_INSTRUMENTED_YIELD_VALUE,
    &&TARGET_INSTRUMENTED_CALL,
    &&TARGET_INSTRUMENTED_LINE,
    &&_unknown_opcode,
};
18  Python/optimizer_cases.c.h  (generated)
@@ -1598,14 +1598,28 @@
            break;
        }

        /* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 */
        case _MAYBE_EXPAND_METHOD: {
            _Py_UopsSymbol *func;
            _Py_UopsSymbol *maybe_self;
            _Py_UopsSymbol **args;
            func = sym_new_not_null(ctx);
            maybe_self = sym_new_not_null(ctx);
            for (int _i = oparg; --_i >= 0;) {
                args[_i] = sym_new_not_null(ctx);
            }
            stack_pointer[-2 - oparg] = func;
            stack_pointer[-1 - oparg] = maybe_self;
            break;
        }

        /* _CALL is not a viable micro-op for tier 2 */
        /* _DO_CALL is not a viable micro-op for tier 2 */

        case _CHECK_PERIODIC: {
            break;
        }

        /* _MONITOR_CALL is not a viable micro-op for tier 2 */

        case _PY_FRAME_GENERAL: {
            _Py_UopsSymbol **args;
            _Py_UopsSymbol *self_or_null;