gh-117139: Convert the evaluation stack to stack refs (#118450)
This PR sets up tagged pointers for CPython. The general idea is to create a separate struct, _PyStackRef, that stores the bits for everything on the evaluation stack. This forces the C compiler to warn us if we try to cast things or pull things out of the struct directly.

Only for free threading: we tag the low bit if something is deferred, which means we skip incref and decref operations on it. This behavior may change in the future if Mark's plans to defer all objects in the interpreter loop pan out.

This implies that a strict stack reference discipline is required: ALL incref and decref operations on stackrefs must use the stackref variants. It is unsafe to untag something and then do normal incref/decref ops on it. The new incref and decref variants are called dup and close; they mimic a "handle" API operating on these stackrefs.

Please read Include/internal/pycore_stackref.h for more information!

---------

Co-authored-by: Mark Shannon <9448417+markshannon@users.noreply.github.com>
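To make the handle discipline concrete, here is a minimal illustrative sketch (not code from this commit) of how interpreter-internal code is expected to treat a stackref. It assumes the dup variant is spelled PyStackRef_DUP; the close, borrow and steal names all appear in the hunks below, and the snippet presumes the internal headers (Py_BUILD_CORE, pycore_stackref.h) are available.

    #include "pycore_stackref.h"   // _PyStackRef and the PyStackRef_* handle API

    /* Hypothetical helper: borrows `obj`, returns whether it is truthy. */
    static int
    stackref_handle_demo(PyObject *obj)
    {
        /* Wrap a new strong reference in a stackref. From this point on, only
         * the stackref variants may touch it; no direct Py_INCREF/Py_DECREF. */
        _PyStackRef ref = PyStackRef_FromPyObjectSteal(Py_NewRef(obj));

        /* "dup" is the incref analogue: both handles must eventually be closed. */
        _PyStackRef copy = PyStackRef_DUP(ref);

        /* Borrowing hands back a PyObject * without transferring ownership. */
        int truthy = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(copy));

        /* "close" is the decref analogue; XCLOSE additionally tolerates NULL. */
        PyStackRef_CLOSE(copy);
        PyStackRef_XCLOSE(ref);
        return truthy;
    }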
This commit is contained in:
parent d611c4c8e9
commit 22b0de2755

35 changed files with 5228 additions and 3758 deletions

Python/bytecodes.c (2018 changes; diff suppressed because it is too large)

Python/ceval.c (183 changes)
@@ -39,6 +39,7 @@
 #include "opcode.h"
 #include "pydtrace.h"
 #include "setobject.h"
+#include "pycore_stackref.h"
 
 #include <stdbool.h>              // bool
 
@@ -104,33 +105,34 @@
 
 #ifdef LLTRACE
 static void
-dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer)
+dump_stack(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer)
 {
-    PyObject **stack_base = _PyFrame_Stackbase(frame);
+    _PyStackRef *stack_base = _PyFrame_Stackbase(frame);
     PyObject *exc = PyErr_GetRaisedException();
     printf(" stack=[");
-    for (PyObject **ptr = stack_base; ptr < stack_pointer; ptr++) {
+    for (_PyStackRef *ptr = stack_base; ptr < stack_pointer; ptr++) {
         if (ptr != stack_base) {
             printf(", ");
         }
-        if (*ptr == NULL) {
+        PyObject *obj = PyStackRef_AsPyObjectBorrow(*ptr);
+        if (obj == NULL) {
             printf("<nil>");
             continue;
         }
         if (
-            *ptr == Py_None
-            || PyBool_Check(*ptr)
-            || PyLong_CheckExact(*ptr)
-            || PyFloat_CheckExact(*ptr)
-            || PyUnicode_CheckExact(*ptr)
+            obj == Py_None
+            || PyBool_Check(obj)
+            || PyLong_CheckExact(obj)
+            || PyFloat_CheckExact(obj)
+            || PyUnicode_CheckExact(obj)
         ) {
-            if (PyObject_Print(*ptr, stdout, 0) == 0) {
+            if (PyObject_Print(obj, stdout, 0) == 0) {
                 continue;
             }
             PyErr_Clear();
         }
         // Don't call __repr__(), it might recurse into the interpreter.
-        printf("<%s at %p>", Py_TYPE(*ptr)->tp_name, (void *)(*ptr));
+        printf("<%s at %p>", Py_TYPE(obj)->tp_name, (void *)(ptr->bits));
     }
     printf("]\n");
     fflush(stdout);
@@ -139,7 +141,7 @@ dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer)
 
 static void
 lltrace_instruction(_PyInterpreterFrame *frame,
-                    PyObject **stack_pointer,
+                    _PyStackRef *stack_pointer,
                     _Py_CODEUNIT *next_instr,
                     int opcode,
                     int oparg)
@@ -695,6 +697,35 @@ extern void _PyUOpPrint(const _PyUOpInstruction *uop);
 #endif
 
+
+PyObject **
+_PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_ssize_t nargs, PyObject **scratch)
+{
+    PyObject **result;
+    if (nargs > MAX_STACKREF_SCRATCH) {
+        // +1 in case PY_VECTORCALL_ARGUMENTS_OFFSET is set.
+        result = PyMem_Malloc((nargs + 1) * sizeof(PyObject *));
+        if (result == NULL) {
+            return NULL;
+        }
+        result++;
+    }
+    else {
+        result = scratch;
+    }
+    for (int i = 0; i < nargs; i++) {
+        result[i] = PyStackRef_AsPyObjectBorrow(input[i]);
+    }
+    return result;
+}
+
+void
+_PyObjectArray_Free(PyObject **array, PyObject **scratch)
+{
+    if (array != scratch) {
+        PyMem_Free(array);
+    }
+}
 
 /* _PyEval_EvalFrameDefault() is a *big* function,
  * so consume 3 units of C stack */
 #define PY_EVAL_C_STACK_UNITS 2
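For orientation, a rough sketch of how a caller might use the helper added above to hand a span of stackrefs to code that still expects a PyObject * array. The call site and its names are hypothetical; the +1/-1 offset convention mirrors the STACKREFS_TO_PYOBJECTS / STACKREFS_TO_PYOBJECTS_CLEANUP macros added further down in this diff.

    /* Hypothetical call site: convert nargs stackrefs to a borrowed PyObject *
     * view for a vectorcall, then release the heap copy if one was made. */
    static PyObject *
    vectorcall_stackrefs(PyObject *callable, _PyStackRef *args, Py_ssize_t nargs)
    {
        PyObject *scratch[MAX_STACKREF_SCRATCH + 1];   // small-argument fast path
        // Pass scratch + 1 so slot -1 stays free for PY_VECTORCALL_ARGUMENTS_OFFSET.
        PyObject **args_o = _PyObjectArray_FromStackRefArray(args, nargs, scratch + 1);
        if (args_o == NULL) {
            return PyErr_NoMemory();
        }
        PyObject *res = PyObject_Vectorcall(callable, args_o, nargs, NULL);
        // args_o - 1 undoes the helper's result++; the free is a no-op when
        // the scratch buffer was used.
        _PyObjectArray_Free(args_o - 1, scratch);
        return res;
    }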
@@ -773,7 +804,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
     /* Local "register" variables.
      * These are cached values from the frame and code object. */
     _Py_CODEUNIT *next_instr;
-    PyObject **stack_pointer;
+    _PyStackRef *stack_pointer;
 
 #if defined(_Py_TIER2) && !defined(_Py_JIT)
     /* Tier 2 interpreter state */
@@ -916,10 +947,9 @@ exception_unwind:
             assert(_PyErr_Occurred(tstate));
 
             /* Pop remaining stack entries. */
-            PyObject **stackbase = _PyFrame_Stackbase(frame);
+            _PyStackRef *stackbase = _PyFrame_Stackbase(frame);
             while (stack_pointer > stackbase) {
-                PyObject *o = POP();
-                Py_XDECREF(o);
+                PyStackRef_XCLOSE(POP());
             }
             assert(STACK_LEVEL() == 0);
             _PyFrame_SetStackPointer(frame, stack_pointer);
@@ -928,10 +958,9 @@ exception_unwind:
         }
 
         assert(STACK_LEVEL() >= level);
-        PyObject **new_top = _PyFrame_Stackbase(frame) + level;
+        _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level;
         while (stack_pointer > new_top) {
-            PyObject *v = POP();
-            Py_XDECREF(v);
+            PyStackRef_XCLOSE(POP());
         }
         if (lasti) {
             int frame_lasti = _PyInterpreterFrame_LASTI(frame);
@@ -939,7 +968,7 @@ exception_unwind:
             if (lasti == NULL) {
                 goto exception_unwind;
             }
-            PUSH(lasti);
+            PUSH(PyStackRef_FromPyObjectSteal(lasti));
         }
 
         /* Make the raw exception data
@@ -947,7 +976,7 @@ exception_unwind:
            so a program can emulate the
            Python main loop. */
         PyObject *exc = _PyErr_GetRaisedException(tstate);
-        PUSH(exc);
+        PUSH(PyStackRef_FromPyObjectSteal(exc));
         next_instr = _PyCode_CODE(_PyFrame_GetCode(frame)) + handler;
 
         if (monitor_handled(tstate, frame, next_instr, exc) < 0) {
@@ -1217,7 +1246,7 @@ format_missing(PyThreadState *tstate, const char *kind,
 static void
 missing_arguments(PyThreadState *tstate, PyCodeObject *co,
                   Py_ssize_t missing, Py_ssize_t defcount,
-                  PyObject **localsplus, PyObject *qualname)
+                  _PyStackRef *localsplus, PyObject *qualname)
 {
     Py_ssize_t i, j = 0;
     Py_ssize_t start, end;
@@ -1238,7 +1267,7 @@ missing_arguments(PyThreadState *tstate, PyCodeObject *co,
         end = start + co->co_kwonlyargcount;
     }
     for (i = start; i < end; i++) {
-        if (localsplus[i] == NULL) {
+        if (PyStackRef_IsNull(localsplus[i])) {
             PyObject *raw = PyTuple_GET_ITEM(co->co_localsplusnames, i);
             PyObject *name = PyObject_Repr(raw);
             if (name == NULL) {
@@ -1256,7 +1285,7 @@ missing_arguments(PyThreadState *tstate, PyCodeObject *co,
 static void
 too_many_positional(PyThreadState *tstate, PyCodeObject *co,
                     Py_ssize_t given, PyObject *defaults,
-                    PyObject **localsplus, PyObject *qualname)
+                    _PyStackRef *localsplus, PyObject *qualname)
 {
     int plural;
     Py_ssize_t kwonly_given = 0;
@@ -1267,7 +1296,7 @@ too_many_positional(PyThreadState *tstate, PyCodeObject *co,
         assert((co->co_flags & CO_VARARGS) == 0);
         /* Count missing keyword-only args. */
         for (i = co_argcount; i < co_argcount + co->co_kwonlyargcount; i++) {
-            if (localsplus[i] != NULL) {
+            if (PyStackRef_AsPyObjectBorrow(localsplus[i]) != NULL) {
                 kwonly_given++;
             }
         }
@@ -1445,7 +1474,7 @@ get_exception_handler(PyCodeObject *code, int index, int *level, int *handler, i
 
 static int
 initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
-    PyObject **localsplus, PyObject *const *args,
+    _PyStackRef *localsplus, _PyStackRef const *args,
     Py_ssize_t argcount, PyObject *kwnames)
 {
     PyCodeObject *co = (PyCodeObject*)func->func_code;
@@ -1463,8 +1492,8 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         if (co->co_flags & CO_VARARGS) {
             i++;
         }
-        assert(localsplus[i] == NULL);
-        localsplus[i] = kwdict;
+        assert(PyStackRef_IsNull(localsplus[i]));
+        localsplus[i] = PyStackRef_FromPyObjectSteal(kwdict);
     }
     else {
         kwdict = NULL;
@@ -1479,9 +1508,8 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         n = argcount;
     }
     for (j = 0; j < n; j++) {
-        PyObject *x = args[j];
-        assert(localsplus[j] == NULL);
-        localsplus[j] = x;
+        assert(PyStackRef_IsNull(localsplus[j]));
+        localsplus[j] = args[j];
     }
 
     /* Pack other positional arguments into the *args argument */
@@ -1492,18 +1520,23 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         }
         else {
             assert(args != NULL);
-            u = _PyTuple_FromArraySteal(args + n, argcount - n);
+            STACKREFS_TO_PYOBJECTS((_PyStackRef *)args, argcount, args_o);
+            if (args_o == NULL) {
+                goto fail_pre_positional;
+            }
+            u = _PyTuple_FromArraySteal((args_o + n), argcount - n);
+            STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
        }
        if (u == NULL) {
            goto fail_post_positional;
        }
-        assert(localsplus[total_args] == NULL);
-        localsplus[total_args] = u;
+        assert(PyStackRef_AsPyObjectBorrow(localsplus[total_args]) == NULL);
+        localsplus[total_args] = PyStackRef_FromPyObjectSteal(u);
     }
     else if (argcount > n) {
         /* Too many positional args. Error is reported later */
         for (j = n; j < argcount; j++) {
-            Py_DECREF(args[j]);
+            PyStackRef_CLOSE(args[j]);
         }
     }
@@ -1513,7 +1546,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         for (i = 0; i < kwcount; i++) {
             PyObject **co_varnames;
             PyObject *keyword = PyTuple_GET_ITEM(kwnames, i);
-            PyObject *value = args[i+argcount];
+            _PyStackRef value_stackref = args[i+argcount];
             Py_ssize_t j;
 
             if (keyword == NULL || !PyUnicode_Check(keyword)) {
@@ -1586,27 +1619,26 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
                 goto kw_fail;
             }
 
-            if (PyDict_SetItem(kwdict, keyword, value) == -1) {
+            if (PyDict_SetItem(kwdict, keyword, PyStackRef_AsPyObjectSteal(value_stackref)) == -1) {
                 goto kw_fail;
             }
-            Py_DECREF(value);
+            PyStackRef_CLOSE(value_stackref);
             continue;
 
         kw_fail:
             for (;i < kwcount; i++) {
-                PyObject *value = args[i+argcount];
-                Py_DECREF(value);
+                PyStackRef_CLOSE(args[i+argcount]);
             }
             goto fail_post_args;
 
         kw_found:
-            if (localsplus[j] != NULL) {
+            if (PyStackRef_AsPyObjectBorrow(localsplus[j]) != NULL) {
                 _PyErr_Format(tstate, PyExc_TypeError,
                               "%U() got multiple values for argument '%S'",
                               func->func_qualname, keyword);
                 goto kw_fail;
             }
-            localsplus[j] = value;
+            localsplus[j] = value_stackref;
         }
     }
@@ -1623,7 +1655,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         Py_ssize_t m = co->co_argcount - defcount;
         Py_ssize_t missing = 0;
         for (i = argcount; i < m; i++) {
-            if (localsplus[i] == NULL) {
+            if (PyStackRef_IsNull(localsplus[i])) {
                 missing++;
             }
         }
@@ -1639,9 +1671,9 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         if (defcount) {
             PyObject **defs = &PyTuple_GET_ITEM(func->func_defaults, 0);
             for (; i < defcount; i++) {
-                if (localsplus[m+i] == NULL) {
+                if (PyStackRef_AsPyObjectBorrow(localsplus[m+i]) == NULL) {
                     PyObject *def = defs[i];
-                    localsplus[m+i] = Py_NewRef(def);
+                    localsplus[m+i] = PyStackRef_FromPyObjectNew(def);
                 }
             }
         }
@@ -1651,7 +1683,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
     if (co->co_kwonlyargcount > 0) {
         Py_ssize_t missing = 0;
         for (i = co->co_argcount; i < total_args; i++) {
-            if (localsplus[i] != NULL)
+            if (PyStackRef_AsPyObjectBorrow(localsplus[i]) != NULL)
                 continue;
             PyObject *varname = PyTuple_GET_ITEM(co->co_localsplusnames, i);
             if (func->func_kwdefaults != NULL) {
@@ -1660,7 +1692,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
                 goto fail_post_args;
             }
             if (def) {
-                localsplus[i] = def;
+                localsplus[i] = PyStackRef_FromPyObjectSteal(def);
                 continue;
             }
         }
@@ -1676,14 +1708,14 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
 
 fail_pre_positional:
     for (j = 0; j < argcount; j++) {
-        Py_DECREF(args[j]);
+        PyStackRef_CLOSE(args[j]);
     }
     /* fall through */
 fail_post_positional:
     if (kwnames) {
         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
         for (j = argcount; j < argcount+kwcount; j++) {
-            Py_DECREF(args[j]);
+            PyStackRef_CLOSE(args[j]);
         }
     }
     /* fall through */
@@ -1738,7 +1770,7 @@ _PyEval_FrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame)
 /* Consumes references to func, locals and all the args */
 _PyInterpreterFrame *
 _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
-                        PyObject *locals, PyObject* const* args,
+                        PyObject *locals, _PyStackRef const* args,
                         size_t argcount, PyObject *kwnames)
 {
     PyCodeObject * code = (PyCodeObject *)func->func_code;
@@ -1759,18 +1791,45 @@ fail:
     Py_DECREF(func);
     Py_XDECREF(locals);
     for (size_t i = 0; i < argcount; i++) {
-        Py_DECREF(args[i]);
+        PyStackRef_CLOSE(args[i]);
     }
     if (kwnames) {
         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
         for (Py_ssize_t i = 0; i < kwcount; i++) {
-            Py_DECREF(args[i+argcount]);
+            PyStackRef_CLOSE(args[i+argcount]);
         }
     }
     PyErr_NoMemory();
     return NULL;
 }
 
+static _PyInterpreterFrame *
+_PyEvalFramePushAndInit_UnTagged(PyThreadState *tstate, PyFunctionObject *func,
+                                 PyObject *locals, PyObject *const* args,
+                                 size_t argcount, PyObject *kwnames)
+{
+#if defined(Py_GIL_DISABLED)
+    size_t kw_count = kwnames == NULL ? 0 : PyTuple_GET_SIZE(kwnames);
+    size_t total_argcount = argcount + kw_count;
+    _PyStackRef *tagged_args_buffer = PyMem_Malloc(sizeof(_PyStackRef) * total_argcount);
+    if (tagged_args_buffer == NULL) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+    for (size_t i = 0; i < argcount; i++) {
+        tagged_args_buffer[i] = PyStackRef_FromPyObjectSteal(args[i]);
+    }
+    for (size_t i = 0; i < kw_count; i++) {
+        tagged_args_buffer[argcount + i] = PyStackRef_FromPyObjectSteal(args[argcount + i]);
+    }
+    _PyInterpreterFrame *res = _PyEvalFramePushAndInit(tstate, func, locals, (_PyStackRef const *)tagged_args_buffer, argcount, kwnames);
+    PyMem_Free(tagged_args_buffer);
+    return res;
+#else
+    return _PyEvalFramePushAndInit(tstate, func, locals, (_PyStackRef const *)args, argcount, kwnames);
+#endif
+}
+
 /* Same as _PyEvalFramePushAndInit but takes an args tuple and kwargs dict.
    Steals references to func, callargs and kwargs.
 */
@@ -1795,7 +1854,7 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func,
             Py_INCREF(PyTuple_GET_ITEM(callargs, i));
         }
     }
-    _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
+    _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_UnTagged(
         tstate, (PyFunctionObject *)func, locals,
         newargs, nargs, kwnames
     );
@@ -1833,7 +1892,7 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
             Py_INCREF(args[i+argcount]);
         }
     }
-    _PyInterpreterFrame *frame = _PyEvalFramePushAndInit(
+    _PyInterpreterFrame *frame = _PyEvalFramePushAndInit_UnTagged(
         tstate, func, locals, args, argcount, kwnames);
     if (frame == NULL) {
         return NULL;
@@ -2085,8 +2144,8 @@ _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type,
 */
 
 int
-_PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
-                       int argcnt, int argcntafter, PyObject **sp)
+_PyEval_UnpackIterableStackRef(PyThreadState *tstate, _PyStackRef v_stackref,
+                               int argcnt, int argcntafter, _PyStackRef *sp)
 {
     int i = 0, j = 0;
     Py_ssize_t ll = 0;
@@ -2094,6 +2153,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
     PyObject *w;
     PyObject *l = NULL; /* variable list */
 
+    PyObject *v = PyStackRef_AsPyObjectBorrow(v_stackref);
     assert(v != NULL);
 
     it = PyObject_GetIter(v);
@@ -2128,7 +2188,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
             }
             goto Error;
         }
-        *--sp = w;
+        *--sp = PyStackRef_FromPyObjectSteal(w);
     }
 
     if (argcntafter == -1) {
@@ -2150,7 +2210,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
     l = PySequence_List(it);
     if (l == NULL)
         goto Error;
-    *--sp = l;
+    *--sp = PyStackRef_FromPyObjectSteal(l);
     i++;
 
     ll = PyList_GET_SIZE(l);
@@ -2163,7 +2223,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
 
     /* Pop the "after-variable" args off the list. */
     for (j = argcntafter; j > 0; j--, i++) {
-        *--sp = PyList_GET_ITEM(l, ll - j);
+        *--sp = PyStackRef_FromPyObjectSteal(PyList_GET_ITEM(l, ll - j));
     }
     /* Resize the list. */
     Py_SET_SIZE(l, ll - argcntafter);
@@ -2171,8 +2231,9 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
     return 1;
 
 Error:
-    for (; i > 0; i--, sp++)
-        Py_DECREF(*sp);
+    for (; i > 0; i--, sp++) {
+        PyStackRef_CLOSE(*sp);
+    }
     Py_XDECREF(it);
     return 0;
 }
@@ -264,9 +264,9 @@ GETITEM(PyObject *v, Py_ssize_t i) {
    This is because it is possible that during the DECREF the frame is
    accessed by other code (e.g. a __del__ method or gc.collect()) and the
    variable would be pointing to already-freed memory. */
-#define SETLOCAL(i, value)      do { PyObject *tmp = GETLOCAL(i); \
+#define SETLOCAL(i, value)      do { _PyStackRef tmp = GETLOCAL(i); \
                                      GETLOCAL(i) = value; \
-                                     Py_XDECREF(tmp); } while (0)
+                                     PyStackRef_XCLOSE(tmp); } while (0)
 
 #define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op)
 
@@ -449,3 +449,34 @@ do { \
 #define EXIT_TO_TRACE() goto exit_to_trace
 #define EXIT_TO_TIER1() goto exit_to_tier1
 #define EXIT_TO_TIER1_DYNAMIC() goto exit_to_tier1_dynamic;
+
+/* Stackref macros */
+
+/* How much scratch space to give stackref to PyObject* conversion. */
+#define MAX_STACKREF_SCRATCH 10
+
+#ifdef Py_GIL_DISABLED
+#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
+    /* +1 because vectorcall might use -1 to write self */ \
+    PyObject *NAME##_temp[MAX_STACKREF_SCRATCH+1]; \
+    PyObject **NAME = _PyObjectArray_FromStackRefArray(ARGS, ARG_COUNT, NAME##_temp + 1);
+#else
+#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
+    PyObject **NAME = (PyObject **)ARGS; \
+    assert(NAME != NULL);
+#endif
+
+#ifdef Py_GIL_DISABLED
+#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
+    /* +1 because we +1 previously */ \
+    _PyObjectArray_Free(NAME - 1, NAME##_temp);
+#else
+#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
+    (void)(NAME);
+#endif
+
+#ifdef Py_GIL_DISABLED
+#define CONVERSION_FAILED(NAME) ((NAME) == NULL)
+#else
+#define CONVERSION_FAILED(NAME) (0)
+#endif
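The three conversion macros above are meant to be used as a unit around a call that needs a PyObject * view of stackref arguments, exactly as initialize_locals does with args_o earlier in this diff. A minimal illustrative sketch of that pattern (the macro form of the same bridging shown after the ceval.c helper above; the call site itself is hypothetical):

    /* Hypothetical call site using the macro form of the conversion. */
    static PyObject *
    call_with_stackref_args(PyObject *callable, _PyStackRef *args, Py_ssize_t nargs)
    {
        STACKREFS_TO_PYOBJECTS(args, nargs, args_o);   // may spill to the heap
        if (CONVERSION_FAILED(args_o)) {
            return PyErr_NoMemory();
        }
        PyObject *res = PyObject_Vectorcall(callable, args_o, nargs, NULL);
        STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);        // frees any heap copy
        return res;
    }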
Python/executor_cases.c.h (generated, 2767 changes; diff suppressed because it is too large)
@@ -16,11 +16,11 @@ _PyFrame_Traverse(_PyInterpreterFrame *frame, visitproc visit, void *arg)
     Py_VISIT(frame->f_funcobj);
     Py_VISIT(_PyFrame_GetCode(frame));
     /* locals */
-    PyObject **locals = _PyFrame_GetLocalsArray(frame);
+    _PyStackRef *locals = _PyFrame_GetLocalsArray(frame);
     int i = 0;
     /* locals and stack */
     for (; i <frame->stacktop; i++) {
-        Py_VISIT(locals[i]);
+        Py_VISIT(PyStackRef_AsPyObjectBorrow(locals[i]));
     }
     return 0;
 }
@@ -101,7 +101,7 @@ _PyFrame_ClearLocals(_PyInterpreterFrame *frame)
     int stacktop = frame->stacktop;
     frame->stacktop = 0;
     for (int i = 0; i < stacktop; i++) {
-        Py_XDECREF(frame->localsplus[i]);
+        PyStackRef_XCLOSE(frame->localsplus[i]);
     }
     Py_CLEAR(frame->f_locals);
 }
Python/generated_cases.c.h (generated, 3211 changes; diff suppressed because it is too large)
@@ -196,7 +196,7 @@ _Py_SetTier2Optimizer(_PyOptimizerObject *optimizer)
 int
 _PyOptimizer_Optimize(
     _PyInterpreterFrame *frame, _Py_CODEUNIT *start,
-    PyObject **stack_pointer, _PyExecutorObject **executor_ptr)
+    _PyStackRef *stack_pointer, _PyExecutorObject **executor_ptr)
 {
     PyCodeObject *code = _PyFrame_GetCode(frame);
     assert(PyCode_Check(code));
@@ -1393,7 +1393,7 @@ counter_optimize(
     _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg;
     _PyUOpInstruction buffer[4] = {
         { .opcode = _START_EXECUTOR, .jump_target = 3, .format=UOP_FORMAT_JUMP },
-        { .opcode = _LOAD_CONST_INLINE_BORROW, .operand = (uintptr_t)self },
+        { .opcode = _LOAD_CONST_INLINE, .operand = (uintptr_t)self },
         { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER },
         { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)), .format=UOP_FORMAT_TARGET }
     };
Python/optimizer_cases.c.h (generated, 18 changes)
@@ -955,9 +955,9 @@
         /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */
 
         case _LOAD_SUPER_ATTR_ATTR: {
-            _Py_UopsSymbol *attr;
-            attr = sym_new_not_null(ctx);
-            stack_pointer[-3] = attr;
+            _Py_UopsSymbol *attr_st;
+            attr_st = sym_new_not_null(ctx);
+            stack_pointer[-3] = attr_st;
             stack_pointer += -2;
             assert(WITHIN_STACK_BOUNDS());
             break;
@@ -1319,9 +1319,9 @@
         }
 
         case _GET_LEN: {
-            _Py_UopsSymbol *len_o;
-            len_o = sym_new_not_null(ctx);
-            stack_pointer[0] = len_o;
+            _Py_UopsSymbol *len;
+            len = sym_new_not_null(ctx);
+            stack_pointer[0] = len;
             stack_pointer += 1;
             assert(WITHIN_STACK_BOUNDS());
             break;
@@ -1898,9 +1898,9 @@
         }
 
         case _SET_FUNCTION_ATTRIBUTE: {
-            _Py_UopsSymbol *func;
-            func = sym_new_not_null(ctx);
-            stack_pointer[-2] = func;
+            _Py_UopsSymbol *func_st;
+            func_st = sym_new_not_null(ctx);
+            stack_pointer[-2] = func_st;
             stack_pointer += -1;
             assert(WITHIN_STACK_BOUNDS());
             break;
@@ -679,7 +679,10 @@ specialize_module_load_attr(
 /* Attribute specialization */
 
 void
-_Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, _Py_CODEUNIT *instr, int load_method) {
+_Py_Specialize_LoadSuperAttr(_PyStackRef global_super_st, _PyStackRef cls_st, _Py_CODEUNIT *instr, int load_method) {
+    PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st);
+    PyObject *cls = PyStackRef_AsPyObjectBorrow(cls_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[LOAD_SUPER_ATTR] == INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR);
     _PySuperAttrCache *cache = (_PySuperAttrCache *)(instr + 1);
@@ -885,8 +888,10 @@ static int specialize_attr_loadclassattr(PyObject* owner, _Py_CODEUNIT* instr, P
 static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name);
 
 void
-_Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
+_Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *name)
 {
+    PyObject *owner = PyStackRef_AsPyObjectBorrow(owner_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[LOAD_ATTR] == INLINE_CACHE_ENTRIES_LOAD_ATTR);
     _PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
@@ -1081,8 +1086,10 @@ success:
 }
 
 void
-_Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
+_Py_Specialize_StoreAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *name)
 {
+    PyObject *owner = PyStackRef_AsPyObjectBorrow(owner_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[STORE_ATTR] == INLINE_CACHE_ENTRIES_STORE_ATTR);
     _PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
@@ -1521,8 +1528,11 @@ type_get_version(PyTypeObject *t, int opcode)
 
 void
 _Py_Specialize_BinarySubscr(
-     PyObject *container, PyObject *sub, _Py_CODEUNIT *instr)
+     _PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr)
 {
+    PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
+    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[BINARY_SUBSCR] ==
         INLINE_CACHE_ENTRIES_BINARY_SUBSCR);
@@ -1621,8 +1631,11 @@ success:
 }
 
 void
-_Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *instr)
+_Py_Specialize_StoreSubscr(_PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr)
 {
+    PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
+    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
+
     assert(ENABLE_SPECIALIZATION);
     _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)(instr + 1);
     PyTypeObject *container_type = Py_TYPE(container);
@@ -1939,8 +1952,10 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs)
 }
 
 void
-_Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs)
+_Py_Specialize_Call(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs)
 {
+    PyObject *callable = PyStackRef_AsPyObjectBorrow(callable_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL);
     assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL);
@@ -2056,9 +2071,11 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs)
 #endif   // Py_STATS
 
 void
-_Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
-                        int oparg, PyObject **locals)
+_Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr,
+                        int oparg, _PyStackRef *locals)
 {
+    PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st);
+    PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP);
     _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1);
@@ -2071,7 +2088,7 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
         if (PyUnicode_CheckExact(lhs)) {
             _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_BINARY_OP + 1];
             bool to_store = (next.op.code == STORE_FAST);
-            if (to_store && locals[next.op.arg] == lhs) {
+            if (to_store && PyStackRef_AsPyObjectBorrow(locals[next.op.arg]) == lhs) {
                 instr->op.code = BINARY_OP_INPLACE_ADD_UNICODE;
                 goto success;
             }
@@ -2163,9 +2180,12 @@ compare_op_fail_kind(PyObject *lhs, PyObject *rhs)
 #endif   // Py_STATS
 
 void
-_Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
+_Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr,
                          int oparg)
 {
+    PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st);
+    PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
     // All of these specializations compute boolean values, so they're all valid
@@ -2226,8 +2246,10 @@ unpack_sequence_fail_kind(PyObject *seq)
 #endif   // Py_STATS
 
 void
-_Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_UnpackSequence(_PyStackRef seq_st, _Py_CODEUNIT *instr, int oparg)
 {
+    PyObject *seq = PyStackRef_AsPyObjectBorrow(seq_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[UNPACK_SEQUENCE] ==
         INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE);
@@ -2337,12 +2359,12 @@ int
 #endif   // Py_STATS
 
 void
-_Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg)
 {
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
     _PyForIterCache *cache = (_PyForIterCache *)(instr + 1);
-    PyTypeObject *tp = Py_TYPE(iter);
+    PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(iter));
     if (tp == &PyListIter_Type) {
         instr->op.code = FOR_ITER_LIST;
         goto success;
@@ -2379,8 +2401,10 @@ success:
 }
 
 void
-_Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr)
+_Py_Specialize_Send(_PyStackRef receiver_st, _Py_CODEUNIT *instr)
 {
+    PyObject *receiver = PyStackRef_AsPyObjectBorrow(receiver_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[SEND] == INLINE_CACHE_ENTRIES_SEND);
     _PySendCache *cache = (_PySendCache *)(instr + 1);
@@ -2406,11 +2430,12 @@ success:
 }
 
 void
-_Py_Specialize_ToBool(PyObject *value, _Py_CODEUNIT *instr)
+_Py_Specialize_ToBool(_PyStackRef value_o, _Py_CODEUNIT *instr)
 {
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[TO_BOOL] == INLINE_CACHE_ENTRIES_TO_BOOL);
     _PyToBoolCache *cache = (_PyToBoolCache *)(instr + 1);
+    PyObject *value = PyStackRef_AsPyObjectBorrow(value_o);
     if (PyBool_Check(value)) {
         instr->op.code = TO_BOOL_BOOL;
         goto success;
@@ -2520,8 +2545,10 @@ static int containsop_fail_kind(PyObject *value) {
 #endif   // Py_STATS
 
 void
-_Py_Specialize_ContainsOp(PyObject *value, _Py_CODEUNIT *instr)
+_Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr)
 {
+    PyObject *value = PyStackRef_AsPyObjectBorrow(value_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[CONTAINS_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
     _PyContainsOpCache *cache = (_PyContainsOpCache *)(instr + 1);
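All of the specializer changes above follow one calling convention: the _Py_Specialize_* entry points now receive the _PyStackRef operand straight off the evaluation stack and immediately borrow the underlying PyObject * themselves. A hypothetical sketch of what that looks like from the caller's side (the real call sites live in Python/bytecodes.c, whose diff is suppressed above):

    /* Hypothetical caller: the operand stays owned by the evaluation stack;
     * the specializer only borrows it via PyStackRef_AsPyObjectBorrow. */
    static void
    specialize_top_of_stack_attr(_PyStackRef *stack_pointer, _Py_CODEUNIT *next_instr,
                                 PyObject *name)
    {
        _PyStackRef owner = stack_pointer[-1];              // not dup'ed, not closed
        _Py_Specialize_LoadAttr(owner, next_instr, name);   // borrows owner
    }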