Mirror of https://github.com/python/cpython.git, synced 2025-08-03 16:39:00 +00:00.
bpo-46409: Make generators in bytecode (GH-30633)
* Add RETURN_GENERATOR and JUMP_NO_INTERRUPT opcodes.
* Trim the frame and generator objects by one word each.
* Minor refactor of frame.c.
* Update test.test_sys to account for smaller frames.
* Treat generator functions as normal functions when evaluating and specializing.
This commit is contained in: parent d05a66339b, commit b04dfbbe4b.
18 changed files with 236 additions and 205 deletions.
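Before this change a generator function was handled by a dedicated C path (make_coro, removed further down in this diff) that built the generator before any bytecode ran; afterwards the compiler emits a RETURN_GENERATOR prefix and the normal frame-evaluation path creates the generator itself. The user-visible behaviour stays the same and can be sketched in plain Python (ordinary generator semantics, shown only for orientation):

    def squares(n):
        for i in range(n):
            yield i * i          # the body runs only when the generator is iterated

    gen = squares(3)             # the call itself just returns a generator object
    assert list(gen) == [0, 1, 4]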
Python/ceval.c | 197
@@ -1345,7 +1345,7 @@ eval_frame_handle_pending(PyThreadState *tstate)

#define CHECK_EVAL_BREAKER() \
    if (_Py_atomic_load_relaxed(eval_breaker)) { \
        goto check_eval_breaker; \
        goto handle_eval_breaker; \
    }
@@ -1620,12 +1620,6 @@ trace_function_exit(PyThreadState *tstate, InterpreterFrame *frame, PyObject *re
    return 0;
}

static PyObject *
make_coro(PyThreadState *tstate, PyFunctionObject *func,
          PyObject *locals,
          PyObject* const* args, size_t argcount,
          PyObject *kwnames);

static int
skip_backwards_over_extended_args(PyCodeObject *code, int offset)
{
@@ -1760,49 +1754,21 @@ resume_frame:
    assert(!_PyErr_Occurred(tstate));
#endif

check_eval_breaker:
    {
        assert(STACK_LEVEL() >= 0); /* else underflow */
        assert(STACK_LEVEL() <= frame->f_code->co_stacksize); /* else overflow */
        assert(!_PyErr_Occurred(tstate));

        /* Do periodic things. Doing this every time through
           the loop would add too much overhead, so we do it
           only every Nth instruction. We also do it if
           ``pending.calls_to_do'' is set, i.e. when an asynchronous
           event needs attention (e.g. a signal handler or
           async I/O handler); see Py_AddPendingCall() and
           Py_MakePendingCalls() above. */

        if (_Py_atomic_load_relaxed(eval_breaker)) {
            opcode = _Py_OPCODE(*next_instr);
            if (opcode != BEFORE_ASYNC_WITH &&
                opcode != SEND &&
                _Py_OPCODE(next_instr[-1]) != SEND) {
                /* A few cases where we skip running signal handlers and other
                   pending calls:
                   - If we're about to enter the 'with:'. It will prevent
                     emitting a resource warning in the common idiom
                     'with open(path) as file:'.
                   - If we're about to enter the 'async with:'.
                   - If we're about to enter the 'try:' of a try/finally (not
                     *very* useful, but might help in some cases and it's
                     traditional)
                   - If we're resuming a chain of nested 'yield from' or
                     'await' calls, then each frame is parked with YIELD_FROM
                     as its next opcode. If the user hit control-C we want to
                     wait until we've reached the innermost frame before
                     running the signal handler and raising KeyboardInterrupt
                     (see bpo-30039).
                */
                if (eval_frame_handle_pending(tstate) != 0) {
                    goto error;
                }
            }
        }

        DISPATCH();

handle_eval_breaker:

    /* Do periodic things, like check for signals and async I/O.
     * We need to do this reasonably frequently, but not too frequently.
     * All loops should include a check of the eval breaker.
     * We also check on return from any builtin function.
     */
    if (eval_frame_handle_pending(tstate) != 0) {
        goto error;
    }
    DISPATCH();

    {
        /* Start instructions */
#if USE_COMPUTED_GOTOS
        {
@@ -1834,6 +1800,9 @@ check_eval_breaker:
            next_instr = first_instr + nexti;
        }
        frame->f_state = FRAME_EXECUTING;
        if (_Py_atomic_load_relaxed(eval_breaker) && oparg < 2) {
            goto handle_eval_breaker;
        }
        DISPATCH();
    }

@@ -4152,6 +4121,17 @@ check_eval_breaker:
        DISPATCH();
    }

    TARGET(JUMP_NO_INTERRUPT) {
        /* This bytecode is used in the `yield from` or `await` loop.
         * If there is an interrupt, we want it handled in the innermost
         * generator or coroutine, so we deliberately do not check it here.
         * (see bpo-30039).
         */
        frame->f_state = FRAME_EXECUTING;
        JUMPTO(oparg);
        DISPATCH();
    }

    TARGET(JUMP_ABSOLUTE_QUICK) {
        assert(oparg < INSTR_OFFSET());
        JUMPTO(oparg);
@@ -4627,28 +4607,25 @@ check_eval_breaker:
        // Check if the call can be inlined or not
        if (Py_TYPE(function) == &PyFunction_Type && tstate->interp->eval_frame == NULL) {
            int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags;
            int is_generator = code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR);
            if (!is_generator) {
                PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : PyFunction_GET_GLOBALS(function);
                STACK_SHRINK(oparg);
                InterpreterFrame *new_frame = _PyEvalFramePushAndInit(
                    tstate, (PyFunctionObject *)function, locals,
                    stack_pointer, nargs, kwnames
                );
                STACK_SHRINK(postcall_shrink);
                RESET_STACK_ADJUST_FOR_CALLS;
                // The frame has stolen all the arguments from the stack,
                // so there is no need to clean them up.
                Py_XDECREF(kwnames);
                Py_DECREF(function);
                if (new_frame == NULL) {
                    goto error;
                }
                _PyFrame_SetStackPointer(frame, stack_pointer);
                new_frame->previous = frame;
                cframe.current_frame = frame = new_frame;
                goto start_frame;
            PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : PyFunction_GET_GLOBALS(function);
            STACK_SHRINK(oparg);
            InterpreterFrame *new_frame = _PyEvalFramePushAndInit(
                tstate, (PyFunctionObject *)function, locals,
                stack_pointer, nargs, kwnames
            );
            STACK_SHRINK(postcall_shrink);
            RESET_STACK_ADJUST_FOR_CALLS;
            // The frame has stolen all the arguments from the stack,
            // so there is no need to clean them up.
            Py_XDECREF(kwnames);
            Py_DECREF(function);
            if (new_frame == NULL) {
                goto error;
            }
            _PyFrame_SetStackPointer(frame, stack_pointer);
            new_frame->previous = frame;
            cframe.current_frame = frame = new_frame;
            goto start_frame;
        }
        /* Callable is not a normal Python function */
        PyObject *res;
@@ -5076,6 +5053,40 @@ check_eval_breaker:
        DISPATCH();
    }

    TARGET(RETURN_GENERATOR) {
        PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(frame->f_func);
        if (gen == NULL) {
            goto error;
        }
        assert(EMPTY());
        _PyFrame_SetStackPointer(frame, stack_pointer);
        InterpreterFrame *gen_frame = (InterpreterFrame *)gen->gi_iframe;
        _PyFrame_Copy(frame, gen_frame);
        assert(frame->frame_obj == NULL);
        gen->gi_frame_valid = 1;
        gen_frame->is_generator = true;
        gen_frame->f_state = FRAME_CREATED;
        _Py_LeaveRecursiveCall(tstate);
        if (!frame->is_entry) {
            InterpreterFrame *prev = frame->previous;
            _PyThreadState_PopFrame(tstate, frame);
            frame = cframe.current_frame = prev;
            _PyFrame_StackPush(frame, (PyObject *)gen);
            goto resume_frame;
        }
        /* Make sure that frame is in a valid state */
        frame->stacktop = 0;
        frame->f_locals = NULL;
        Py_INCREF(frame->f_func);
        Py_INCREF(frame->f_code);
        /* Restore previous cframe and return. */
        tstate->cframe = cframe.previous;
        tstate->cframe->use_tracing = cframe.use_tracing;
        assert(tstate->cframe->current_frame == frame->previous);
        assert(!_PyErr_Occurred(tstate));
        return (PyObject *)gen;
    }

    TARGET(BUILD_SLICE) {
        PyObject *start, *stop, *step, *slice;
        if (oparg == 3)
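A hedged illustration of what the RETURN_GENERATOR handler above achieves, seen from Python: the generator returned to the caller already owns its own created-but-not-started frame, and the function body has not begun executing. This uses only documented generator attributes and runs on any recent CPython:

    def gen():
        print("body started")        # not printed until the first resume
        yield 1

    g = gen()                        # the call returns without running the body
    assert g.gi_frame is not None    # the generator carries its own suspended frame
    assert not g.gi_running
    assert next(g) == 1              # the first resume runs the body up to the yield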
@@ -5222,11 +5233,14 @@ check_eval_breaker:
        frame->f_lasti = INSTR_OFFSET();
        TRACING_NEXTOPARG();
        if (opcode == RESUME) {
            if (oparg < 2) {
                CHECK_EVAL_BREAKER();
            }
            /* Call tracing */
            TRACE_FUNCTION_ENTRY();
            DTRACE_FUNCTION_ENTRY();
        }
        else {
        else if (frame->f_state > FRAME_CREATED) {
            /* line-by-line tracing support */
            if (PyDTrace_LINE_ENABLED()) {
                maybe_dtrace_line(frame, &tstate->trace_info, instr_prev);
@@ -5961,33 +5975,6 @@ fail_post_args:
    return -1;
}

/* Consumes all the references to the args */
static PyObject *
make_coro(PyThreadState *tstate, PyFunctionObject *func,
          PyObject *locals,
          PyObject* const* args, size_t argcount,
          PyObject *kwnames)
{
    assert (((PyCodeObject *)func->func_code)->co_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR));
    PyObject *gen = _Py_MakeCoro(func);
    if (gen == NULL) {
        return NULL;
    }
    InterpreterFrame *frame = (InterpreterFrame *)((PyGenObject *)gen)->gi_iframe;
    PyCodeObject *code = (PyCodeObject *)func->func_code;
    _PyFrame_InitializeSpecials(frame, func, locals, code->co_nlocalsplus);
    for (int i = 0; i < code->co_nlocalsplus; i++) {
        frame->localsplus[i] = NULL;
    }
    ((PyGenObject *)gen)->gi_frame_valid = 1;
    if (initialize_locals(tstate, func, frame->localsplus, args, argcount, kwnames)) {
        Py_DECREF(gen);
        return NULL;
    }
    frame->generator = gen;
    return gen;
}

/* Consumes all the references to the args */
static InterpreterFrame *
_PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
@@ -6041,10 +6028,7 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
                PyObject* const* args, size_t argcount,
                PyObject *kwnames)
{
    PyCodeObject *code = (PyCodeObject *)func->func_code;
    /* _PyEvalFramePushAndInit and make_coro consume
     * all the references to their arguments
     */
    /* _PyEvalFramePushAndInit consumes all the references to its arguments */
    for (size_t i = 0; i < argcount; i++) {
        Py_INCREF(args[i]);
    }
@@ -6054,19 +6038,16 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
            Py_INCREF(args[i+argcount]);
        }
    }
    int is_coro = code->co_flags &
        (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR);
    if (is_coro) {
        return make_coro(tstate, func, locals, args, argcount, kwnames);
    }
    InterpreterFrame *frame = _PyEvalFramePushAndInit(
        tstate, func, locals, args, argcount, kwnames);
    if (frame == NULL) {
        return NULL;
    }
    PyObject *retval = _PyEval_EvalFrame(tstate, frame, 0);
    assert(frame->stacktop >= 0);
    assert(_PyFrame_GetStackPointer(frame) == _PyFrame_Stackbase(frame));
    assert(
        _PyFrame_GetStackPointer(frame) == _PyFrame_Stackbase(frame) ||
        _PyFrame_GetStackPointer(frame) == frame->localsplus
    );
    _PyEvalFrameClearAndPop(tstate, frame);
    return retval;
}
Python/compile.c
@@ -969,6 +969,7 @@ stack_effect(int opcode, int oparg, int jump)
        /* Jumps */
        case JUMP_FORWARD:
        case JUMP_ABSOLUTE:
        case JUMP_NO_INTERRUPT:
            return 0;

        case JUMP_IF_TRUE_OR_POP:
@@ -1017,6 +1018,9 @@ stack_effect(int opcode, int oparg, int jump)
        case DELETE_FAST:
            return 0;

        case RETURN_GENERATOR:
            return 0;

        case RAISE_VARARGS:
            return -oparg;

@@ -1841,7 +1845,7 @@ compiler_add_yield_from(struct compiler *c, int await)
    ADDOP_JUMP(c, SEND, exit);
    compiler_use_next_block(c, resume);
    ADDOP_I(c, RESUME, await ? 3 : 2);
    ADDOP_JUMP(c, JUMP_ABSOLUTE, start);
    ADDOP_JUMP(c, JUMP_NO_INTERRUPT, start);
    compiler_use_next_block(c, exit);
    return 1;
}
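compiler_add_yield_from now closes the SEND/RESUME delegation loop with JUMP_NO_INTERRUPT instead of JUMP_ABSOLUTE, so a pending interrupt is handled only once control reaches the innermost frame (bpo-30039). A rough way to look at the emitted loop; the exact opcode names depend on the interpreter version (JUMP_NO_INTERRUPT appears only on builds containing this change and was renamed in later releases):

    import dis

    def delegate(it):
        yield from it    # compiled into the SEND / RESUME / jump loop shown above

    dis.dis(delegate)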
@@ -7055,6 +7059,7 @@ stackdepth(struct compiler *c)
        }
        depth = new_depth;
        if (instr->i_opcode == JUMP_ABSOLUTE ||
            instr->i_opcode == JUMP_NO_INTERRUPT ||
            instr->i_opcode == JUMP_FORWARD ||
            instr->i_opcode == RETURN_VALUE ||
            instr->i_opcode == RAISE_VARARGS ||
@@ -7572,9 +7577,6 @@ normalize_jumps(struct assembler *a)
            if (last->i_target->b_visited == 0) {
                last->i_opcode = JUMP_FORWARD;
            }
            else if (b->b_iused >= 2 && b->b_instr[b->b_iused-2].i_opcode == SEND) {
                last->i_opcode = JUMP_ABSOLUTE_QUICK;
            }
        }
    }
}
@@ -7998,6 +8000,34 @@ insert_prefix_instructions(struct compiler *c, basicblock *entryblock,
    }
    assert(c->u->u_firstlineno > 0);

    /* Add the generator prefix instructions. */
    if (flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) {
        struct instr make_gen = {
            .i_opcode = RETURN_GENERATOR,
            .i_oparg = 0,
            .i_lineno = c->u->u_firstlineno,
            .i_col_offset = -1,
            .i_end_lineno = c->u->u_firstlineno,
            .i_end_col_offset = -1,
            .i_target = NULL,
        };
        if (insert_instruction(entryblock, 0, &make_gen) < 0) {
            return -1;
        }
        struct instr pop_top = {
            .i_opcode = POP_TOP,
            .i_oparg = 0,
            .i_lineno = -1,
            .i_col_offset = -1,
            .i_end_lineno = -1,
            .i_end_col_offset = -1,
            .i_target = NULL,
        };
        if (insert_instruction(entryblock, 1, &pop_top) < 0) {
            return -1;
        }
    }

    /* Set up cells for any variable that escapes, to be put in a closure. */
    const int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars);
    if (ncellvars) {
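With this hunk the compiler prepends RETURN_GENERATOR followed by POP_TOP to the entry block of every generator, coroutine, and async generator. A quick, non-authoritative way to see the prefix is to disassemble a generator on a build that includes this change (older interpreters will not show it):

    import dis

    def counter(n):
        for i in range(n):
            yield i

    dis.dis(counter)    # on affected builds the output should start with RETURN_GENERATOR, POP_TOP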
@@ -8036,22 +8066,6 @@ insert_prefix_instructions(struct compiler *c, basicblock *entryblock,
        PyMem_RawFree(sorted);
    }

    /* Add the generator prefix instructions. */
    if (flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) {
        struct instr pop_top = {
            .i_opcode = POP_TOP,
            .i_oparg = 0,
            .i_lineno = -1,
            .i_col_offset = -1,
            .i_end_lineno = -1,
            .i_end_col_offset = -1,
            .i_target = NULL,
        };
        if (insert_instruction(entryblock, 0, &pop_top) < 0) {
            return -1;
        }
    }

    if (nfreevars) {
        struct instr copy_frees = {
            .i_opcode = COPY_FREE_VARS,
@@ -8801,6 +8815,7 @@ normalize_basic_block(basicblock *bb) {
            break;
        case JUMP_ABSOLUTE:
        case JUMP_FORWARD:
        case JUMP_NO_INTERRUPT:
            bb->b_nofallthrough = 1;
            /* fall through */
        case POP_JUMP_IF_NOT_NONE:
@@ -8985,6 +9000,7 @@ optimize_cfg(struct compiler *c, struct assembler *a, PyObject *consts)
        if (b->b_iused > 0) {
            struct instr *b_last_instr = &b->b_instr[b->b_iused - 1];
            if (b_last_instr->i_opcode == JUMP_ABSOLUTE ||
                b_last_instr->i_opcode == JUMP_NO_INTERRUPT ||
                b_last_instr->i_opcode == JUMP_FORWARD) {
                if (b_last_instr->i_target == b->b_next) {
                    assert(b->b_next->b_iused);
Python/frame.c
@@ -3,6 +3,7 @@
#include "frameobject.h"
#include "pycore_frame.h"
#include "pycore_object.h"   // _PyObject_GC_UNTRACK()
#include "opcode.h"

int
_PyFrame_Traverse(InterpreterFrame *frame, visitproc visit, void *arg)
@@ -51,15 +52,6 @@ _PyFrame_Copy(InterpreterFrame *src, InterpreterFrame *dest)
    memcpy(dest, src, size);
}

static inline void
clear_specials(InterpreterFrame *frame)
{
    frame->generator = NULL;
    Py_XDECREF(frame->frame_obj);
    Py_XDECREF(frame->f_locals);
    Py_DECREF(frame->f_func);
    Py_DECREF(frame->f_code);
}

static void
take_ownership(PyFrameObject *f, InterpreterFrame *frame)
@@ -94,8 +86,8 @@ void
_PyFrame_Clear(InterpreterFrame * frame)
{
    /* It is the responsibility of the owning generator/coroutine
     * to have cleared the generator pointer */
    assert(frame->generator == NULL);
     * to have cleared the enclosing generator, if any. */
    assert(!frame->is_generator);
    if (frame->frame_obj) {
        PyFrameObject *f = frame->frame_obj;
        frame->frame_obj = NULL;
@@ -110,5 +102,8 @@ _PyFrame_Clear(InterpreterFrame * frame)
    for (int i = 0; i < frame->stacktop; i++) {
        Py_XDECREF(frame->localsplus[i]);
    }
    clear_specials(frame);
    Py_XDECREF(frame->frame_obj);
    Py_XDECREF(frame->f_locals);
    Py_DECREF(frame->f_func);
    Py_DECREF(frame->f_code);
}
Python/opcode_targets.h (generated) | 16
@@ -74,19 +74,19 @@ static void *opcode_targets[256] = {
    &&TARGET_LOAD_METHOD_CACHED,
    &&TARGET_GET_AWAITABLE,
    &&TARGET_LOAD_ASSERTION_ERROR,
    &&TARGET_RETURN_GENERATOR,
    &&TARGET_LOAD_METHOD_CLASS,
    &&TARGET_LOAD_METHOD_MODULE,
    &&TARGET_LOAD_METHOD_NO_DICT,
    &&TARGET_STORE_ATTR_ADAPTIVE,
    &&TARGET_STORE_ATTR_INSTANCE_VALUE,
    &&TARGET_STORE_ATTR_SLOT,
    &&TARGET_STORE_ATTR_WITH_HINT,
    &&TARGET_LIST_TO_TUPLE,
    &&TARGET_RETURN_VALUE,
    &&TARGET_IMPORT_STAR,
    &&TARGET_SETUP_ANNOTATIONS,
    &&TARGET_YIELD_VALUE,
    &&TARGET_LOAD_FAST__LOAD_FAST,
    &&TARGET_STORE_ATTR_WITH_HINT,
    &&TARGET_PREP_RERAISE_STAR,
    &&TARGET_POP_EXCEPT,
    &&TARGET_STORE_NAME,
@@ -130,26 +130,26 @@ static void *opcode_targets[256] = {
    &&TARGET_POP_JUMP_IF_NOT_NONE,
    &&TARGET_POP_JUMP_IF_NONE,
    &&TARGET_RAISE_VARARGS,
    &&TARGET_STORE_FAST__LOAD_FAST,
    &&TARGET_LOAD_FAST__LOAD_FAST,
    &&TARGET_MAKE_FUNCTION,
    &&TARGET_BUILD_SLICE,
    &&TARGET_LOAD_FAST__LOAD_CONST,
    &&TARGET_JUMP_NO_INTERRUPT,
    &&TARGET_MAKE_CELL,
    &&TARGET_LOAD_CLOSURE,
    &&TARGET_LOAD_DEREF,
    &&TARGET_STORE_DEREF,
    &&TARGET_DELETE_DEREF,
    &&TARGET_LOAD_CONST__LOAD_FAST,
    &&TARGET_STORE_FAST__STORE_FAST,
    &&TARGET_STORE_FAST__LOAD_FAST,
    &&TARGET_LOAD_FAST__LOAD_CONST,
    &&TARGET_CALL_FUNCTION_EX,
    &&_unknown_opcode,
    &&TARGET_LOAD_CONST__LOAD_FAST,
    &&TARGET_EXTENDED_ARG,
    &&TARGET_LIST_APPEND,
    &&TARGET_SET_ADD,
    &&TARGET_MAP_ADD,
    &&TARGET_LOAD_CLASSDEREF,
    &&TARGET_COPY_FREE_VARS,
    &&_unknown_opcode,
    &&TARGET_STORE_FAST__STORE_FAST,
    &&TARGET_RESUME,
    &&TARGET_MATCH_CLASS,
    &&_unknown_opcode,
Python/specialize.c
@@ -499,7 +499,6 @@ initial_counter_value(void) {
#define SPEC_FAIL_DIFFERENT_TYPES 12

/* Calls */
#define SPEC_FAIL_GENERATOR 7
#define SPEC_FAIL_COMPLEX_PARAMETERS 8
#define SPEC_FAIL_WRONG_NUMBER_ARGUMENTS 9
#define SPEC_FAIL_CO_NOT_OPTIMIZED 10
@@ -1153,9 +1152,6 @@ _Py_IDENTIFIER(__getitem__);
static int
function_kind(PyCodeObject *code) {
    int flags = code->co_flags;
    if (flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) {
        return SPEC_FAIL_GENERATOR;
    }
    if ((flags & (CO_VARKEYWORDS | CO_VARARGS)) || code->co_kwonlyargcount) {
        return SPEC_FAIL_COMPLEX_PARAMETERS;
    }