mirror of
https://github.com/python/cpython.git
synced 2025-08-31 05:58:33 +00:00
gh-114058: Foundations of the Tier2 redundancy eliminator (GH-115085)
--------- Co-authored-by: Mark Shannon <9448417+markshannon@users.noreply.github.com> Co-authored-by: Jules <57632293+JuliaPoo@users.noreply.github.com> Co-authored-by: Guido van Rossum <gvanrossum@users.noreply.github.com>
This commit is contained in:
parent
ccc76c3e88
commit
7cce857622
25 changed files with 3137 additions and 140 deletions
|
@ -133,7 +133,7 @@ dummy_func(
|
|||
switch (opcode) {
|
||||
|
||||
// BEGIN BYTECODES //
|
||||
inst(NOP, (--)) {
|
||||
pure inst(NOP, (--)) {
|
||||
}
|
||||
|
||||
family(RESUME, 0) = {
|
||||
|
@ -411,12 +411,12 @@ dummy_func(
|
|||
// BINARY_OP_INPLACE_ADD_UNICODE, // See comments at that opcode.
|
||||
};
|
||||
|
||||
op(_GUARD_BOTH_INT, (left, right -- left: &PYLONG_TYPE, right: &PYLONG_TYPE)) {
|
||||
op(_GUARD_BOTH_INT, (left, right -- left, right)) {
|
||||
DEOPT_IF(!PyLong_CheckExact(left));
|
||||
DEOPT_IF(!PyLong_CheckExact(right));
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_MULTIPLY_INT, (left, right -- res: &PYLONG_TYPE)) {
|
||||
pure op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right);
|
||||
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
|
||||
|
@ -424,7 +424,7 @@ dummy_func(
|
|||
ERROR_IF(res == NULL, error);
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_ADD_INT, (left, right -- res: &PYLONG_TYPE)) {
|
||||
pure op(_BINARY_OP_ADD_INT, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right);
|
||||
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
|
||||
|
@ -432,7 +432,7 @@ dummy_func(
|
|||
ERROR_IF(res == NULL, error);
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_SUBTRACT_INT, (left, right -- res: &PYLONG_TYPE)) {
|
||||
pure op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right);
|
||||
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
|
||||
|
@ -447,12 +447,12 @@ dummy_func(
|
|||
macro(BINARY_OP_SUBTRACT_INT) =
|
||||
_GUARD_BOTH_INT + unused/1 + _BINARY_OP_SUBTRACT_INT;
|
||||
|
||||
op(_GUARD_BOTH_FLOAT, (left, right -- left: &PYFLOAT_TYPE, right: &PYFLOAT_TYPE)) {
|
||||
op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) {
|
||||
DEOPT_IF(!PyFloat_CheckExact(left));
|
||||
DEOPT_IF(!PyFloat_CheckExact(right));
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res: &PYFLOAT_TYPE)) {
|
||||
pure op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
double dres =
|
||||
((PyFloatObject *)left)->ob_fval *
|
||||
|
@ -460,7 +460,7 @@ dummy_func(
|
|||
DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res);
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res: &PYFLOAT_TYPE)) {
|
||||
pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
double dres =
|
||||
((PyFloatObject *)left)->ob_fval +
|
||||
|
@ -468,7 +468,7 @@ dummy_func(
|
|||
DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res);
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res: &PYFLOAT_TYPE)) {
|
||||
pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
double dres =
|
||||
((PyFloatObject *)left)->ob_fval -
|
||||
|
@ -483,12 +483,12 @@ dummy_func(
|
|||
macro(BINARY_OP_SUBTRACT_FLOAT) =
|
||||
_GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_SUBTRACT_FLOAT;
|
||||
|
||||
op(_GUARD_BOTH_UNICODE, (left, right -- left: &PYUNICODE_TYPE, right: &PYUNICODE_TYPE)) {
|
||||
op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) {
|
||||
DEOPT_IF(!PyUnicode_CheckExact(left));
|
||||
DEOPT_IF(!PyUnicode_CheckExact(right));
|
||||
}
|
||||
|
||||
pure op(_BINARY_OP_ADD_UNICODE, (left, right -- res: &PYUNICODE_TYPE)) {
|
||||
pure op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) {
|
||||
STAT_INC(BINARY_OP, hit);
|
||||
res = PyUnicode_Concat(left, right);
|
||||
_Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
|
||||
|
@ -1877,7 +1877,7 @@ dummy_func(
|
|||
something was returned by a descriptor protocol). Set
|
||||
the second element of the stack to NULL, to signal
|
||||
CALL that it's not a method call.
|
||||
NULL | meth | arg1 | ... | argN
|
||||
meth | NULL | arg1 | ... | argN
|
||||
*/
|
||||
DECREF_INPUTS();
|
||||
ERROR_IF(attr == NULL, error);
|
||||
|
@ -1901,7 +1901,7 @@ dummy_func(
|
|||
LOAD_ATTR,
|
||||
};
|
||||
|
||||
op(_GUARD_TYPE_VERSION, (type_version/2, owner -- owner: &(GUARD_TYPE_VERSION_TYPE + type_version))) {
|
||||
op(_GUARD_TYPE_VERSION, (type_version/2, owner -- owner)) {
|
||||
PyTypeObject *tp = Py_TYPE(owner);
|
||||
assert(type_version != 0);
|
||||
DEOPT_IF(tp->tp_version_tag != type_version);
|
||||
|
@ -2082,7 +2082,7 @@ dummy_func(
|
|||
DISPATCH_INLINED(new_frame);
|
||||
}
|
||||
|
||||
op(_GUARD_DORV_VALUES, (owner -- owner: &GUARD_DORV_VALUES_TYPE)) {
|
||||
op(_GUARD_DORV_VALUES, (owner -- owner)) {
|
||||
assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
|
||||
PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner);
|
||||
DEOPT_IF(!_PyDictOrValues_IsValues(dorv));
|
||||
|
@ -2711,7 +2711,7 @@ dummy_func(
|
|||
DEOPT_IF(r->len <= 0);
|
||||
}
|
||||
|
||||
op(_ITER_NEXT_RANGE, (iter -- iter, next: &PYLONG_TYPE)) {
|
||||
op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
|
||||
_PyRangeIterObject *r = (_PyRangeIterObject *)iter;
|
||||
assert(Py_TYPE(r) == &PyRangeIter_Type);
|
||||
assert(r->len > 0);
|
||||
|
@ -2869,13 +2869,13 @@ dummy_func(
|
|||
exc_info->exc_value = Py_NewRef(new_exc);
|
||||
}
|
||||
|
||||
op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner: &GUARD_DORV_VALUES_INST_ATTR_FROM_DICT_TYPE)) {
|
||||
op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner)) {
|
||||
assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
|
||||
PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner);
|
||||
DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv));
|
||||
}
|
||||
|
||||
op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner: &(GUARD_KEYS_VERSION_TYPE + keys_version))) {
|
||||
op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner)) {
|
||||
PyTypeObject *owner_cls = Py_TYPE(owner);
|
||||
PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
|
||||
DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version);
|
||||
|
@ -3090,7 +3090,7 @@ dummy_func(
|
|||
|
||||
macro(CALL) = _SPECIALIZE_CALL + unused/2 + _CALL;
|
||||
|
||||
op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable: &PYMETHOD_TYPE, null: &NULL_TYPE, unused[oparg])) {
|
||||
op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable, null, unused[oparg])) {
|
||||
DEOPT_IF(null != NULL);
|
||||
DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type);
|
||||
}
|
||||
|
@ -3108,7 +3108,7 @@ dummy_func(
|
|||
DEOPT_IF(tstate->interp->eval_frame);
|
||||
}
|
||||
|
||||
op(_CHECK_FUNCTION_EXACT_ARGS, (func_version/2, callable, self_or_null, unused[oparg] -- callable: &(PYFUNCTION_TYPE_VERSION_TYPE + func_version), self_or_null, unused[oparg])) {
|
||||
op(_CHECK_FUNCTION_EXACT_ARGS, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) {
|
||||
DEOPT_IF(!PyFunction_Check(callable));
|
||||
PyFunctionObject *func = (PyFunctionObject *)callable;
|
||||
DEOPT_IF(func->func_version != func_version);
|
||||
|
@ -4059,23 +4059,23 @@ dummy_func(
|
|||
DEOPT_IF(!current_executor->vm_data.valid);
|
||||
}
|
||||
|
||||
op(_LOAD_CONST_INLINE, (ptr/4 -- value)) {
|
||||
pure op(_LOAD_CONST_INLINE, (ptr/4 -- value)) {
|
||||
TIER_TWO_ONLY
|
||||
value = Py_NewRef(ptr);
|
||||
}
|
||||
|
||||
op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) {
|
||||
pure op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) {
|
||||
TIER_TWO_ONLY
|
||||
value = ptr;
|
||||
}
|
||||
|
||||
op(_LOAD_CONST_INLINE_WITH_NULL, (ptr/4 -- value, null)) {
|
||||
pure op(_LOAD_CONST_INLINE_WITH_NULL, (ptr/4 -- value, null)) {
|
||||
TIER_TWO_ONLY
|
||||
value = Py_NewRef(ptr);
|
||||
null = NULL;
|
||||
}
|
||||
|
||||
op(_LOAD_CONST_INLINE_BORROW_WITH_NULL, (ptr/4 -- value, null)) {
|
||||
pure op(_LOAD_CONST_INLINE_BORROW_WITH_NULL, (ptr/4 -- value, null)) {
|
||||
TIER_TWO_ONLY
|
||||
value = ptr;
|
||||
null = NULL;
|
||||
|
|
2
Python/executor_cases.c.h
generated
2
Python/executor_cases.c.h
generated
|
@ -1598,7 +1598,7 @@
|
|||
something was returned by a descriptor protocol). Set
|
||||
the second element of the stack to NULL, to signal
|
||||
CALL that it's not a method call.
|
||||
NULL | meth | arg1 | ... | argN
|
||||
meth | NULL | arg1 | ... | argN
|
||||
*/
|
||||
Py_DECREF(owner);
|
||||
if (attr == NULL) goto pop_1_error_tier_two;
|
||||
|
|
2
Python/generated_cases.c.h
generated
2
Python/generated_cases.c.h
generated
|
@ -3420,7 +3420,7 @@
|
|||
something was returned by a descriptor protocol). Set
|
||||
the second element of the stack to NULL, to signal
|
||||
CALL that it's not a method call.
|
||||
NULL | meth | arg1 | ... | argN
|
||||
meth | NULL | arg1 | ... | argN
|
||||
*/
|
||||
Py_DECREF(owner);
|
||||
if (attr == NULL) goto pop_1_error;
|
||||
|
|
|
@ -17,8 +17,6 @@
|
|||
#include "pycore_uop_metadata.h" // Uop tables
|
||||
#undef NEED_OPCODE_METADATA
|
||||
|
||||
#define UOP_MAX_TRACE_LENGTH 512
|
||||
|
||||
#define MAX_EXECUTORS_SIZE 256
|
||||
|
||||
|
||||
|
@ -308,8 +306,6 @@ BRANCH_TO_GUARD[4][2] = {
|
|||
[POP_JUMP_IF_NOT_NONE - POP_JUMP_IF_FALSE][1] = _GUARD_IS_NOT_NONE_POP,
|
||||
};
|
||||
|
||||
#define TRACE_STACK_SIZE 5
|
||||
|
||||
#define CONFIDENCE_RANGE 1000
|
||||
#define CONFIDENCE_CUTOFF 333
|
||||
|
||||
|
@ -323,10 +319,11 @@ BRANCH_TO_GUARD[4][2] = {
|
|||
|
||||
#define ADD_TO_TRACE(OPCODE, OPARG, OPERAND, TARGET) \
|
||||
DPRINTF(2, \
|
||||
" ADD_TO_TRACE(%s, %d, %" PRIu64 ")\n", \
|
||||
" ADD_TO_TRACE(%s, %d, %" PRIu64 ", %d)\n", \
|
||||
_PyUOpName(OPCODE), \
|
||||
(OPARG), \
|
||||
(uint64_t)(OPERAND)); \
|
||||
(uint64_t)(OPERAND), \
|
||||
TARGET); \
|
||||
assert(trace_length < max_length); \
|
||||
trace[trace_length].opcode = (OPCODE); \
|
||||
trace[trace_length].oparg = (OPARG); \
|
||||
|
@ -825,11 +822,13 @@ uop_optimize(
|
|||
char *uop_optimize = Py_GETENV("PYTHONUOPSOPTIMIZE");
|
||||
if (uop_optimize == NULL || *uop_optimize > '0') {
|
||||
err = _Py_uop_analyze_and_optimize(frame, buffer,
|
||||
UOP_MAX_TRACE_LENGTH, curr_stackentries, &dependencies);
|
||||
UOP_MAX_TRACE_LENGTH,
|
||||
curr_stackentries, &dependencies);
|
||||
if (err <= 0) {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
assert(err == 1);
|
||||
_PyExecutorObject *executor = make_executor_from_uops(buffer, &dependencies);
|
||||
if (executor == NULL) {
|
||||
return -1;
|
||||
|
|
|
@ -1,3 +1,14 @@
|
|||
/*
|
||||
* This file contains the support code for CPython's uops redundancy eliminator.
|
||||
* It also performs some simple optimizations.
|
||||
* It performs a traditional data-flow analysis[1] over the trace of uops.
|
||||
* Using the information gained, it chooses to emit, or skip certain instructions
|
||||
* if possible.
|
||||
*
|
||||
* [1] For information on data-flow analysis, please see
|
||||
* https://clang.llvm.org/docs/DataFlowAnalysisIntro.html
|
||||
*
|
||||
* */
|
||||
#include "Python.h"
|
||||
#include "opcode.h"
|
||||
#include "pycore_dict.h"
|
||||
|
@ -9,10 +20,355 @@
|
|||
#include "pycore_dict.h"
|
||||
#include "pycore_long.h"
|
||||
#include "cpython/optimizer.h"
|
||||
#include "pycore_optimizer.h"
|
||||
#include "pycore_object.h"
|
||||
#include "pycore_dict.h"
|
||||
#include "pycore_function.h"
|
||||
#include "pycore_uop_metadata.h"
|
||||
#include "pycore_uop_ids.h"
|
||||
#include "pycore_range.h"
|
||||
|
||||
#include <stdarg.h>
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <stddef.h>
|
||||
#include "pycore_optimizer.h"
|
||||
|
||||
// Holds locals, stack, locals, stack ... co_consts (in that order)
|
||||
#define MAX_ABSTRACT_INTERP_SIZE 4096
|
||||
|
||||
#define OVERALLOCATE_FACTOR 5
|
||||
|
||||
#define TY_ARENA_SIZE (UOP_MAX_TRACE_LENGTH * OVERALLOCATE_FACTOR)
|
||||
|
||||
// Need extras for root frame and for overflow frame (see TRACE_STACK_PUSH())
|
||||
#define MAX_ABSTRACT_FRAME_DEPTH (TRACE_STACK_SIZE + 2)
|
||||
|
||||
#ifdef Py_DEBUG
|
||||
static const char *const DEBUG_ENV = "PYTHON_OPT_DEBUG";
|
||||
static inline int get_lltrace(void) {
|
||||
char *uop_debug = Py_GETENV(DEBUG_ENV);
|
||||
int lltrace = 0;
|
||||
if (uop_debug != NULL && *uop_debug >= '0') {
|
||||
lltrace = *uop_debug - '0'; // TODO: Parse an int and all that
|
||||
}
|
||||
return lltrace;
|
||||
}
|
||||
#define DPRINTF(level, ...) \
|
||||
if (get_lltrace() >= (level)) { printf(__VA_ARGS__); }
|
||||
#else
|
||||
#define DPRINTF(level, ...)
|
||||
#endif
|
||||
|
||||
|
||||
// Flags for below.
// Parenthesized so the macros expand safely inside larger expressions
// (CERT PRE01-C); `1 << n` unparenthesized breaks under operators with
// higher precedence than `<<` (e.g. `KNOWN + 1`).
#define KNOWN      (1 << 0)  // sym->typ is meaningful
#define TRUE_CONST (1 << 1)  // sym->const_val is a genuine constant
#define IS_NULL    (1 << 2)  // value is definitely NULL
#define NOT_NULL   (1 << 3)  // value is definitely not NULL
|
||||
|
||||
// A symbolic value tracked by the abstract interpreter.
// Allocated out of ty_arena; const_val (when set) is a strong reference
// released by abstractcontext_fini() via Py_CLEAR.
typedef struct {
    // Bitwise OR of KNOWN / TRUE_CONST / IS_NULL / NOT_NULL (see flags above).
    int flags;
    // Concrete type of the value; only meaningful when KNOWN is set
    // (see sym_matches_type()).
    PyTypeObject *typ;
    // constant propagated value (might be NULL)
    PyObject *const_val;
} _Py_UOpsSymType;
|
||||
|
||||
|
||||
// One abstract call frame: views into the context's shared
// locals_and_stack area (no ownership of the symbols themselves).
typedef struct _Py_UOpsAbstractFrame {
    // Max stacklen
    int stack_len;
    int locals_len;

    // Current top of the abstract evaluation stack.
    _Py_UOpsSymType **stack_pointer;
    // Base of the stack region (== locals + locals_len).
    _Py_UOpsSymType **stack;
    // Base of the locals region within ctx->locals_and_stack.
    _Py_UOpsSymType **locals;
} _Py_UOpsAbstractFrame;
|
||||
|
||||
|
||||
// Bump allocator for _Py_UOpsSymType: slots are handed out in order by
// sym_new() and never freed individually (const_vals are cleared in bulk
// by abstractcontext_fini()).
typedef struct ty_arena {
    // Next free slot index.
    int ty_curr_number;
    // Capacity (TY_ARENA_SIZE).
    int ty_max_number;
    _Py_UOpsSymType arena[TY_ARENA_SIZE];
} ty_arena;
|
||||
|
||||
// Tier 2 types meta interpreter
// All state for one abstract-interpretation pass: a fixed-depth stack of
// abstract frames plus the shared storage their locals/stacks are carved
// out of.
typedef struct _Py_UOpsAbstractInterpContext {
    PyObject_HEAD
    // The current "executing" frame.
    _Py_UOpsAbstractFrame *frame;
    _Py_UOpsAbstractFrame frames[MAX_ABSTRACT_FRAME_DEPTH];
    int curr_frame_depth;

    // Arena for the symbolic types.
    ty_arena t_arena;

    // Bump pointer into locals_and_stack; advanced by ctx_frame_new(),
    // rewound by ctx_frame_pop().
    _Py_UOpsSymType **n_consumed;
    // One past the end of locals_and_stack (overflow check).
    _Py_UOpsSymType **limit;
    _Py_UOpsSymType *locals_and_stack[MAX_ABSTRACT_INTERP_SIZE];
} _Py_UOpsAbstractInterpContext;
|
||||
|
||||
static inline _Py_UOpsSymType* sym_new_unknown(_Py_UOpsAbstractInterpContext *ctx);
|
||||
|
||||
// Returns the new frame on success, NULL on error (the function returns a
// pointer, not an int status).
|
||||
static _Py_UOpsAbstractFrame *
|
||||
ctx_frame_new(
|
||||
_Py_UOpsAbstractInterpContext *ctx,
|
||||
PyCodeObject *co,
|
||||
_Py_UOpsSymType **localsplus_start,
|
||||
int n_locals_already_filled,
|
||||
int curr_stackentries
|
||||
)
|
||||
{
|
||||
assert(ctx->curr_frame_depth < MAX_ABSTRACT_FRAME_DEPTH);
|
||||
_Py_UOpsAbstractFrame *frame = &ctx->frames[ctx->curr_frame_depth];
|
||||
|
||||
frame->stack_len = co->co_stacksize;
|
||||
frame->locals_len = co->co_nlocalsplus;
|
||||
|
||||
frame->locals = localsplus_start;
|
||||
frame->stack = frame->locals + co->co_nlocalsplus;
|
||||
frame->stack_pointer = frame->stack + curr_stackentries;
|
||||
ctx->n_consumed = localsplus_start + (co->co_nlocalsplus + co->co_stacksize);
|
||||
if (ctx->n_consumed >= ctx->limit) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
// Initialize with the initial state of all local variables
|
||||
for (int i = n_locals_already_filled; i < co->co_nlocalsplus; i++) {
|
||||
_Py_UOpsSymType *local = sym_new_unknown(ctx);
|
||||
if (local == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
frame->locals[i] = local;
|
||||
}
|
||||
|
||||
|
||||
// Initialize the stack as well
|
||||
for (int i = 0; i < curr_stackentries; i++) {
|
||||
_Py_UOpsSymType *stackvar = sym_new_unknown(ctx);
|
||||
if (stackvar == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
frame->stack[i] = stackvar;
|
||||
}
|
||||
|
||||
return frame;
|
||||
}
|
||||
|
||||
static void
|
||||
abstractcontext_fini(_Py_UOpsAbstractInterpContext *ctx)
|
||||
{
|
||||
if (ctx == NULL) {
|
||||
return;
|
||||
}
|
||||
ctx->curr_frame_depth = 0;
|
||||
int tys = ctx->t_arena.ty_curr_number;
|
||||
for (int i = 0; i < tys; i++) {
|
||||
Py_CLEAR(ctx->t_arena.arena[i].const_val);
|
||||
}
|
||||
}
|
||||
|
||||
static int
|
||||
abstractcontext_init(
|
||||
_Py_UOpsAbstractInterpContext *ctx,
|
||||
PyCodeObject *co,
|
||||
int curr_stacklen,
|
||||
int ir_entries
|
||||
)
|
||||
{
|
||||
ctx->limit = ctx->locals_and_stack + MAX_ABSTRACT_INTERP_SIZE;
|
||||
ctx->n_consumed = ctx->locals_and_stack;
|
||||
#ifdef Py_DEBUG // Aids debugging a little. There should never be NULL in the abstract interpreter.
|
||||
for (int i = 0 ; i < MAX_ABSTRACT_INTERP_SIZE; i++) {
|
||||
ctx->locals_and_stack[i] = NULL;
|
||||
}
|
||||
#endif
|
||||
|
||||
// Setup the arena for sym expressions.
|
||||
ctx->t_arena.ty_curr_number = 0;
|
||||
ctx->t_arena.ty_max_number = TY_ARENA_SIZE;
|
||||
|
||||
// Frame setup
|
||||
ctx->curr_frame_depth = 0;
|
||||
_Py_UOpsAbstractFrame *frame = ctx_frame_new(ctx, co, ctx->n_consumed, 0, curr_stacklen);
|
||||
if (frame == NULL) {
|
||||
return -1;
|
||||
}
|
||||
ctx->curr_frame_depth++;
|
||||
ctx->frame = frame;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
ctx_frame_pop(
|
||||
_Py_UOpsAbstractInterpContext *ctx
|
||||
)
|
||||
{
|
||||
_Py_UOpsAbstractFrame *frame = ctx->frame;
|
||||
|
||||
ctx->n_consumed = frame->locals;
|
||||
ctx->curr_frame_depth--;
|
||||
assert(ctx->curr_frame_depth >= 1);
|
||||
ctx->frame = &ctx->frames[ctx->curr_frame_depth - 1];
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// Takes a borrowed reference to const_val, turns that into a strong reference.
|
||||
static _Py_UOpsSymType*
|
||||
sym_new(_Py_UOpsAbstractInterpContext *ctx,
|
||||
PyObject *const_val)
|
||||
{
|
||||
_Py_UOpsSymType *self = &ctx->t_arena.arena[ctx->t_arena.ty_curr_number];
|
||||
if (ctx->t_arena.ty_curr_number >= ctx->t_arena.ty_max_number) {
|
||||
OPT_STAT_INC(optimizer_failure_reason_no_memory);
|
||||
DPRINTF(1, "out of space for symbolic expression type\n");
|
||||
return NULL;
|
||||
}
|
||||
ctx->t_arena.ty_curr_number++;
|
||||
self->const_val = NULL;
|
||||
self->typ = NULL;
|
||||
self->flags = 0;
|
||||
|
||||
if (const_val != NULL) {
|
||||
self->const_val = Py_NewRef(const_val);
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
static inline void
|
||||
sym_set_flag(_Py_UOpsSymType *sym, int flag)
|
||||
{
|
||||
sym->flags |= flag;
|
||||
}
|
||||
|
||||
static inline void
|
||||
sym_clear_flag(_Py_UOpsSymType *sym, int flag)
|
||||
{
|
||||
sym->flags &= (~flag);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sym_has_flag(_Py_UOpsSymType *sym, int flag)
|
||||
{
|
||||
return (sym->flags & flag) != 0;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sym_is_known(_Py_UOpsSymType *sym)
|
||||
{
|
||||
return sym_has_flag(sym, KNOWN);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sym_is_not_null(_Py_UOpsSymType *sym)
|
||||
{
|
||||
return (sym->flags & (IS_NULL | NOT_NULL)) == NOT_NULL;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sym_is_null(_Py_UOpsSymType *sym)
|
||||
{
|
||||
return (sym->flags & (IS_NULL | NOT_NULL)) == IS_NULL;
|
||||
}
|
||||
|
||||
static inline void
|
||||
sym_set_type(_Py_UOpsSymType *sym, PyTypeObject *tp)
|
||||
{
|
||||
assert(PyType_Check(tp));
|
||||
sym->typ = tp;
|
||||
sym_set_flag(sym, KNOWN);
|
||||
sym_set_flag(sym, NOT_NULL);
|
||||
}
|
||||
|
||||
static inline void
|
||||
sym_set_null(_Py_UOpsSymType *sym)
|
||||
{
|
||||
sym_set_flag(sym, IS_NULL);
|
||||
sym_set_flag(sym, KNOWN);
|
||||
}
|
||||
|
||||
|
||||
static inline _Py_UOpsSymType*
|
||||
sym_new_unknown(_Py_UOpsAbstractInterpContext *ctx)
|
||||
{
|
||||
return sym_new(ctx,NULL);
|
||||
}
|
||||
|
||||
static inline _Py_UOpsSymType*
|
||||
sym_new_known_notnull(_Py_UOpsAbstractInterpContext *ctx)
|
||||
{
|
||||
_Py_UOpsSymType *res = sym_new_unknown(ctx);
|
||||
if (res == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
sym_set_flag(res, NOT_NULL);
|
||||
return res;
|
||||
}
|
||||
|
||||
static inline _Py_UOpsSymType*
|
||||
sym_new_known_type(_Py_UOpsAbstractInterpContext *ctx,
|
||||
PyTypeObject *typ)
|
||||
{
|
||||
_Py_UOpsSymType *res = sym_new(ctx,NULL);
|
||||
if (res == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
sym_set_type(res, typ);
|
||||
return res;
|
||||
}
|
||||
|
||||
// Takes a borrowed reference to const_val.
|
||||
static inline _Py_UOpsSymType*
|
||||
sym_new_const(_Py_UOpsAbstractInterpContext *ctx, PyObject *const_val)
|
||||
{
|
||||
assert(const_val != NULL);
|
||||
_Py_UOpsSymType *temp = sym_new(
|
||||
ctx,
|
||||
const_val
|
||||
);
|
||||
if (temp == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
sym_set_type(temp, Py_TYPE(const_val));
|
||||
sym_set_flag(temp, TRUE_CONST);
|
||||
sym_set_flag(temp, KNOWN);
|
||||
sym_set_flag(temp, NOT_NULL);
|
||||
return temp;
|
||||
}
|
||||
|
||||
static _Py_UOpsSymType*
|
||||
sym_new_null(_Py_UOpsAbstractInterpContext *ctx)
|
||||
{
|
||||
_Py_UOpsSymType *null_sym = sym_new_unknown(ctx);
|
||||
if (null_sym == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
sym_set_null(null_sym);
|
||||
return null_sym;
|
||||
}
|
||||
|
||||
|
||||
static inline bool
|
||||
sym_matches_type(_Py_UOpsSymType *sym, PyTypeObject *typ)
|
||||
{
|
||||
assert(typ == NULL || PyType_Check(typ));
|
||||
if (!sym_has_flag(sym, KNOWN)) {
|
||||
return false;
|
||||
}
|
||||
return sym->typ == typ;
|
||||
}
|
||||
|
||||
|
||||
static inline bool
|
||||
op_is_end(uint32_t opcode)
|
||||
{
|
||||
return opcode == _EXIT_TRACE || opcode == _JUMP_TO_TOP;
|
||||
}
|
||||
|
||||
static int
|
||||
get_mutations(PyObject* dict) {
|
||||
|
@ -199,14 +555,138 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer,
|
|||
builtins = func->func_builtins;
|
||||
break;
|
||||
}
|
||||
case _JUMP_TO_TOP:
|
||||
case _EXIT_TRACE:
|
||||
return 1;
|
||||
default:
|
||||
if (op_is_end(opcode)) {
|
||||
return 1;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#define STACK_LEVEL() ((int)(stack_pointer - ctx->frame->stack))
|
||||
|
||||
#define GETLOCAL(idx) ((ctx->frame->locals[idx]))
|
||||
|
||||
#define REPLACE_OP(INST, OP, ARG, OPERAND) \
|
||||
INST->opcode = OP; \
|
||||
INST->oparg = ARG; \
|
||||
INST->operand = OPERAND;
|
||||
|
||||
#define _LOAD_ATTR_NOT_NULL \
|
||||
do { \
|
||||
attr = sym_new_known_notnull(ctx); \
|
||||
if (attr == NULL) { \
|
||||
goto error; \
|
||||
} \
|
||||
null = sym_new_null(ctx); \
|
||||
if (null == NULL) { \
|
||||
goto error; \
|
||||
} \
|
||||
} while (0);
|
||||
|
||||
|
||||
/* 1 for success, 0 for not ready, cannot error at the moment. */
// Abstractly interpret the uop trace to eliminate redundant guards etc.
// The per-opcode transfer functions live in the included generated file
// and rely on these exact local names: ctx, oparg, opcode, this_instr,
// stack_pointer, and the labels out_of_space/error below. Do not rename.
static int
uop_redundancy_eliminator(
    PyCodeObject *co,
    _PyUOpInstruction *trace,
    int trace_len,
    int curr_stacklen
)
{

    _Py_UOpsAbstractInterpContext context;
    _Py_UOpsAbstractInterpContext *ctx = &context;

    if (abstractcontext_init(
        ctx,
        co, curr_stacklen,
        trace_len) < 0) {
        goto out_of_space;
    }

    // Walk the trace up to (not including) the first trace-ending uop.
    for (_PyUOpInstruction *this_instr = trace;
         this_instr < trace + trace_len && !op_is_end(this_instr->opcode);
         this_instr++) {

        int oparg = this_instr->oparg;
        uint32_t opcode = this_instr->opcode;

        // Local copy; written back after the case body adjusts it.
        _Py_UOpsSymType **stack_pointer = ctx->frame->stack_pointer;

        DPRINTF(3, "Abstract interpreting %s:%d ",
                _PyOpcode_uop_name[opcode],
                oparg);
        switch (opcode) {
#include "tier2_redundancy_eliminator_cases.c.h"

            default:
                DPRINTF(1, "Unknown opcode in abstract interpreter\n");
                Py_UNREACHABLE();
        }
        assert(ctx->frame != NULL);
        DPRINTF(3, " stack_level %d\n", STACK_LEVEL());
        ctx->frame->stack_pointer = stack_pointer;
        assert(STACK_LEVEL() >= 0);
    }

    abstractcontext_fini(ctx);
    return 1;

// Reached via goto from the included case bodies as well as from the
// init failure above.
out_of_space:
    DPRINTF(1, "Out of space in abstract interpreter\n");
    abstractcontext_fini(ctx);
    return 0;

// Reached only via goto from the included case bodies (e.g. the
// _LOAD_ATTR_NOT_NULL macro).
error:
    DPRINTF(1, "Encountered error in abstract interpreter\n");
    abstractcontext_fini(ctx);
    return 0;
}
|
||||
|
||||
|
||||
static void
|
||||
remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size)
|
||||
{
|
||||
int last_set_ip = -1;
|
||||
bool maybe_invalid = false;
|
||||
for (int pc = 0; pc < buffer_size; pc++) {
|
||||
int opcode = buffer[pc].opcode;
|
||||
if (opcode == _SET_IP) {
|
||||
buffer[pc].opcode = NOP;
|
||||
last_set_ip = pc;
|
||||
}
|
||||
else if (opcode == _CHECK_VALIDITY) {
|
||||
if (maybe_invalid) {
|
||||
maybe_invalid = false;
|
||||
}
|
||||
else {
|
||||
buffer[pc].opcode = NOP;
|
||||
}
|
||||
}
|
||||
else if (op_is_end(opcode)) {
|
||||
break;
|
||||
}
|
||||
else {
|
||||
if (_PyUop_Flags[opcode] & HAS_ESCAPES_FLAG) {
|
||||
maybe_invalid = true;
|
||||
if (last_set_ip >= 0) {
|
||||
buffer[last_set_ip].opcode = _SET_IP;
|
||||
}
|
||||
}
|
||||
if ((_PyUop_Flags[opcode] & HAS_ERROR_FLAG) || opcode == _PUSH_FRAME) {
|
||||
if (last_set_ip >= 0) {
|
||||
buffer[last_set_ip].opcode = _SET_IP;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
peephole_opt(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, int buffer_size)
|
||||
{
|
||||
|
@ -250,44 +730,9 @@ peephole_opt(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, int buffer_s
|
|||
}
|
||||
}
|
||||
|
||||
// Strip redundant _SET_IP and _CHECK_VALIDITY uops from the buffer.
// Every _SET_IP is tentatively NOPed; the most recent one is re-instated
// when a later uop can escape, error, or push a frame. A _CHECK_VALIDITY
// survives only if an escaping uop has run since the previous check.
static void
remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size)
{
    int last_set_ip = -1;
    bool maybe_invalid = false;
    for (int pc = 0; pc < buffer_size; pc++) {
        int opcode = buffer[pc].opcode;
        if (opcode == _SET_IP) {
            // Tentatively dead; may be resurrected below.
            buffer[pc].opcode = NOP;
            last_set_ip = pc;
        }
        else if (opcode == _CHECK_VALIDITY) {
            if (maybe_invalid) {
                maybe_invalid = false;
            }
            else {
                // Nothing escaped since the last check: redundant.
                buffer[pc].opcode = NOP;
            }
        }
        else if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE) {
            break;
        }
        else {
            if (_PyUop_Flags[opcode] & HAS_ESCAPES_FLAG) {
                maybe_invalid = true;
                if (last_set_ip >= 0) {
                    buffer[last_set_ip].opcode = _SET_IP;
                }
            }
            // Erroring or frame-pushing uops read the instruction
            // pointer, so the preceding _SET_IP must survive.
            if ((_PyUop_Flags[opcode] & HAS_ERROR_FLAG) || opcode == _PUSH_FRAME) {
                if (last_set_ip >= 0) {
                    buffer[last_set_ip].opcode = _SET_IP;
                }
            }
        }
    }
}
|
||||
|
||||
// 0 - failure, no error raised, just fall back to Tier 1
|
||||
// -1 - failure, and raise error
|
||||
// 1 - optimizer success
|
||||
int
|
||||
_Py_uop_analyze_and_optimize(
|
||||
_PyInterpreterFrame *frame,
|
||||
|
@ -297,11 +742,33 @@ _Py_uop_analyze_and_optimize(
|
|||
_PyBloomFilter *dependencies
|
||||
)
|
||||
{
|
||||
OPT_STAT_INC(optimizer_attempts);
|
||||
|
||||
int err = remove_globals(frame, buffer, buffer_size, dependencies);
|
||||
if (err <= 0) {
|
||||
return err;
|
||||
if (err == 0) {
|
||||
goto not_ready;
|
||||
}
|
||||
if (err < 0) {
|
||||
goto error;
|
||||
}
|
||||
|
||||
peephole_opt(frame, buffer, buffer_size);
|
||||
|
||||
err = uop_redundancy_eliminator(
|
||||
(PyCodeObject *)frame->f_executable, buffer,
|
||||
buffer_size, curr_stacklen);
|
||||
|
||||
if (err == 0) {
|
||||
goto not_ready;
|
||||
}
|
||||
assert(err == 1);
|
||||
|
||||
remove_unneeded_uops(buffer, buffer_size);
|
||||
|
||||
OPT_STAT_INC(optimizer_successes);
|
||||
return 1;
|
||||
not_ready:
|
||||
return 0;
|
||||
error:
|
||||
return -1;
|
||||
}
|
||||
|
|
|
@ -240,6 +240,11 @@ print_optimization_stats(FILE *out, OptimizationStats *stats)
|
|||
print_histogram(out, "Trace run length", stats->trace_run_length_hist);
|
||||
print_histogram(out, "Optimized trace length", stats->optimized_trace_length_hist);
|
||||
|
||||
fprintf(out, "Optimization optimizer attempts: %" PRIu64 "\n", stats->optimizer_attempts);
|
||||
fprintf(out, "Optimization optimizer successes: %" PRIu64 "\n", stats->optimizer_successes);
|
||||
fprintf(out, "Optimization optimizer failure no memory: %" PRIu64 "\n",
|
||||
stats->optimizer_failure_reason_no_memory);
|
||||
|
||||
const char* const* names;
|
||||
for (int i = 0; i < 512; i++) {
|
||||
if (i < 256) {
|
||||
|
|
272
Python/tier2_redundancy_eliminator_bytecodes.c
Normal file
272
Python/tier2_redundancy_eliminator_bytecodes.c
Normal file
|
@ -0,0 +1,272 @@
|
|||
#include "Python.h"
|
||||
#include "pycore_uops.h"
|
||||
#include "pycore_uop_ids.h"
|
||||
|
||||
#define op(name, ...) /* NAME is ignored */
|
||||
|
||||
typedef struct _Py_UOpsSymType _Py_UOpsSymType;
|
||||
typedef struct _Py_UOpsAbstractInterpContext _Py_UOpsAbstractInterpContext;
|
||||
typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame;
|
||||
|
||||
static int
|
||||
dummy_func(void) {
|
||||
|
||||
PyCodeObject *code;
|
||||
int oparg;
|
||||
_Py_UOpsSymType *flag;
|
||||
_Py_UOpsSymType *left;
|
||||
_Py_UOpsSymType *right;
|
||||
_Py_UOpsSymType *value;
|
||||
_Py_UOpsSymType *res;
|
||||
_Py_UOpsSymType *iter;
|
||||
_Py_UOpsSymType *top;
|
||||
_Py_UOpsSymType *bottom;
|
||||
_Py_UOpsAbstractFrame *frame;
|
||||
_Py_UOpsAbstractInterpContext *ctx;
|
||||
_PyUOpInstruction *this_instr;
|
||||
_PyBloomFilter *dependencies;
|
||||
int modified;
|
||||
|
||||
// BEGIN BYTECODES //
|
||||
|
||||
op(_LOAD_FAST_CHECK, (-- value)) {
|
||||
value = GETLOCAL(oparg);
|
||||
// We guarantee this will error - just bail and don't optimize it.
|
||||
if (sym_is_null(value)) {
|
||||
goto out_of_space;
|
||||
}
|
||||
}
|
||||
|
||||
op(_LOAD_FAST, (-- value)) {
|
||||
value = GETLOCAL(oparg);
|
||||
}
|
||||
|
||||
op(_LOAD_FAST_AND_CLEAR, (-- value)) {
|
||||
value = GETLOCAL(oparg);
|
||||
_Py_UOpsSymType *temp = sym_new_null(ctx);
|
||||
if (temp == NULL) {
|
||||
goto out_of_space;
|
||||
}
|
||||
GETLOCAL(oparg) = temp;
|
||||
}
|
||||
|
||||
op(_STORE_FAST, (value --)) {
|
||||
GETLOCAL(oparg) = value;
|
||||
}
|
||||
|
||||
op(_PUSH_NULL, (-- res)) {
|
||||
res = sym_new_null(ctx);
|
||||
if (res == NULL) {
|
||||
goto out_of_space;
|
||||
};
|
||||
}
|
||||
|
||||
op(_GUARD_BOTH_INT, (left, right -- left, right)) {
|
||||
if (sym_matches_type(left, &PyLong_Type) &&
|
||||
sym_matches_type(right, &PyLong_Type)) {
|
||||
REPLACE_OP(this_instr, _NOP, 0, 0);
|
||||
}
|
||||
sym_set_type(left, &PyLong_Type);
|
||||
sym_set_type(right, &PyLong_Type);
|
||||
}
|
||||
|
||||
op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) {
|
||||
if (sym_matches_type(left, &PyFloat_Type) &&
|
||||
sym_matches_type(right, &PyFloat_Type)) {
|
||||
REPLACE_OP(this_instr, _NOP, 0 ,0);
|
||||
}
|
||||
sym_set_type(left, &PyFloat_Type);
|
||||
sym_set_type(right, &PyFloat_Type);
|
||||
}
|
||||
|
||||
|
||||
op(_BINARY_OP_ADD_INT, (left, right -- res)) {
|
||||
// TODO constant propagation
|
||||
(void)left;
|
||||
(void)right;
|
||||
res = sym_new_known_type(ctx, &PyLong_Type);
|
||||
if (res == NULL) {
|
||||
goto out_of_space;
|
||||
}
|
||||
}
|
||||
|
||||
// _LOAD_CONST must not appear in a tier-2 trace by the time the
// redundancy eliminator runs.
op(_LOAD_CONST, (-- value)) {
    // There should be no LOAD_CONST. It should be all
    // replaced by peephole_opt.
    Py_UNREACHABLE();
}
|
||||
|
||||
// Abstract _LOAD_CONST_INLINE: push a symbolic value pinned to the
// inlined constant object `ptr` (enables constant-based reasoning).
op(_LOAD_CONST_INLINE, (ptr/4 -- value)) {
    value = sym_new_const(ctx, ptr);
    if (value == NULL) {
        goto out_of_space;
    }
}
|
||||
|
||||
// Abstract _LOAD_CONST_INLINE_BORROW: identical to _LOAD_CONST_INLINE
// at the symbolic level -- the borrowed/owned distinction only matters
// for refcounting in the concrete interpreter.
op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) {
    value = sym_new_const(ctx, ptr);
    if (value == NULL) {
        goto out_of_space;
    }
}
|
||||
|
||||
// Abstract _LOAD_CONST_INLINE_WITH_NULL: push the inlined constant
// plus a symbolic NULL (the self-or-null slot for a following call).
op(_LOAD_CONST_INLINE_WITH_NULL, (ptr/4 -- value, null)) {
    value = sym_new_const(ctx, ptr);
    if (value == NULL) {
        goto out_of_space;
    }
    null = sym_new_null(ctx);
    if (null == NULL) {
        goto out_of_space;
    }
}
|
||||
|
||||
// Abstract _LOAD_CONST_INLINE_BORROW_WITH_NULL: same as
// _LOAD_CONST_INLINE_WITH_NULL symbolically (borrowing is a
// refcount-only concern in the concrete interpreter).
op(_LOAD_CONST_INLINE_BORROW_WITH_NULL, (ptr/4 -- value, null)) {
    value = sym_new_const(ctx, ptr);
    if (value == NULL) {
        goto out_of_space;
    }
    null = sym_new_null(ctx);
    if (null == NULL) {
        goto out_of_space;
    }
}
|
||||
|
||||
|
||||
// Abstract _COPY: duplicate the symbolic value `oparg` deep in the
// stack onto the top.  Sharing the same symbol (not a fresh copy)
// lets later ops know the two slots hold the identical value.
op(_COPY, (bottom, unused[oparg-1] -- bottom, unused[oparg-1], top)) {
    assert(oparg > 0);
    top = bottom;
}
|
||||
|
||||
// Abstract _SWAP: the exchange of the two stack slots is expressed
// entirely in the stack-effect declaration; no body is needed.
op(_SWAP, (bottom, unused[oparg-2], top --
        top, unused[oparg-2], bottom)) {
}
|
||||
|
||||
// Abstract _LOAD_ATTR_INSTANCE_VALUE: no attribute tracking yet; the
// _LOAD_ATTR_NOT_NULL macro (defined elsewhere) just materializes
// unknown-but-not-NULL outputs for `attr`/`null`.
op(_LOAD_ATTR_INSTANCE_VALUE, (index/1, owner -- attr, null if (oparg & 1))) {
    _LOAD_ATTR_NOT_NULL
    (void)index;
    (void)owner;
}
|
||||
|
||||
// Abstract _LOAD_ATTR_MODULE: see _LOAD_ATTR_INSTANCE_VALUE; outputs
// are unknown-but-not-NULL, inputs are deliberately ignored.
op(_LOAD_ATTR_MODULE, (index/1, owner -- attr, null if (oparg & 1))) {
    _LOAD_ATTR_NOT_NULL
    (void)index;
    (void)owner;
}
|
||||
|
||||
// Abstract _LOAD_ATTR_WITH_HINT: see _LOAD_ATTR_INSTANCE_VALUE;
// outputs are unknown-but-not-NULL, inputs are deliberately ignored.
op(_LOAD_ATTR_WITH_HINT, (hint/1, owner -- attr, null if (oparg & 1))) {
    _LOAD_ATTR_NOT_NULL
    (void)hint;
    (void)owner;
}
|
||||
|
||||
// Abstract _LOAD_ATTR_SLOT: see _LOAD_ATTR_INSTANCE_VALUE; outputs
// are unknown-but-not-NULL, inputs are deliberately ignored.
op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) {
    _LOAD_ATTR_NOT_NULL
    (void)index;
    (void)owner;
}
|
||||
|
||||
// Abstract _LOAD_ATTR_CLASS: see _LOAD_ATTR_INSTANCE_VALUE; outputs
// are unknown-but-not-NULL, inputs are deliberately ignored.
op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr, null if (oparg & 1))) {
    _LOAD_ATTR_NOT_NULL
    (void)descr;
    (void)owner;
}
|
||||
|
||||
// Abstract _CHECK_FUNCTION_EXACT_ARGS: after this guard the callable
// is known to be a PyFunctionObject; the version and self slot are not
// tracked yet.
op(_CHECK_FUNCTION_EXACT_ARGS, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) {
    sym_set_type(callable, &PyFunction_Type);
    (void)self_or_null;
    (void)func_version;
}
|
||||
|
||||
// Abstract _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: after this guard the
// `null` slot is known NULL and the callable is a bound method object.
op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable, null, unused[oparg])) {
    sym_set_null(null);
    sym_set_type(callable, &PyMethod_Type);
}
|
||||
|
||||
// Abstract _INIT_CALL_PY_EXACT_ARGS: build the abstract frame for a
// Python-to-Python call.  If the self/null slot's state is statically
// known, the caller's argument slots become the callee's locals
// directly (a step toward true call inlining).
op(_INIT_CALL_PY_EXACT_ARGS, (callable, self_or_null, args[oparg] -- new_frame: _Py_UOpsAbstractFrame *)) {
    int argcount = oparg;

    (void)callable;

    // The callee function object is stashed in the operand of the
    // instruction two slots ahead -- presumably placed there by an
    // earlier optimizer pass; TODO(review) confirm which pass sets it.
    PyFunctionObject *func = (PyFunctionObject *)(this_instr + 2)->operand;
    if (func == NULL) {
        goto error;
    }
    PyCodeObject *co = (PyCodeObject *)func->func_code;

    assert(self_or_null != NULL);
    assert(args != NULL);
    if (sym_is_not_null(self_or_null)) {
        // Bound method fiddling, same as _INIT_CALL_PY_EXACT_ARGS in VM
        args--;
        argcount++;
    }

    _Py_UOpsSymType **localsplus_start = ctx->n_consumed;
    int n_locals_already_filled = 0;
    // Can determine statically, so we interleave the new locals
    // and make the current stack the new locals.
    // This also sets up for true call inlining.
    if (sym_is_known(self_or_null)) {
        localsplus_start = args;
        n_locals_already_filled = argcount;
    }
    new_frame = ctx_frame_new(ctx, co, localsplus_start, n_locals_already_filled, 0);
    if (new_frame == NULL){
        goto out_of_space;
    }
}
|
||||
|
||||
// Abstract _POP_FRAME: return from the abstract callee frame.  The
// callee's stack pointer is saved, the frame is popped, and the return
// value symbol is propagated onto the caller's stack.
op(_POP_FRAME, (retval -- res)) {
    SYNC_SP();
    ctx->frame->stack_pointer = stack_pointer;
    ctx_frame_pop(ctx);
    // ctx->frame is now the caller; resume with its stack pointer.
    stack_pointer = ctx->frame->stack_pointer;
    res = retval;
}
|
||||
|
||||
// Abstract _PUSH_FRAME: enter the abstract frame created by
// _INIT_CALL_PY_EXACT_ARGS, saving the caller's stack pointer first.
op(_PUSH_FRAME, (new_frame: _Py_UOpsAbstractFrame * -- unused if (0))) {
    SYNC_SP();
    ctx->frame->stack_pointer = stack_pointer;
    ctx->frame = new_frame;
    ctx->curr_frame_depth++;
    stack_pointer = new_frame->stack_pointer;
}
|
||||
|
||||
// Abstract _UNPACK_SEQUENCE: the element values cannot be tracked, so
// every output slot gets a fresh unknown symbol.
op(_UNPACK_SEQUENCE, (seq -- values[oparg])) {
    /* This has to be done manually */
    (void)seq;
    for (int i = 0; i < oparg; i++) {
        values[i] = sym_new_unknown(ctx);
        if (values[i] == NULL) {
            goto out_of_space;
        }
    }
}
|
||||
|
||||
// Abstract _UNPACK_EX: as with _UNPACK_SEQUENCE, all outputs become
// fresh unknown symbols.  Note the loop intentionally runs past
// `values[oparg & 0xFF]` into the declared-unused slots: totalargs
// covers before-star + star + after-star, i.e. every output slot.
op(_UNPACK_EX, (seq -- values[oparg & 0xFF], unused, unused[oparg >> 8])) {
    /* This has to be done manually */
    (void)seq;
    int totalargs = (oparg & 0xFF) + (oparg >> 8) + 1;
    for (int i = 0; i < totalargs; i++) {
        values[i] = sym_new_unknown(ctx);
        if (values[i] == NULL) {
            goto out_of_space;
        }
    }
}
|
||||
|
||||
// Abstract _ITER_NEXT_RANGE: iterating a range always yields an exact
// int, so the pushed value is known to be of PyLong_Type.
op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
    next = sym_new_known_type(ctx, &PyLong_Type);
    if (next == NULL) {
        goto out_of_space;
    }
    (void)iter;
}
|
||||
|
||||
|
||||
|
||||
|
||||
// END BYTECODES //
|
||||
|
||||
}
|
1676
Python/tier2_redundancy_eliminator_cases.c.h
Normal file
1676
Python/tier2_redundancy_eliminator_cases.c.h
Normal file
File diff suppressed because it is too large
Load diff
Loading…
Add table
Add a link
Reference in a new issue