gh-81057: Move contextvars-related Globals to _PyRuntimeState (gh-99400)
This is part of the effort to consolidate global variables, to make them easier to manage (and make it easier to later move some of them to PyInterpreterState). https://github.com/python/cpython/issues/81057
Parent: 5f55067e23
Commit: 01fa907aa8
9 changed files with 82 additions and 90 deletions
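The diff below removes the lazily allocated, file-local singletons (`_token_missing`, `_empty_bitmap_node`, `_empty_hamt`) and instead reaches statically allocated singletons through `_Py_SINGLETON()` / `_Py_INTERP_SINGLETON()`. As a rough sketch of that pattern, here is a standalone C toy (not CPython code; `object_t`, `my_runtime_state`, and `runtime` are made-up names used only for illustration):

/* Hypothetical sketch: replace a lazily heap-allocated, file-local
 * singleton with an object embedded statically in one consolidated
 * runtime struct, so "getting" it never allocates and cannot fail. */
#include <stdio.h>

typedef struct {
    int refcount;          /* stand-in for a real object header */
    const char *name;
} object_t;

/* Before: a C-level global, allocated on first use, cleared at shutdown.
 *     static object_t *token_missing = NULL;
 */

/* After: the singleton lives inside the runtime state and is statically
 * initialized. */
typedef struct {
    object_t token_missing;
} my_runtime_state;

static my_runtime_state runtime = {
    .token_missing = { .refcount = 1, .name = "Token.MISSING" },
};

static object_t *
get_token_missing(void)
{
    /* Analogous to returning Py_NewRef(&_Py_SINGLETON(...)):
     * hand out a new reference to static storage. */
    runtime.token_missing.refcount++;
    return &runtime.token_missing;
}

int main(void)
{
    object_t *tok = get_token_missing();
    printf("%s (refcount=%d)\n", tok->name, tok->refcount);
    return 0;
}

Because the singleton is static storage owned by the runtime, there is nothing left for the finalizers to release, which is why the diff also drops the `Py_CLEAR()` calls in `_PyContext_Fini()` and removes `_PyHamt_Fini()` entirely.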
Python/context.c

@@ -1235,25 +1235,29 @@ token_new(PyContext *ctx, PyContextVar *var, PyObject *val)
 /////////////////////////// Token.MISSING


-static PyObject *_token_missing;
-
-
-typedef struct {
-    PyObject_HEAD
-} PyContextTokenMissing;
-
-
 static PyObject *
 context_token_missing_tp_repr(PyObject *self)
 {
     return PyUnicode_FromString("<Token.MISSING>");
 }

+static void
+context_token_missing_tp_dealloc(_PyContextTokenMissing *Py_UNUSED(self))
+{
+#ifdef Py_DEBUG
+    /* The singleton is statically allocated. */
+    _Py_FatalRefcountError("deallocating the token missing singleton");
+#else
+    return;
+#endif
+}
+

 PyTypeObject _PyContextTokenMissing_Type = {
     PyVarObject_HEAD_INIT(&PyType_Type, 0)
     "Token.MISSING",
-    sizeof(PyContextTokenMissing),
+    sizeof(_PyContextTokenMissing),
+    .tp_dealloc = (destructor)context_token_missing_tp_dealloc,
     .tp_getattro = PyObject_GenericGetAttr,
     .tp_flags = Py_TPFLAGS_DEFAULT,
     .tp_repr = context_token_missing_tp_repr,

@@ -1263,17 +1267,7 @@ PyTypeObject _PyContextTokenMissing_Type = {
 static PyObject *
 get_token_missing(void)
 {
-    if (_token_missing != NULL) {
-        return Py_NewRef(_token_missing);
-    }
-
-    _token_missing = (PyObject *)PyObject_New(
-        PyContextTokenMissing, &_PyContextTokenMissing_Type);
-    if (_token_missing == NULL) {
-        return NULL;
-    }
-
-    return Py_NewRef(_token_missing);
+    return Py_NewRef(&_Py_SINGLETON(context_token_missing));
 }


@@ -1298,15 +1292,11 @@ _PyContext_ClearFreeList(PyInterpreterState *interp)
 void
 _PyContext_Fini(PyInterpreterState *interp)
 {
-    if (_Py_IsMainInterpreter(interp)) {
-        Py_CLEAR(_token_missing);
-    }
     _PyContext_ClearFreeList(interp);
 #if defined(Py_DEBUG) && PyContext_MAXFREELIST > 0
     struct _Py_context_state *state = &interp->context;
     state->numfree = -1;
 #endif
-    _PyHamt_Fini(interp);
 }

Python/hamt.c

@@ -319,13 +319,6 @@ typedef struct {
 } PyHamtNode_Array;


-typedef struct {
-    PyObject_VAR_HEAD
-    uint32_t b_bitmap;
-    PyObject *b_array[1];
-} PyHamtNode_Bitmap;
-
-
 typedef struct {
     PyObject_VAR_HEAD
     int32_t c_hash;

@@ -333,10 +326,6 @@ typedef struct {
 } PyHamtNode_Collision;


-static PyHamtNode_Bitmap *_empty_bitmap_node;
-static PyHamtObject *_empty_hamt;
-
-
 static PyHamtObject *
 hamt_alloc(void);

@@ -521,13 +510,16 @@ hamt_node_bitmap_new(Py_ssize_t size)
     PyHamtNode_Bitmap *node;
     Py_ssize_t i;

+    if (size == 0) {
+        /* Since bitmap nodes are immutable, we can cache the instance
+           for size=0 and reuse it whenever we need an empty bitmap node.
+        */
+        return (PyHamtNode *)Py_NewRef(&_Py_SINGLETON(hamt_bitmap_node_empty));
+    }
+
     assert(size >= 0);
     assert(size % 2 == 0);

-    if (size == 0 && _empty_bitmap_node != NULL) {
-        return (PyHamtNode *)Py_NewRef(_empty_bitmap_node);
-    }
-
     /* No freelist; allocate a new bitmap node */
     node = PyObject_GC_NewVar(
         PyHamtNode_Bitmap, &_PyHamt_BitmapNode_Type, size);

@@ -545,13 +537,6 @@ hamt_node_bitmap_new(Py_ssize_t size)

     _PyObject_GC_TRACK(node);

-    if (size == 0 && _empty_bitmap_node == NULL) {
-        /* Since bitmap nodes are immutable, we can cache the instance
-           for size=0 and reuse it whenever we need an empty bitmap node.
-        */
-        _empty_bitmap_node = (PyHamtNode_Bitmap*)Py_NewRef(node);
-    }
-
     return (PyHamtNode *)node;
 }

@@ -1142,6 +1127,16 @@ hamt_node_bitmap_dealloc(PyHamtNode_Bitmap *self)
     Py_ssize_t len = Py_SIZE(self);
     Py_ssize_t i;

+    if (Py_SIZE(self) == 0) {
+        /* The empty node is statically allocated. */
+        assert(self == &_Py_SINGLETON(hamt_bitmap_node_empty));
+#ifdef Py_DEBUG
+        _Py_FatalRefcountError("deallocating the empty hamt node bitmap singleton");
+#else
+        return;
+#endif
+    }
+
     PyObject_GC_UnTrack(self);
     Py_TRASHCAN_BEGIN(self, hamt_node_bitmap_dealloc)

@@ -2431,33 +2426,15 @@ hamt_alloc(void)
     return o;
 }

+#define _empty_hamt \
+    (&_Py_INTERP_SINGLETON(_PyInterpreterState_Get(), hamt_empty))
+
 PyHamtObject *
 _PyHamt_New(void)
 {
-    if (_empty_hamt != NULL) {
-        /* HAMT is an immutable object so we can easily cache an
-           empty instance. */
-        return (PyHamtObject*)Py_NewRef(_empty_hamt);
-    }
-
-    PyHamtObject *o = hamt_alloc();
-    if (o == NULL) {
-        return NULL;
-    }
-
-    o->h_root = hamt_node_bitmap_new(0);
-    if (o->h_root == NULL) {
-        Py_DECREF(o);
-        return NULL;
-    }
-
-    o->h_count = 0;
-
-    if (_empty_hamt == NULL) {
-        _empty_hamt = (PyHamtObject*)Py_NewRef(o);
-    }
-
-    return o;
+    /* HAMT is an immutable object so we can easily cache an
+       empty instance. */
+    return (PyHamtObject*)Py_NewRef(_empty_hamt);
 }

 #ifdef Py_DEBUG

@@ -2673,6 +2650,15 @@ hamt_tp_traverse(PyHamtObject *self, visitproc visit, void *arg)
 static void
 hamt_tp_dealloc(PyHamtObject *self)
 {
+    if (self == _empty_hamt) {
+        /* The empty one is statically allocated. */
+#ifdef Py_DEBUG
+        _Py_FatalRefcountError("deallocating the empty hamt singleton");
+#else
+        return;
+#endif
+    }
+
     PyObject_GC_UnTrack(self);
     if (self->h_weakreflist != NULL) {
         PyObject_ClearWeakRefs((PyObject*)self);

@@ -2908,11 +2894,3 @@ PyTypeObject _PyHamt_CollisionNode_Type = {
     .tp_free = PyObject_GC_Del,
     .tp_hash = PyObject_HashNotImplemented,
 };
-
-
-void
-_PyHamt_Fini(PyInterpreterState *interp)
-{
-    Py_CLEAR(_empty_hamt);
-    Py_CLEAR(_empty_bitmap_node);
-}