GH-125174: Mark objects as statically allocated. (#127797)

* Set a bit in the unused part of the refcount on 64 bit machines and the free-threaded build.

* Use the top of the refcount range on 32 bit machines
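
A minimal illustrative sketch (not part of the diff; constants and field names simplified from the headers changed below) of the 64-bit scheme: the statically-allocated flag occupies the otherwise unused upper 32 bits of the refcount field, while immortality is still detected from the sign bit of the lower 32 bits. On the free-threaded build the flag is stored in the separate ob_flags field instead.

#include <stdint.h>
#include <stdio.h>

/* Upper half of the 64-bit field: the statically-allocated flag (bit 7 of ob_flags). */
#define STATIC_FLAG_HIGH  ((uint64_t)(1 << 7) << 32)
/* Lower half: the immortal refcount value (sign bit of the 32-bit view is set). */
#define IMMORTAL_REFCNT   ((uint64_t)(3UL << 30))

int main(void)
{
    uint64_t refcnt = IMMORTAL_REFCNT | STATIC_FLAG_HIGH;   /* a static immortal object */

    /* Immortality check: only the sign of the lower 32 bits matters. */
    printf("immortal: %d\n", (int32_t)(uint32_t)refcnt < 0);
    /* Static-allocation check: only the flag bit in the upper 32 bits matters. */
    printf("static:   %d\n", (refcnt & STATIC_FLAG_HIGH) != 0);
    return 0;
}
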
Mark Shannon 2024-12-11 17:37:38 +00:00 committed by GitHub
parent dd9da738ad
commit bc262de06b
7 changed files with 99 additions and 13 deletions

View file

@@ -73,15 +73,25 @@ PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *);
#define _PyObject_HEAD_INIT(type) \
{ \
.ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL, \
.ob_flags = _Py_STATICALLY_ALLOCATED_FLAG, \
.ob_type = (type) \
}
#else
#if SIZEOF_VOID_P > 4
#define _PyObject_HEAD_INIT(type) \
{ \
.ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
.ob_flags = _Py_STATICALLY_ALLOCATED_FLAG, \
.ob_type = (type) \
}
#else
#define _PyObject_HEAD_INIT(type) \
{ \
.ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
.ob_refcnt = _Py_STATIC_IMMORTAL_INITIAL_REFCNT, \
.ob_type = (type) \
}
#endif
#endif
#define _PyVarObject_HEAD_INIT(type, size) \
{ \
.ob_base = _PyObject_HEAD_INIT(type), \
@@ -127,7 +137,11 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
_Py_AddRefTotal(_PyThreadState_GET(), n);
#endif
#if !defined(Py_GIL_DISABLED)
#if SIZEOF_VOID_P > 4
op->ob_refcnt += (PY_UINT32_T)n;
#else
op->ob_refcnt += n;
#endif
#else
if (_Py_IsOwnedByCurrentThread(op)) {
uint32_t local = op->ob_ref_local;

View file

@@ -71,7 +71,7 @@ whose size is determined when the object is allocated.
#define PyObject_HEAD_INIT(type) \
{ \
0, \
0, \
_Py_STATICALLY_ALLOCATED_FLAG, \
{ 0 }, \
0, \
_Py_IMMORTAL_REFCNT_LOCAL, \
@@ -81,7 +81,7 @@ whose size is determined when the object is allocated.
#else
#define PyObject_HEAD_INIT(type) \
{ \
{ _Py_IMMORTAL_INITIAL_REFCNT }, \
{ _Py_STATIC_IMMORTAL_INITIAL_REFCNT }, \
(type) \
},
#endif
@@ -120,9 +120,19 @@ struct _object {
__pragma(warning(disable: 4201))
#endif
union {
Py_ssize_t ob_refcnt;
#if SIZEOF_VOID_P > 4
PY_UINT32_T ob_refcnt_split[2];
PY_INT64_T ob_refcnt_full; /* This field is needed for efficient initialization with Clang on ARM */
struct {
# if PY_BIG_ENDIAN
PY_UINT32_T ob_flags;
PY_UINT32_T ob_refcnt;
# else
PY_UINT32_T ob_refcnt;
PY_UINT32_T ob_flags;
# endif
};
#else
Py_ssize_t ob_refcnt;
#endif
};
#ifdef _MSC_VER
@@ -142,7 +152,7 @@ struct _object {
// trashcan mechanism as a linked list pointer and by the GC to store the
// computed "gc_refs" refcount.
uintptr_t ob_tid;
uint16_t _padding;
uint16_t ob_flags;
PyMutex ob_mutex; // per-object lock
uint8_t ob_gc_bits; // gc-related state
uint32_t ob_ref_local; // local reference count

View file

@@ -19,6 +19,9 @@ immortal. The latter should be the only instances that require
cleanup during runtime finalization.
*/
/* Leave the low bits for refcount overflow for old stable ABI code */
#define _Py_STATICALLY_ALLOCATED_FLAG (1 << 7)
#if SIZEOF_VOID_P > 4
/*
In 64+ bit systems, any object whose 32 bit reference count is >= 2**31
@@ -39,7 +42,8 @@ beyond the refcount limit. Immortality checks for reference count decreases will
be done by checking the bit sign flag in the lower 32 bits.
*/
#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(3UL << 30))
#define _Py_IMMORTAL_INITIAL_REFCNT (3UL << 30)
#define _Py_STATIC_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(_Py_IMMORTAL_INITIAL_REFCNT | (((Py_ssize_t)_Py_STATICALLY_ALLOCATED_FLAG) << 32)))
#else
/*
@@ -54,8 +58,10 @@ immortality, but the execution would still be correct.
Reference count increases and decreases will first go through an immortality
check by comparing the reference count field to the minimum immortality refcount.
*/
#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(3L << 29))
#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(5L << 28))
#define _Py_IMMORTAL_MINIMUM_REFCNT ((Py_ssize_t)(1L << 30))
#define _Py_STATIC_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(7L << 28))
#define _Py_STATIC_IMMORTAL_MINIMUM_REFCNT ((Py_ssize_t)(6L << 28))
#endif
// Py_GIL_DISABLED builds indicate immortal objects using `ob_ref_local`, which is
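
On 32-bit builds there is no spare half-word, so the constants above instead carve a static-immortal range out of the top of the refcount space. A small self-contained check of those values (illustrative only, not part of the diff): the plain immortal initial value sits above the immortal minimum but below the static-immortal minimum, so plain >= comparisons still classify each object correctly.

#include <assert.h>

int main(void)
{
    long immortal_initial        = 5L << 28;  /* _Py_IMMORTAL_INITIAL_REFCNT */
    long immortal_minimum        = 1L << 30;  /* _Py_IMMORTAL_MINIMUM_REFCNT */
    long static_immortal_initial = 7L << 28;  /* _Py_STATIC_IMMORTAL_INITIAL_REFCNT */
    long static_immortal_minimum = 6L << 28;  /* _Py_STATIC_IMMORTAL_MINIMUM_REFCNT */

    assert(immortal_minimum == 4L << 28);                 /* thresholds are 4 and 6, in units of 2**28 */
    assert(immortal_initial >= immortal_minimum);         /* immortal ...                */
    assert(immortal_initial < static_immortal_minimum);   /* ... but not static immortal */
    assert(static_immortal_initial >= static_immortal_minimum);  /* static immortal ...    */
    assert(static_immortal_initial >= immortal_minimum);         /* ... and still immortal */
    return 0;
}
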
@@ -123,10 +129,21 @@ static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op)
#define _Py_IsImmortal(op) _Py_IsImmortal(_PyObject_CAST(op))
static inline Py_ALWAYS_INLINE int _Py_IsStaticImmortal(PyObject *op)
{
#if defined(Py_GIL_DISABLED) || SIZEOF_VOID_P > 4
return (op->ob_flags & _Py_STATICALLY_ALLOCATED_FLAG) != 0;
#else
return op->ob_refcnt >= _Py_STATIC_IMMORTAL_MINIMUM_REFCNT;
#endif
}
#define _Py_IsStaticImmortal(op) _Py_IsStaticImmortal(_PyObject_CAST(op))
// Py_SET_REFCNT() implementation for stable ABI
PyAPI_FUNC(void) _Py_SetRefcnt(PyObject *ob, Py_ssize_t refcnt);
static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
assert(refcnt >= 0);
#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030d0000
// Stable ABI implements Py_SET_REFCNT() as a function call
// on limited C API version 3.13 and newer.
@@ -139,9 +156,12 @@ static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
if (_Py_IsImmortal(ob)) {
return;
}
#ifndef Py_GIL_DISABLED
#if SIZEOF_VOID_P > 4
ob->ob_refcnt = (PY_UINT32_T)refcnt;
#else
ob->ob_refcnt = refcnt;
#endif
#else
if (_Py_IsOwnedByCurrentThread(ob)) {
if ((size_t)refcnt > (size_t)UINT32_MAX) {
@@ -252,13 +272,13 @@ static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
_Py_atomic_add_ssize(&op->ob_ref_shared, (1 << _Py_REF_SHARED_SHIFT));
}
#elif SIZEOF_VOID_P > 4
PY_UINT32_T cur_refcnt = op->ob_refcnt_split[PY_BIG_ENDIAN];
PY_UINT32_T cur_refcnt = op->ob_refcnt;
if (((int32_t)cur_refcnt) < 0) {
// the object is immortal
_Py_INCREF_IMMORTAL_STAT_INC();
return;
}
op->ob_refcnt_split[PY_BIG_ENDIAN] = cur_refcnt + 1;
op->ob_refcnt = cur_refcnt + 1;
#else
if (_Py_IsImmortal(op)) {
_Py_INCREF_IMMORTAL_STAT_INC();
@@ -354,7 +374,13 @@ static inline void Py_DECREF(PyObject *op)
#elif defined(Py_REF_DEBUG)
static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
{
#if SIZEOF_VOID_P > 4
/* If an object has been freed, it will have a negative full refcnt.
 * If it has not been freed, it will have a very large refcnt. */
if (op->ob_refcnt_full <= 0 || op->ob_refcnt > (UINT32_MAX - (1<<20))) {
#else
if (op->ob_refcnt <= 0) {
#endif
_Py_NegativeRefcount(filename, lineno, op);
}
if (_Py_IsImmortal(op)) {

View file

@@ -2691,7 +2691,7 @@ class ShutdownTest(unittest.TestCase):
class ImmortalTests(unittest.TestCase):
if sys.maxsize < (1 << 32):
IMMORTAL_REFCOUNT = 3 << 29
IMMORTAL_REFCOUNT = 7 << 28
else:
IMMORTAL_REFCOUNT = 3 << 30

View file

@@ -2,6 +2,7 @@ import unittest
from test.support import import_helper
_testcapi = import_helper.import_module('_testcapi')
_testinternalcapi = import_helper.import_module('_testinternalcapi')
class TestCAPI(unittest.TestCase):
@@ -11,6 +12,21 @@ class TestCAPI(unittest.TestCase):
def test_immortal_small_ints(self):
_testcapi.test_immortal_small_ints()
class TestInternalCAPI(unittest.TestCase):
def test_immortal_builtins(self):
for obj in range(-5, 256):
self.assertTrue(_testinternalcapi.is_static_immortal(obj))
self.assertTrue(_testinternalcapi.is_static_immortal(None))
self.assertTrue(_testinternalcapi.is_static_immortal(False))
self.assertTrue(_testinternalcapi.is_static_immortal(True))
self.assertTrue(_testinternalcapi.is_static_immortal(...))
self.assertTrue(_testinternalcapi.is_static_immortal(()))
for obj in range(300, 400):
self.assertFalse(_testinternalcapi.is_static_immortal(obj))
for obj in ([], {}, set()):
self.assertFalse(_testinternalcapi.is_static_immortal(obj))
if __name__ == "__main__":
unittest.main()

View file

@@ -2049,6 +2049,15 @@ get_tracked_heap_size(PyObject *self, PyObject *Py_UNUSED(ignored))
return PyLong_FromInt64(PyInterpreterState_Get()->gc.heap_size);
}
static PyObject *
is_static_immortal(PyObject *self, PyObject *op)
{
if (_Py_IsStaticImmortal(op)) {
Py_RETURN_TRUE;
}
Py_RETURN_FALSE;
}
static PyMethodDef module_functions[] = {
{"get_configs", get_configs, METH_NOARGS},
{"get_recursion_depth", get_recursion_depth, METH_NOARGS},
@@ -2146,6 +2155,7 @@ static PyMethodDef module_functions[] = {
{"identify_type_slot_wrappers", identify_type_slot_wrappers, METH_NOARGS},
{"has_deferred_refcount", has_deferred_refcount, METH_O},
{"get_tracked_heap_size", get_tracked_heap_size, METH_NOARGS},
{"is_static_immortal", is_static_immortal, METH_O},
{NULL, NULL} /* sentinel */
};

View file

@@ -2475,10 +2475,16 @@ new_reference(PyObject *op)
{
// Skip the immortal object check in Py_SET_REFCNT; always set refcnt to 1
#if !defined(Py_GIL_DISABLED)
#if SIZEOF_VOID_P > 4
op->ob_refcnt_full = 1;
assert(op->ob_refcnt == 1);
assert(op->ob_flags == 0);
#else
op->ob_refcnt = 1;
#endif
#else
op->ob_tid = _Py_ThreadId();
op->_padding = 0;
op->ob_flags = 0;
op->ob_mutex = (PyMutex){ 0 };
op->ob_gc_bits = 0;
op->ob_ref_local = 1;
@@ -2515,6 +2521,10 @@ _Py_SetImmortalUntracked(PyObject *op)
|| PyUnicode_CHECK_INTERNED(op) == SSTATE_INTERNED_IMMORTAL_STATIC);
}
#endif
// Check if already immortal to avoid degrading from static immortal to plain immortal
if (_Py_IsImmortal(op)) {
return;
}
#ifdef Py_GIL_DISABLED
op->ob_tid = _Py_UNOWNED_TID;
op->ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL;
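
The early return added above keeps a second immortalization call from downgrading a statically allocated immortal object to a plain immortal one. A standalone sketch of that failure mode (hypothetical helper name, 64-bit layout as in the sketch near the top; not CPython source):

#include <assert.h>
#include <stdint.h>

#define IMMORTAL_INITIAL        ((uint64_t)(3UL << 30))     /* lower 32 bits */
#define STATIC_FLAG_HIGH        ((uint64_t)(1 << 7) << 32)  /* upper 32 bits */
#define STATIC_IMMORTAL_INITIAL (IMMORTAL_INITIAL | STATIC_FLAG_HIGH)

/* Hypothetical stand-in for _Py_SetImmortalUntracked, acting on a bare 64-bit field. */
static void set_immortal(uint64_t *refcnt_field, int check_first)
{
    if (check_first && (int32_t)(uint32_t)*refcnt_field < 0) {
        return;                        /* already immortal: keep the upper bits intact */
    }
    *refcnt_field = IMMORTAL_INITIAL;  /* overwrites the whole 64-bit field */
}

int main(void)
{
    uint64_t field = STATIC_IMMORTAL_INITIAL;
    set_immortal(&field, 0);                  /* without the check (old behaviour) */
    assert((field & STATIC_FLAG_HIGH) == 0);  /* static-allocation bit was lost */

    field = STATIC_IMMORTAL_INITIAL;
    set_immortal(&field, 1);                  /* with the early return */
    assert((field & STATIC_FLAG_HIGH) != 0);  /* static-allocation bit preserved */
    return 0;
}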