Simplify Refcounting

Instead of using -max_size to -1 for regular refcounts, use 1 to max_size.
0 still means a constant refcount.
The highest bit now signifies that atomic refcounting is required.
This commit does not yet turn on any sort of atomic refcounting.
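A minimal sketch of the new encoding, using the mask constants this commit adds
to utils.zig. The rcUnique here collapses the real function's RC_TYPE switch to
its .atomic arm, and the test values are illustrative, not part of the commit:

    const std = @import("std");

    // Only the top bit set: flags a refcount that must be updated atomically.
    const REFCOUNT_IS_ATOMIC_MASK: isize = std.math.minInt(isize);
    // All other bits hold the count itself, which now starts at 1.
    const REFCOUNT_VALUE_MASK = ~REFCOUNT_IS_ATOMIC_MASK;

    fn rcUnique(refcount: isize) bool {
        // Ignore the atomic bit and ask whether the count is exactly 1.
        return refcount & REFCOUNT_VALUE_MASK == 1;
    }

    test "refcount encoding" {
        try std.testing.expect(rcUnique(1)); // plain refcount, unique
        try std.testing.expect(rcUnique(REFCOUNT_IS_ATOMIC_MASK | 1)); // atomic bit set, still unique
        try std.testing.expect(!rcUnique(2)); // plain refcount, shared
        try std.testing.expect(!rcUnique(REFCOUNT_IS_ATOMIC_MASK | 5)); // atomic, shared
    }

Keeping the flag in the sign bit means 0 (constant) and ordinary counts stay small
positive integers, and masking with REFCOUNT_VALUE_MASK recovers the count whether
or not the flag is set.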
Brendan Hansknecht 2024-12-31 17:35:46 -08:00
parent 8001de5468
commit 4b8693537a
15 changed files with 57 additions and 84 deletions


@@ -179,23 +179,19 @@ pub const RocList = extern struct {
     }

     pub fn isUnique(self: RocList) bool {
-        return self.refcountMachine() == utils.REFCOUNT_ONE;
+        return utils.rcUnique(@bitCast(self.refcount()));
     }

-    fn refcountMachine(self: RocList) usize {
+    fn refcount(self: RocList) usize {
         if (self.getCapacity() == 0 and !self.isSeamlessSlice()) {
             // the zero-capacity is Clone, copying it will not leak memory
-            return utils.REFCOUNT_ONE;
+            return 1;
         }

         const ptr: [*]usize = @as([*]usize, @ptrCast(@alignCast(self.getAllocationDataPtr())));
         return (ptr - 1)[0];
     }

-    fn refcountHuman(self: RocList) usize {
-        return self.refcountMachine() - utils.REFCOUNT_ONE + 1;
-    }
-
     pub fn makeUniqueExtra(self: RocList, alignment: u32, element_width: usize, elements_refcounted: bool, dec: Dec, update_mode: UpdateMode) RocList {
         if (update_mode == .InPlace) {
             return self;


@@ -366,22 +366,18 @@ pub const RocStr = extern struct {
     }

     fn isRefcountOne(self: RocStr) bool {
-        return self.refcountMachine() == utils.REFCOUNT_ONE;
+        return utils.rcUnique(@bitCast(self.refcount()));
     }

-    fn refcountMachine(self: RocStr) usize {
+    fn refcount(self: RocStr) usize {
         if ((self.getCapacity() == 0 and !self.isSeamlessSlice()) or self.isSmallStr()) {
-            return utils.REFCOUNT_ONE;
+            return 1;
         }

         const ptr: [*]usize = @as([*]usize, @ptrCast(@alignCast(self.bytes)));
         return (ptr - 1)[0];
     }

-    fn refcountHuman(self: RocStr) usize {
-        return self.refcountMachine() - utils.REFCOUNT_ONE + 1;
-    }
-
     pub fn asSlice(self: *const RocStr) []const u8 {
         return self.asU8ptr()[0..self.len()];
     }


@@ -175,11 +175,10 @@ pub const IncN = fn (?[*]u8, u64) callconv(.C) void;
 pub const Dec = fn (?[*]u8) callconv(.C) void;

 const REFCOUNT_MAX_ISIZE: isize = 0;
-pub const REFCOUNT_ONE_ISIZE: isize = std.math.minInt(isize);
-pub const REFCOUNT_ONE: usize = @as(usize, @bitCast(REFCOUNT_ONE_ISIZE));
-pub const REFCOUNT_IS_NOT_ATOMIC_MASK: isize = REFCOUNT_ONE_ISIZE;
-pub const REFCOUNT_ONE_ATOMIC_ISIZE: isize = 1;
+// Only top bit set.
+pub const REFCOUNT_IS_ATOMIC_MASK: isize = std.math.minInt(isize);
+// All other bits of the refcount.
+pub const REFCOUNT_VALUE_MASK = ~REFCOUNT_IS_ATOMIC_MASK;

 pub const IntWidth = enum(u8) {
     U8 = 0,
@@ -220,21 +219,17 @@ pub fn increfRcPtrC(ptr_to_refcount: *isize, amount: isize) callconv(.C) void {
                 const old = @as(usize, @bitCast(refcount));
                 const new = old + @as(usize, @intCast(amount));
-                const oldH = old - REFCOUNT_ONE + 1;
-                const newH = new - REFCOUNT_ONE + 1;
-                std.debug.print("{} + {} = {}!\n", .{ oldH, amount, newH });
+                std.debug.print("{} + {} = {}!\n", .{ old, amount, new });
             }
-            ptr_to_refcount.* = refcount + amount;
+            ptr_to_refcount.* = refcount +% amount;
         },
         .atomic => {
-            // If the first bit of the refcount is set, this variable is threadlocal.
-            // Use normal refcounting instead of atomic.
-            if (refcount & REFCOUNT_IS_NOT_ATOMIC_MASK != 0) {
-                ptr_to_refcount.* = refcount + amount;
-            } else {
+            // If the first bit of the refcount is set, this variable is atomic.
+            if (refcount & REFCOUNT_IS_ATOMIC_MASK != 0) {
                 _ = @atomicRmw(isize, ptr_to_refcount, .Add, amount, .monotonic);
+            } else {
+                ptr_to_refcount.* = refcount +% amount;
             }
         },
         .none => unreachable,
@@ -386,27 +381,23 @@ inline fn decref_ptr_to_refcount(
             const new = @as(usize, @bitCast(refcount -% 1));

             if (DEBUG_INCDEC and builtin.target.cpu.arch != .wasm32) {
-                const oldH = old - REFCOUNT_ONE + 1;
-                const newH = new - REFCOUNT_ONE + 1;
-                std.debug.print("{} - 1 = {}!\n", .{ oldH, newH });
+                std.debug.print("{} - 1 = {}!\n", .{ old, new });
             }

-            if (refcount == REFCOUNT_ONE_ISIZE) {
+            if (refcount == 1) {
                 free_ptr_to_refcount(refcount_ptr, alignment, elements_refcounted);
             }
         },
         .atomic => {
-            // If the first bit of the refcount is set, this variable is threadlocal.
-            // Use normal refcounting instead of atomic.
-            if (refcount & REFCOUNT_IS_NOT_ATOMIC_MASK != 0) {
-                refcount_ptr[0] = refcount -% 1;
-                if (refcount == REFCOUNT_ONE_ISIZE) {
+            // If the first bit of the refcount is set, this variable is atomic.
+            if (refcount & REFCOUNT_IS_ATOMIC_MASK != 0) {
+                const last = @atomicRmw(isize, &refcount_ptr[0], .Sub, 1, .monotonic);
+                if (last & REFCOUNT_VALUE_MASK == 1) {
                     free_ptr_to_refcount(refcount_ptr, alignment, elements_refcounted);
                 }
             } else {
-                const last = @atomicRmw(isize, &refcount_ptr[0], .Sub, 1, .monotonic);
-                if (last == REFCOUNT_ONE_ATOMIC_ISIZE) {
+                refcount_ptr[0] = refcount -% 1;
+                if (refcount & REFCOUNT_VALUE_MASK == 1) {
                     free_ptr_to_refcount(refcount_ptr, alignment, elements_refcounted);
                 }
             }
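As a worked check of the free condition shared by both branches above: @atomicRmw
with .Sub returns the value before the subtraction, so masking `last` with
REFCOUNT_VALUE_MASK isolates the count bits whether or not the atomic flag is set.
A small illustrative test (values mine, not from this commit):

    const std = @import("std");

    const ATOMIC_BIT: isize = std.math.minInt(isize); // REFCOUNT_IS_ATOMIC_MASK
    const VALUE_MASK = ~ATOMIC_BIT; // REFCOUNT_VALUE_MASK

    test "decref free condition" {
        // Plain refcount of 1: count bits are 1, so this decref frees.
        const plain: isize = 1;
        try std.testing.expect(plain & VALUE_MASK == 1);

        // Atomic refcount of 1: top bit set, count bits still 1, also frees.
        const last: isize = ATOMIC_BIT | 1;
        try std.testing.expect(last & VALUE_MASK == 1);

        // Atomic refcount of 2: still shared, must not free yet.
        const shared: isize = ATOMIC_BIT | 2;
        try std.testing.expect(shared & VALUE_MASK != 1);
    }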
@@ -433,12 +424,16 @@ pub fn isUnique(
         std.debug.print("| is unique {*}\n", .{isizes - 1});
     }

+    return rcUnique(refcount);
+}
+
+pub fn rcUnique(refcount: isize) bool {
     switch (RC_TYPE) {
         .normal => {
-            return refcount == REFCOUNT_ONE_ISIZE;
+            return refcount == 1;
         },
         .atomic => {
-            return refcount == REFCOUNT_ONE_ISIZE or refcount == REFCOUNT_ONE_ATOMIC_ISIZE;
+            return refcount & REFCOUNT_VALUE_MASK == 1;
         },
         .none => {
             return false;
@@ -523,7 +518,7 @@ pub fn allocateWithRefcount(
     const data_ptr = new_bytes + extra_bytes;
     const refcount_ptr = @as([*]usize, @ptrCast(@as([*]align(ptr_width) u8, @alignCast(data_ptr)) - ptr_width));
-    refcount_ptr[0] = if (RC_TYPE == .none) REFCOUNT_MAX_ISIZE else REFCOUNT_ONE;
+    refcount_ptr[0] = if (RC_TYPE == .none) REFCOUNT_MAX_ISIZE else 1;

     return data_ptr;
 }
@@ -573,10 +568,10 @@ pub const UpdateMode = enum(u8) {
 };

 test "increfC, refcounted data" {
-    var mock_rc: isize = REFCOUNT_ONE_ISIZE + 17;
+    var mock_rc: isize = 17;
     const ptr_to_refcount: *isize = &mock_rc;
     increfRcPtrC(ptr_to_refcount, 2);
-    try std.testing.expectEqual(mock_rc, REFCOUNT_ONE_ISIZE + 19);
+    try std.testing.expectEqual(mock_rc, 19);
 }

 test "increfC, static data" {