Merge pull request #2789 from rtfeldman/atomic-rc

Use zig for all refcounts and add atomic support
Brendan Hansknecht 2022-04-03 22:23:28 +00:00 committed by GitHub
commit 19c02aa087
7 changed files with 104 additions and 177 deletions
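The refcount encoding itself is unchanged: a count of 1 is stored as the minimum `isize`, each additional reference adds 1, and 0 marks a static ("infinite") value that is never modified or freed. What changes is that the logic now lives in the Zig builtins, exported to the backends as `incref`/`decref` bitcode, and that a comptime `Refcount` mode can switch the operations to atomics. Below is a minimal standalone sketch of that pattern, reusing the constant names from `utils.zig`; it is illustrative only, written in the stage1-era Zig syntax used in the diff, and it sets `RC_TYPE` to `atomic` purely to exercise the new path (the commit itself ships with `Refcount.normal`).

```zig
const std = @import("std");
const Monotonic = std.builtin.AtomicOrder.Monotonic;

// Encoding: isize::MIN is a refcount of 1, isize::MIN + 1 is 2, and so on;
// 0 (REFCOUNT_MAX_ISIZE) marks a static value that is never touched.
const REFCOUNT_ONE_ISIZE: isize = std.math.minInt(isize);
const REFCOUNT_MAX_ISIZE: isize = 0;

const Refcount = enum { none, normal, atomic };
const RC_TYPE = Refcount.atomic; // utils.zig ships with Refcount.normal

// Saturating increment; static values (count == 0) are left untouched.
fn incref(ptr_to_refcount: *isize, amount: isize) void {
    if (RC_TYPE == Refcount.none) return;
    var refcount = ptr_to_refcount.*;
    if (refcount < REFCOUNT_MAX_ISIZE) {
        switch (RC_TYPE) {
            Refcount.normal => {
                ptr_to_refcount.* = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
            },
            Refcount.atomic => {
                // Compare-and-swap loop: retry with the freshly observed count
                // until the saturated new value is published.
                var next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
                while (@cmpxchgWeak(isize, ptr_to_refcount, refcount, next, Monotonic, Monotonic)) |found| {
                    refcount = found;
                    next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
                }
            },
            Refcount.none => unreachable,
        }
    }
}

test "refcount encoding" {
    var rc: isize = REFCOUNT_ONE_ISIZE; // one reference
    incref(&rc, 2);
    try std.testing.expectEqual(REFCOUNT_ONE_ISIZE + 2, rc); // now three

    var static_rc: isize = REFCOUNT_MAX_ISIZE; // static value
    incref(&static_rc, 5);
    try std.testing.expectEqual(REFCOUNT_MAX_ISIZE, static_rc); // unchanged
}
```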


@@ -266,6 +266,7 @@ pub fn gen_from_mono_module_llvm(
|| name.starts_with("roc_builtins.dec")
|| name.starts_with("list.RocList")
|| name.starts_with("dict.RocDict")
|| name.contains("incref")
|| name.contains("decref")
{
function.add_attribute(AttributeLoc::Function, enum_attr);


@@ -155,6 +155,7 @@ comptime {
exportUtilsFn(utils.increfC, "incref");
exportUtilsFn(utils.decrefC, "decref");
exportUtilsFn(utils.decrefCheckNullC, "decref_check_null");
exportUtilsFn(utils.allocateWithRefcountC, "allocate_with_refcount");
exportExpectFn(expect.expectFailedC, "expect_failed");
exportExpectFn(expect.getExpectFailuresC, "get_expect_failures");
exportExpectFn(expect.deinitFailuresC, "deinit_failures");


@@ -1,5 +1,6 @@
const std = @import("std");
const always_inline = std.builtin.CallOptions.Modifier.always_inline;
const Monotonic = std.builtin.AtomicOrder.Monotonic;
pub fn WithOverflow(comptime T: type) type {
return extern struct { value: T, has_overflowed: bool };
@@ -120,10 +121,32 @@ pub const IntWidth = enum(u8) {
I128 = 9,
};
const Refcount = enum {
none,
normal,
atomic,
};
const RC_TYPE = Refcount.normal;
pub fn increfC(ptr_to_refcount: *isize, amount: isize) callconv(.C) void {
if (RC_TYPE == Refcount.none) return;
var refcount = ptr_to_refcount.*;
var masked_amount = if (refcount == REFCOUNT_MAX_ISIZE) 0 else amount;
ptr_to_refcount.* = refcount + masked_amount;
if (refcount < REFCOUNT_MAX_ISIZE) {
switch (RC_TYPE) {
Refcount.normal => {
ptr_to_refcount.* = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
},
Refcount.atomic => {
var next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
while (@cmpxchgWeak(isize, ptr_to_refcount, refcount, next, Monotonic, Monotonic)) |found| {
refcount = found;
next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
}
},
Refcount.none => unreachable,
}
}
}
pub fn decrefC(
@@ -169,71 +192,51 @@ inline fn decref_ptr_to_refcount(
refcount_ptr: [*]isize,
alignment: u32,
) void {
const refcount: isize = refcount_ptr[0];
if (RC_TYPE == Refcount.none) return;
const extra_bytes = std.math.max(alignment, @sizeOf(usize));
if (refcount == REFCOUNT_ONE_ISIZE) {
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
} else if (refcount < 0) {
refcount_ptr[0] = refcount - 1;
switch (RC_TYPE) {
Refcount.normal => {
const refcount: isize = refcount_ptr[0];
if (refcount == REFCOUNT_ONE_ISIZE) {
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
} else if (refcount < REFCOUNT_MAX_ISIZE) {
refcount_ptr[0] = refcount - 1;
}
},
Refcount.atomic => {
if (refcount_ptr[0] < REFCOUNT_MAX_ISIZE) {
var last = @atomicRmw(isize, &refcount_ptr[0], std.builtin.AtomicRmwOp.Sub, 1, Monotonic);
if (last == REFCOUNT_ONE_ISIZE) {
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
}
}
},
Refcount.none => unreachable,
}
}
pub fn allocateWithRefcountC(
data_bytes: usize,
element_alignment: u32,
) callconv(.C) [*]u8 {
return allocateWithRefcount(data_bytes, element_alignment);
}
pub fn allocateWithRefcount(
data_bytes: usize,
element_alignment: u32,
) [*]u8 {
const alignment = std.math.max(@sizeOf(usize), element_alignment);
const first_slot_offset = std.math.max(@sizeOf(usize), element_alignment);
const ptr_width = @sizeOf(usize);
const alignment = std.math.max(ptr_width, element_alignment);
const length = alignment + data_bytes;
switch (alignment) {
16 => {
// TODO handle alloc failing!
var new_bytes: [*]align(16) u8 = @alignCast(16, alloc(length, alignment) orelse unreachable);
var new_bytes: [*]u8 = alloc(length, alignment) orelse unreachable;
var as_usize_array = @ptrCast([*]usize, new_bytes);
as_usize_array[0] = 0;
as_usize_array[1] = REFCOUNT_ONE;
const data_ptr = new_bytes + alignment;
const refcount_ptr = @ptrCast([*]usize, @alignCast(ptr_width, data_ptr) - ptr_width);
refcount_ptr[0] = if (RC_TYPE == Refcount.none) REFCOUNT_MAX_ISIZE else REFCOUNT_ONE;
var as_u8_array = @ptrCast([*]u8, new_bytes);
const first_slot = as_u8_array + first_slot_offset;
return first_slot;
},
8 => {
// TODO handle alloc failing!
var raw = alloc(length, alignment) orelse unreachable;
var new_bytes: [*]align(8) u8 = @alignCast(8, raw);
var as_isize_array = @ptrCast([*]isize, new_bytes);
as_isize_array[0] = REFCOUNT_ONE_ISIZE;
var as_u8_array = @ptrCast([*]u8, new_bytes);
const first_slot = as_u8_array + first_slot_offset;
return first_slot;
},
4 => {
// TODO handle alloc failing!
var raw = alloc(length, alignment) orelse unreachable;
var new_bytes: [*]align(@alignOf(isize)) u8 = @alignCast(@alignOf(isize), raw);
var as_isize_array = @ptrCast([*]isize, new_bytes);
as_isize_array[0] = REFCOUNT_ONE_ISIZE;
var as_u8_array = @ptrCast([*]u8, new_bytes);
const first_slot = as_u8_array + first_slot_offset;
return first_slot;
},
else => {
// const stdout = std.io.getStdOut().writer();
// stdout.print("alignment: {d}", .{alignment}) catch unreachable;
// @panic("allocateWithRefcount with invalid alignment");
unreachable;
},
}
return data_ptr;
}
pub const CSlice = extern struct {

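The `allocateWithRefcount` rewrite above collapses the old per-alignment switch into one layout: pad the allocation to `max(@sizeOf(usize), element_alignment)` bytes, store the refcount in the word immediately before the data, and hand the caller a pointer to the data itself. A condensed sketch of that layout, using the same pointer arithmetic as the diff (`alloc` here is a hypothetical stand-in for the host-provided allocator the real builtins call):

```zig
const std = @import("std");

const REFCOUNT_ONE_ISIZE: isize = std.math.minInt(isize);
const REFCOUNT_ONE: usize = @bitCast(usize, REFCOUNT_ONE_ISIZE);

// Hypothetical stand-in for the builtins' host-backed allocator.
fn alloc(length: usize, alignment: u32) ?[*]u8 {
    _ = alignment; // page allocations are already sufficiently aligned here
    const slice = std.heap.page_allocator.alloc(u8, length) catch return null;
    return slice.ptr;
}

fn allocateWithRefcount(data_bytes: usize, element_alignment: u32) [*]u8 {
    const ptr_width = @sizeOf(usize);
    const alignment = std.math.max(ptr_width, element_alignment);
    const length = alignment + data_bytes;

    // Layout: [ padding ... | refcount | data ... ]
    //         ^ new_bytes              ^ data_ptr (returned)
    var new_bytes: [*]u8 = alloc(length, alignment) orelse unreachable; // TODO handle alloc failing
    const data_ptr = new_bytes + alignment;
    const refcount_ptr = @ptrCast([*]usize, @alignCast(ptr_width, data_ptr) - ptr_width);
    refcount_ptr[0] = REFCOUNT_ONE;

    return data_ptr;
}
```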

@@ -371,6 +371,7 @@ pub const DEC_MUL_WITH_OVERFLOW: &str = "roc_builtins.dec.mul_with_overflow";
pub const DEC_DIV: &str = "roc_builtins.dec.div";
pub const UTILS_TEST_PANIC: &str = "roc_builtins.utils.test_panic";
pub const UTILS_ALLOCATE_WITH_REFCOUNT: &str = "roc_builtins.utils.allocate_with_refcount";
pub const UTILS_INCREF: &str = "roc_builtins.utils.incref";
pub const UTILS_DECREF: &str = "roc_builtins.utils.decref";
pub const UTILS_DECREF_CHECK_NULL: &str = "roc_builtins.utils.decref_check_null";


@@ -2124,15 +2124,11 @@ fn reserve_with_refcount_help<'a, 'ctx, 'env>(
stack_size: u32,
alignment_bytes: u32,
) -> PointerValue<'ctx> {
let ctx = env.context;
let len_type = env.ptr_int();
let value_bytes_intvalue = len_type.const_int(stack_size as u64, false);
let rc1 = crate::llvm::refcounting::refcount_1(ctx, env.target_info);
allocate_with_refcount_help(env, basic_type, alignment_bytes, value_bytes_intvalue, rc1)
allocate_with_refcount_help(env, basic_type, alignment_bytes, value_bytes_intvalue)
}
pub fn allocate_with_refcount<'a, 'ctx, 'env>(
@@ -2153,74 +2149,22 @@ pub fn allocate_with_refcount_help<'a, 'ctx, 'env>(
value_type: impl BasicType<'ctx>,
alignment_bytes: u32,
number_of_data_bytes: IntValue<'ctx>,
initial_refcount: IntValue<'ctx>,
) -> PointerValue<'ctx> {
let builder = env.builder;
let ptr = call_bitcode_fn(
env,
&[
number_of_data_bytes.into(),
env.alignment_const(alignment_bytes).into(),
],
roc_builtins::bitcode::UTILS_ALLOCATE_WITH_REFCOUNT,
)
.into_pointer_value();
let len_type = env.ptr_int();
let ptr_width_u32 = env.target_info.ptr_width() as u32;
let ptr_type = value_type.ptr_type(AddressSpace::Generic);
let extra_bytes = alignment_bytes.max(ptr_width_u32);
let ptr = {
// number of bytes we will allocated
let number_of_bytes = builder.build_int_add(
len_type.const_int(extra_bytes as u64, false),
number_of_data_bytes,
"add_extra_bytes",
);
env.call_alloc(number_of_bytes, alignment_bytes)
};
// We must return a pointer to the first element:
let data_ptr = {
let int_type = env.ptr_int();
let as_usize_ptr = builder
.build_bitcast(
ptr,
int_type.ptr_type(AddressSpace::Generic),
"to_usize_ptr",
)
.into_pointer_value();
let index = match extra_bytes {
n if n == ptr_width_u32 => 1,
n if n == 2 * ptr_width_u32 => 2,
_ => unreachable!("invalid extra_bytes, {}", extra_bytes),
};
let index_intvalue = int_type.const_int(index, false);
let ptr_type = value_type.ptr_type(AddressSpace::Generic);
unsafe {
builder.build_pointer_cast(
env.builder
.build_in_bounds_gep(as_usize_ptr, &[index_intvalue], "get_data_ptr"),
ptr_type,
"alloc_cast_to_desired",
)
}
};
let refcount_ptr = match extra_bytes {
n if n == ptr_width_u32 => {
// the allocated pointer is the same as the refcounted pointer
unsafe { PointerToRefcount::from_ptr(env, ptr) }
}
n if n == 2 * ptr_width_u32 => {
// the refcount is stored just before the start of the actual data
// but in this case (because of alignment) not at the start of the allocated buffer
PointerToRefcount::from_ptr_to_data(env, data_ptr)
}
n => unreachable!("invalid extra_bytes {}", n),
};
// let rc1 = crate::llvm::refcounting::refcount_1(ctx, env.ptr_bytes);
refcount_ptr.set_refcount(env, initial_refcount);
data_ptr
env.builder
.build_bitcast(ptr, ptr_type, "alloc_cast_to_desired")
.into_pointer_value()
}
macro_rules! dict_key_value_layout {


@@ -1259,7 +1259,6 @@ pub fn allocate_list<'a, 'ctx, 'env>(
number_of_elements: IntValue<'ctx>,
) -> PointerValue<'ctx> {
let builder = env.builder;
let ctx = env.context;
let len_type = env.ptr_int();
let elem_bytes = elem_layout.stack_size(env.target_info) as u64;
@@ -1267,13 +1266,9 @@ pub fn allocate_list<'a, 'ctx, 'env>(
let number_of_data_bytes =
builder.build_int_mul(bytes_per_element, number_of_elements, "data_length");
// the refcount of a new list is initially 1
// we assume that the list is indeed used (dead variables are eliminated)
let rc1 = crate::llvm::refcounting::refcount_1(ctx, env.target_info);
let basic_type = basic_type_from_layout(env, elem_layout);
let alignment_bytes = elem_layout.alignment_bytes(env.target_info);
allocate_with_refcount_help(env, basic_type, alignment_bytes, number_of_data_bytes, rc1)
allocate_with_refcount_help(env, basic_type, alignment_bytes, number_of_data_bytes)
}
pub fn store_list<'a, 'ctx, 'env>(


@@ -8,7 +8,6 @@ use crate::llvm::build_list::{incrementing_elem_loop, list_len, load_list};
use crate::llvm::convert::basic_type_from_layout;
use bumpalo::collections::Vec;
use inkwell::basic_block::BasicBlock;
use inkwell::context::Context;
use inkwell::module::Linkage;
use inkwell::types::{AnyTypeEnum, BasicMetadataTypeEnum, BasicType, BasicTypeEnum};
use inkwell::values::{
@@ -18,22 +17,10 @@ use inkwell::{AddressSpace, IntPredicate};
use roc_module::symbol::Interns;
use roc_module::symbol::Symbol;
use roc_mono::layout::{Builtin, Layout, LayoutIds, UnionLayout};
use roc_target::TargetInfo;
use super::build::load_roc_value;
use super::convert::{argument_type_from_layout, argument_type_from_union_layout};
/// "Infinite" reference count, for static values
/// Ref counts are encoded as negative numbers where isize::MIN represents 1
pub const REFCOUNT_MAX: usize = 0_usize;
pub fn refcount_1(ctx: &Context, target_info: TargetInfo) -> IntValue<'_> {
match target_info.ptr_width() {
roc_target::PtrWidth::Bytes4 => ctx.i32_type().const_int(i32::MIN as u64, false),
roc_target::PtrWidth::Bytes8 => ctx.i64_type().const_int(i64::MIN as u64, false),
}
}
pub struct PointerToRefcount<'ctx> {
value: PointerValue<'ctx>,
}
@@ -96,7 +83,14 @@ impl<'ctx> PointerToRefcount<'ctx> {
pub fn is_1<'a, 'env>(&self, env: &Env<'a, 'ctx, 'env>) -> IntValue<'ctx> {
let current = self.get_refcount(env);
let one = refcount_1(env.context, env.target_info);
let one = match env.target_info.ptr_width() {
roc_target::PtrWidth::Bytes4 => {
env.context.i32_type().const_int(i32::MIN as u64, false)
}
roc_target::PtrWidth::Bytes8 => {
env.context.i64_type().const_int(i64::MIN as u64, false)
}
};
env.builder
.build_int_compare(IntPredicate::EQ, current, one, "is_one")
@@ -125,38 +119,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
}
fn increment<'a, 'env>(&self, amount: IntValue<'ctx>, env: &Env<'a, 'ctx, 'env>) {
let refcount = self.get_refcount(env);
let builder = env.builder;
let refcount_type = env.ptr_int();
let is_static_allocation = builder.build_int_compare(
IntPredicate::EQ,
refcount,
refcount_type.const_int(REFCOUNT_MAX as u64, false),
"refcount_max_check",
);
let block = env.builder.get_insert_block().expect("to be in a function");
let parent = block.get_parent().unwrap();
let modify_block = env
.context
.append_basic_block(parent, "inc_refcount_modify");
let cont_block = env.context.append_basic_block(parent, "inc_refcount_cont");
env.builder
.build_conditional_branch(is_static_allocation, cont_block, modify_block);
{
env.builder.position_at_end(modify_block);
let incremented = builder.build_int_add(refcount, amount, "increment_refcount");
self.set_refcount(env, incremented);
env.builder.build_unconditional_branch(cont_block);
}
env.builder.position_at_end(cont_block);
incref_pointer(env, self.value, amount);
}
pub fn decrement<'a, 'env>(&self, env: &Env<'a, 'ctx, 'env>, layout: &Layout<'a>) {
@@ -232,6 +195,25 @@ impl<'ctx> PointerToRefcount<'ctx> {
}
}
fn incref_pointer<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
pointer: PointerValue<'ctx>,
amount: IntValue<'ctx>,
) {
call_void_bitcode_fn(
env,
&[
env.builder.build_bitcast(
pointer,
env.ptr_int().ptr_type(AddressSpace::Generic),
"to_isize_ptr",
),
amount.into(),
],
roc_builtins::bitcode::UTILS_INCREF,
);
}
fn decref_pointer<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
pointer: PointerValue<'ctx>,