Mirror of https://github.com/roc-lang/roc.git (synced 2025-08-04 04:08:19 +00:00)

Merge remote-tracking branch 'remote/main' into builtin-task

Commit b489c44b19
262 changed files with 11354 additions and 5821 deletions
@@ -98,7 +98,7 @@ fn generateObjectFile(
     const obj_file = obj.getEmittedBin();
 
-    var suffix =
+    const suffix =
         if (target.os_tag == .windows)
            "obj"
        else
@@ -84,12 +84,12 @@ pub const RocList = extern struct {
         return true;
     }
 
-    pub fn fromSlice(comptime T: type, slice: []const T) RocList {
+    pub fn fromSlice(comptime T: type, slice: []const T, elements_refcounted: bool) RocList {
         if (slice.len == 0) {
             return RocList.empty();
         }
 
-        var list = allocate(@alignOf(T), slice.len, @sizeOf(T));
+        const list = allocate(@alignOf(T), slice.len, @sizeOf(T), elements_refcounted);
 
         if (slice.len > 0) {
             const dest = list.bytes orelse unreachable;
@@ -107,7 +107,7 @@ pub const RocList = extern struct {
     // The pointer is to just after the refcount.
     // For big lists, it just returns their bytes pointer.
     // For seamless slices, it returns the pointer stored in capacity_or_alloc_ptr.
-    pub fn getAllocationPtr(self: RocList) ?[*]u8 {
+    pub fn getAllocationDataPtr(self: RocList) ?[*]u8 {
         const list_alloc_ptr = @intFromPtr(self.bytes);
         const slice_alloc_ptr = self.capacity_or_alloc_ptr << 1;
         const slice_mask = self.seamlessSliceMask();
@@ -115,9 +115,60 @@ pub const RocList = extern struct {
         return @as(?[*]u8, @ptrFromInt(alloc_ptr));
     }
 
-    pub fn decref(self: RocList, alignment: u32) void {
+    // This function is only valid if the list has refcounted elements.
+    fn getAllocationElementCount(self: RocList) usize {
+        if (self.isSeamlessSlice()) {
+            // Seamless slices always refer to an underlying allocation.
+            const alloc_ptr = self.getAllocationDataPtr() orelse unreachable;
+            // - 1 is refcount.
+            // - 2 is size on heap.
+            const ptr = @as([*]usize, @ptrCast(@alignCast(alloc_ptr))) - 2;
+            return ptr[0];
+        } else {
+            return self.length;
+        }
+    }
+
+    // This needs to be called when creating seamless slices from a unique list.
+    // It will put the allocation size on the heap to enable the seamless slice to free the underlying allocation.
+    fn setAllocationElementCount(self: RocList, elements_refcounted: bool) void {
+        if (elements_refcounted and !self.isSeamlessSlice()) {
+            // - 1 is refcount.
+            // - 2 is size on heap.
+            const ptr = @as([*]usize, @alignCast(@ptrCast(self.getAllocationDataPtr()))) - 2;
+            ptr[0] = self.length;
+        }
+    }
+
+    pub fn incref(self: RocList, amount: isize, elements_refcounted: bool) void {
+        // If the list is unique and not a seamless slice, the length needs to be stored on the heap if the elements are refcounted.
+        if (elements_refcounted and self.isUnique() and !self.isSeamlessSlice()) {
+            if (self.getAllocationDataPtr()) |source| {
+                // - 1 is refcount.
+                // - 2 is size on heap.
+                const ptr = @as([*]usize, @alignCast(@ptrCast(source))) - 2;
+                ptr[0] = self.length;
+            }
+        }
+        utils.increfDataPtrC(self.getAllocationDataPtr(), amount);
+    }
+
+    pub fn decref(self: RocList, alignment: u32, element_width: usize, elements_refcounted: bool, dec: Dec) void {
+        // If unique, decref will free the list. Before that happens, all elements must be decremented.
+        if (elements_refcounted and self.isUnique()) {
+            if (self.getAllocationDataPtr()) |source| {
+                const count = self.getAllocationElementCount();
+
+                var i: usize = 0;
+                while (i < count) : (i += 1) {
+                    const element = source + i * element_width;
+                    dec(element);
+                }
+            }
+        }
+
         // We use the raw capacity to ensure we always decrement the refcount of seamless slices.
-        utils.decref(self.getAllocationPtr(), self.capacity_or_alloc_ptr, alignment);
+        utils.decref(self.getAllocationDataPtr(), self.capacity_or_alloc_ptr, alignment, elements_refcounted);
     }
 
     pub fn elements(self: RocList, comptime T: type) ?[*]T {
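Note: the heap layout is the key idea behind these new functions. When the element type is refcounted, every list allocation now carries two header words directly in front of the data: the element count at the data pointer minus two words, and the refcount at the data pointer minus one word. The sketch below illustrates that layout with a plain stack buffer, assuming a 64-bit target (the buffer and values are illustrative only; in the builtins the header is produced by utils.allocateWithRefcount):

const std = @import("std");

// Illustrative only: fake the two-word header with a stack buffer.
test "element count and refcount sit just before the data pointer" {
    var buffer: [4 * @sizeOf(usize)]u8 align(@alignOf(usize)) = undefined;
    const data_ptr = @as([*]u8, @ptrCast(&buffer)) + 2 * @sizeOf(usize);

    // Mirrors setAllocationElementCount/getAllocationElementCount:
    // header[0] is the element count, header[1] is the refcount.
    const header = @as([*]usize, @ptrCast(@alignCast(data_ptr))) - 2;
    header[0] = 3; // element count ("size on heap")
    header[1] = 1; // refcount (REFCOUNT_ONE in the real code)

    try std.testing.expectEqual(@as(usize, 3), header[0]);
    try std.testing.expectEqual(@as(usize, 1), header[1]);
}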
@@ -134,7 +185,7 @@ pub const RocList = extern struct {
             return utils.REFCOUNT_ONE;
         }
 
-        const ptr: [*]usize = @as([*]usize, @ptrCast(@alignCast(self.bytes)));
+        const ptr: [*]usize = @as([*]usize, @ptrCast(@alignCast(self.getAllocationDataPtr())));
         return (ptr - 1)[0];
     }
@@ -142,15 +193,22 @@ pub const RocList = extern struct {
         return self.refcountMachine() - utils.REFCOUNT_ONE + 1;
     }
 
-    pub fn makeUniqueExtra(self: RocList, alignment: u32, element_width: usize, update_mode: UpdateMode) RocList {
+    pub fn makeUniqueExtra(self: RocList, alignment: u32, element_width: usize, elements_refcounted: bool, inc: Inc, dec: Dec, update_mode: UpdateMode) RocList {
         if (update_mode == .InPlace) {
             return self;
         } else {
-            return self.makeUnique(alignment, element_width);
+            return self.makeUnique(alignment, element_width, elements_refcounted, inc, dec);
         }
     }
 
-    pub fn makeUnique(self: RocList, alignment: u32, element_width: usize) RocList {
+    pub fn makeUnique(
+        self: RocList,
+        alignment: u32,
+        element_width: usize,
+        elements_refcounted: bool,
+        inc: Inc,
+        dec: Dec,
+    ) RocList {
         if (self.isUnique()) {
             return self;
         }
@@ -158,12 +216,12 @@ pub const RocList = extern struct {
         if (self.isEmpty()) {
             // Empty is not necessarily unique on its own.
             // The list could have capacity and be shared.
-            self.decref(alignment);
+            self.decref(alignment, element_width, elements_refcounted, dec);
             return RocList.empty();
         }
 
         // unfortunately, we have to clone
-        var new_list = RocList.allocate(alignment, self.length, element_width);
+        const new_list = RocList.allocate(alignment, self.length, element_width, elements_refcounted);
 
         var old_bytes: [*]u8 = @as([*]u8, @ptrCast(self.bytes));
         var new_bytes: [*]u8 = @as([*]u8, @ptrCast(new_list.bytes));
@@ -171,8 +229,15 @@ pub const RocList = extern struct {
         const number_of_bytes = self.len() * element_width;
         @memcpy(new_bytes[0..number_of_bytes], old_bytes[0..number_of_bytes]);
 
-        // NOTE we fuse an increment of all keys/values with a decrement of the input list.
-        self.decref(alignment);
+        // Increment refcount of all elements now in a new list.
+        if (elements_refcounted) {
+            var i: usize = 0;
+            while (i < self.len()) : (i += 1) {
+                inc(new_bytes + i * element_width);
+            }
+        }
+
+        self.decref(alignment, element_width, elements_refcounted, dec);
 
         return new_list;
     }
@@ -181,6 +246,7 @@ pub const RocList = extern struct {
         alignment: u32,
         length: usize,
         element_width: usize,
+        elements_refcounted: bool,
     ) RocList {
         if (length == 0) {
             return empty();
@@ -189,7 +255,7 @@ pub const RocList = extern struct {
         const capacity = utils.calculateCapacity(0, length, element_width);
         const data_bytes = capacity * element_width;
         return RocList{
-            .bytes = utils.allocateWithRefcount(data_bytes, alignment),
+            .bytes = utils.allocateWithRefcount(data_bytes, alignment, elements_refcounted),
             .length = length,
             .capacity_or_alloc_ptr = capacity,
         };
@@ -199,6 +265,7 @@ pub const RocList = extern struct {
         alignment: u32,
         length: usize,
         element_width: usize,
+        elements_refcounted: bool,
     ) RocList {
         if (length == 0) {
             return empty();
@@ -206,7 +273,7 @@ pub const RocList = extern struct {
 
         const data_bytes = length * element_width;
         return RocList{
-            .bytes = utils.allocateWithRefcount(data_bytes, alignment),
+            .bytes = utils.allocateWithRefcount(data_bytes, alignment, elements_refcounted),
             .length = length,
             .capacity_or_alloc_ptr = length,
         };
@@ -217,6 +284,8 @@ pub const RocList = extern struct {
         alignment: u32,
         new_length: usize,
         element_width: usize,
+        elements_refcounted: bool,
+        inc: Inc,
     ) RocList {
         if (self.bytes) |source_ptr| {
             if (self.isUnique() and !self.isSeamlessSlice()) {
@@ -225,13 +294,13 @@ pub const RocList = extern struct {
                     return RocList{ .bytes = self.bytes, .length = new_length, .capacity_or_alloc_ptr = capacity };
                 } else {
                     const new_capacity = utils.calculateCapacity(capacity, new_length, element_width);
-                    const new_source = utils.unsafeReallocate(source_ptr, alignment, capacity, new_capacity, element_width);
+                    const new_source = utils.unsafeReallocate(source_ptr, alignment, capacity, new_capacity, element_width, elements_refcounted);
                     return RocList{ .bytes = new_source, .length = new_length, .capacity_or_alloc_ptr = new_capacity };
                 }
             }
-            return self.reallocateFresh(alignment, new_length, element_width);
+            return self.reallocateFresh(alignment, new_length, element_width, elements_refcounted, inc);
         }
-        return RocList.allocate(alignment, new_length, element_width);
+        return RocList.allocate(alignment, new_length, element_width, elements_refcounted);
     }
 
     /// reallocate by explicitly making a new allocation and copying elements over
@@ -240,244 +309,52 @@ pub const RocList = extern struct {
         alignment: u32,
         new_length: usize,
         element_width: usize,
+        elements_refcounted: bool,
+        inc: Inc,
     ) RocList {
         const old_length = self.length;
 
-        const result = RocList.allocate(alignment, new_length, element_width);
+        const result = RocList.allocate(alignment, new_length, element_width, elements_refcounted);
 
-        // transfer the memory
         if (self.bytes) |source_ptr| {
+            // transfer the memory
             const dest_ptr = result.bytes orelse unreachable;
 
             @memcpy(dest_ptr[0..(old_length * element_width)], source_ptr[0..(old_length * element_width)]);
             @memset(dest_ptr[(old_length * element_width)..(new_length * element_width)], 0);
+
+            // Increment refcount of all elements now in a new list.
+            if (elements_refcounted) {
+                var i: usize = 0;
+                while (i < old_length) : (i += 1) {
+                    inc(dest_ptr + i * element_width);
+                }
+            }
         }
 
-        self.decref(alignment);
+        // Calls utils.decref directly to avoid decrementing the refcount of elements.
+        utils.decref(self.getAllocationDataPtr(), self.capacity_or_alloc_ptr, alignment, elements_refcounted);
 
         return result;
     }
 };
 
-const Caller0 = *const fn (?[*]u8, ?[*]u8) callconv(.C) void;
-const Caller1 = *const fn (?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
-const Caller2 = *const fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
-const Caller3 = *const fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
-const Caller4 = *const fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
-
-pub fn listMap(
-    list: RocList,
-    caller: Caller1,
-    data: Opaque,
-    inc_n_data: IncN,
-    data_is_owned: bool,
-    alignment: u32,
-    old_element_width: usize,
-    new_element_width: usize,
-) callconv(.C) RocList {
-    if (list.bytes) |source_ptr| {
-        const size = list.len();
-        var i: usize = 0;
-        const output = RocList.allocate(alignment, size, new_element_width);
-        const target_ptr = output.bytes orelse unreachable;
-
-        if (data_is_owned) {
-            inc_n_data(data, size);
-        }
-
-        while (i < size) : (i += 1) {
-            caller(data, source_ptr + (i * old_element_width), target_ptr + (i * new_element_width));
-        }
-
-        return output;
-    } else {
-        return RocList.empty();
-    }
-}
+pub fn listIncref(list: RocList, amount: isize, elements_refcounted: bool) callconv(.C) void {
+    list.incref(amount, elements_refcounted);
+}
 
-fn decrementTail(list: RocList, start_index: usize, element_width: usize, dec: Dec) void {
-    if (list.bytes) |source| {
-        var i = start_index;
-        while (i < list.len()) : (i += 1) {
-            const element = source + i * element_width;
-            dec(element);
-        }
-    }
-}
-
-pub fn listMap2(
-    list1: RocList,
-    list2: RocList,
-    caller: Caller2,
-    data: Opaque,
-    inc_n_data: IncN,
-    data_is_owned: bool,
-    alignment: u32,
-    a_width: usize,
-    b_width: usize,
-    c_width: usize,
-    dec_a: Dec,
-    dec_b: Dec,
-) callconv(.C) RocList {
-    const output_length = @min(list1.len(), list2.len());
-
-    // if the lists don't have equal length, we must consume the remaining elements
-    // In this case we consume by (recursively) decrementing the elements
-    decrementTail(list1, output_length, a_width, dec_a);
-    decrementTail(list2, output_length, b_width, dec_b);
-
-    if (data_is_owned) {
-        inc_n_data(data, output_length);
-    }
-
-    if (list1.bytes) |source_a| {
-        if (list2.bytes) |source_b| {
-            const output = RocList.allocate(alignment, output_length, c_width);
-            const target_ptr = output.bytes orelse unreachable;
-
-            var i: usize = 0;
-            while (i < output_length) : (i += 1) {
-                const element_a = source_a + i * a_width;
-                const element_b = source_b + i * b_width;
-                const target = target_ptr + i * c_width;
-                caller(data, element_a, element_b, target);
-            }
-
-            return output;
-        } else {
-            return RocList.empty();
-        }
-    } else {
-        return RocList.empty();
-    }
-}
-
-pub fn listMap3(
-    list1: RocList,
-    list2: RocList,
-    list3: RocList,
-    caller: Caller3,
-    data: Opaque,
-    inc_n_data: IncN,
-    data_is_owned: bool,
-    alignment: u32,
-    a_width: usize,
-    b_width: usize,
-    c_width: usize,
-    d_width: usize,
-    dec_a: Dec,
-    dec_b: Dec,
-    dec_c: Dec,
-) callconv(.C) RocList {
-    const smaller_length = @min(list1.len(), list2.len());
-    const output_length = @min(smaller_length, list3.len());
-
-    decrementTail(list1, output_length, a_width, dec_a);
-    decrementTail(list2, output_length, b_width, dec_b);
-    decrementTail(list3, output_length, c_width, dec_c);
-
-    if (data_is_owned) {
-        inc_n_data(data, output_length);
-    }
-
-    if (list1.bytes) |source_a| {
-        if (list2.bytes) |source_b| {
-            if (list3.bytes) |source_c| {
-                const output = RocList.allocate(alignment, output_length, d_width);
-                const target_ptr = output.bytes orelse unreachable;
-
-                var i: usize = 0;
-                while (i < output_length) : (i += 1) {
-                    const element_a = source_a + i * a_width;
-                    const element_b = source_b + i * b_width;
-                    const element_c = source_c + i * c_width;
-                    const target = target_ptr + i * d_width;
-
-                    caller(data, element_a, element_b, element_c, target);
-                }
-
-                return output;
-            } else {
-                return RocList.empty();
-            }
-        } else {
-            return RocList.empty();
-        }
-    } else {
-        return RocList.empty();
-    }
-}
-
-pub fn listMap4(
-    list1: RocList,
-    list2: RocList,
-    list3: RocList,
-    list4: RocList,
-    caller: Caller4,
-    data: Opaque,
-    inc_n_data: IncN,
-    data_is_owned: bool,
-    alignment: u32,
-    a_width: usize,
-    b_width: usize,
-    c_width: usize,
-    d_width: usize,
-    e_width: usize,
-    dec_a: Dec,
-    dec_b: Dec,
-    dec_c: Dec,
-    dec_d: Dec,
-) callconv(.C) RocList {
-    const output_length = @min(@min(list1.len(), list2.len()), @min(list3.len(), list4.len()));
-
-    decrementTail(list1, output_length, a_width, dec_a);
-    decrementTail(list2, output_length, b_width, dec_b);
-    decrementTail(list3, output_length, c_width, dec_c);
-    decrementTail(list4, output_length, d_width, dec_d);
-
-    if (data_is_owned) {
-        inc_n_data(data, output_length);
-    }
-
-    if (list1.bytes) |source_a| {
-        if (list2.bytes) |source_b| {
-            if (list3.bytes) |source_c| {
-                if (list4.bytes) |source_d| {
-                    const output = RocList.allocate(alignment, output_length, e_width);
-                    const target_ptr = output.bytes orelse unreachable;
-
-                    var i: usize = 0;
-                    while (i < output_length) : (i += 1) {
-                        const element_a = source_a + i * a_width;
-                        const element_b = source_b + i * b_width;
-                        const element_c = source_c + i * c_width;
-                        const element_d = source_d + i * d_width;
-                        const target = target_ptr + i * e_width;
-
-                        caller(data, element_a, element_b, element_c, element_d, target);
-                    }
-
-                    return output;
-                } else {
-                    return RocList.empty();
-                }
-            } else {
-                return RocList.empty();
-            }
-        } else {
-            return RocList.empty();
-        }
-    } else {
-        return RocList.empty();
-    }
-}
+pub fn listDecref(list: RocList, alignment: u32, element_width: usize, elements_refcounted: bool, dec: Dec) callconv(.C) void {
+    list.decref(alignment, element_width, elements_refcounted, dec);
+}
 
 pub fn listWithCapacity(
     capacity: u64,
     alignment: u32,
     element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
 ) callconv(.C) RocList {
-    return listReserve(RocList.empty(), alignment, capacity, element_width, .InPlace);
+    return listReserve(RocList.empty(), alignment, capacity, element_width, elements_refcounted, inc, .InPlace);
 }
 
 pub fn listReserve(
@@ -485,6 +362,8 @@ pub fn listReserve(
     alignment: u32,
     spare: u64,
     element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
     update_mode: UpdateMode,
 ) callconv(.C) RocList {
     const original_len = list.len();
@@ -497,7 +376,7 @@ pub fn listReserve(
     // Make sure on 32-bit targets we don't accidentally wrap when we cast our U64 desired capacity to U32.
     const reserve_size: u64 = @min(desired_cap, @as(u64, @intCast(std.math.maxInt(usize))));
 
-    var output = list.reallocate(alignment, @as(usize, @intCast(reserve_size)), element_width);
+    var output = list.reallocate(alignment, @as(usize, @intCast(reserve_size)), element_width, elements_refcounted, inc);
     output.length = original_len;
     return output;
 }
@@ -507,6 +386,9 @@ pub fn listReleaseExcessCapacity(
     list: RocList,
     alignment: u32,
     element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+    dec: Dec,
     update_mode: UpdateMode,
 ) callconv(.C) RocList {
     const old_length = list.len();
@@ -514,16 +396,27 @@ pub fn listReleaseExcessCapacity(
     if ((update_mode == .InPlace or list.isUnique()) and list.capacity_or_alloc_ptr == old_length) {
         return list;
     } else if (old_length == 0) {
-        list.decref(alignment);
+        list.decref(alignment, element_width, elements_refcounted, dec);
         return RocList.empty();
     } else {
-        var output = RocList.allocateExact(alignment, old_length, element_width);
+        // TODO: This can be made more efficient, but has to work around the `decref`.
+        // If the list is unique, we can avoid incrementing and decrementing the live items.
+        // We can just decrement the dead elements and free the old list.
+        // This pattern also holds in other locations like listConcat and listDropAt.
+        const output = RocList.allocateExact(alignment, old_length, element_width, elements_refcounted);
         if (list.bytes) |source_ptr| {
             const dest_ptr = output.bytes orelse unreachable;
 
             @memcpy(dest_ptr[0..(old_length * element_width)], source_ptr[0..(old_length * element_width)]);
+            if (elements_refcounted) {
+                var i: usize = 0;
+                while (i < old_length) : (i += 1) {
+                    const element = source_ptr + i * element_width;
+                    inc(element);
+                }
+            }
         }
-        list.decref(alignment);
+        list.decref(alignment, element_width, elements_refcounted, dec);
         return output;
     }
 }
@@ -547,15 +440,30 @@ pub fn listAppendUnsafe(
     return output;
 }
 
-fn listAppend(list: RocList, alignment: u32, element: Opaque, element_width: usize, update_mode: UpdateMode) callconv(.C) RocList {
-    const with_capacity = listReserve(list, alignment, 1, element_width, update_mode);
+fn listAppend(
+    list: RocList,
+    alignment: u32,
+    element: Opaque,
+    element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+    update_mode: UpdateMode,
+) callconv(.C) RocList {
+    const with_capacity = listReserve(list, alignment, 1, element_width, elements_refcounted, inc, update_mode);
     return listAppendUnsafe(with_capacity, element, element_width);
 }
 
-pub fn listPrepend(list: RocList, alignment: u32, element: Opaque, element_width: usize) callconv(.C) RocList {
+pub fn listPrepend(
+    list: RocList,
+    alignment: u32,
+    element: Opaque,
+    element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+) callconv(.C) RocList {
     const old_length = list.len();
     // TODO: properly wire in update mode.
-    var with_capacity = listReserve(list, alignment, 1, element_width, .Immutable);
+    var with_capacity = listReserve(list, alignment, 1, element_width, elements_refcounted, inc, .Immutable);
     with_capacity.length += 1;
 
     // can't use one memcpy here because source and target overlap
@@ -586,8 +494,15 @@ pub fn listSwap(
     element_width: usize,
     index_1: u64,
     index_2: u64,
+    elements_refcounted: bool,
+    inc: Inc,
+    dec: Dec,
     update_mode: UpdateMode,
 ) callconv(.C) RocList {
+    // Early exit to avoid swapping the same element.
+    if (index_1 == index_2)
+        return list;
+
     const size = @as(u64, @intCast(list.len()));
     if (index_1 == index_2 or index_1 >= size or index_2 >= size) {
         // Either one index was out of bounds, or both indices were the same; just return
@@ -598,7 +513,7 @@ pub fn listSwap(
         if (update_mode == .InPlace) {
             break :blk list;
         } else {
-            break :blk list.makeUnique(alignment, element_width);
+            break :blk list.makeUnique(alignment, element_width, elements_refcounted, inc, dec);
         }
     };
 
@@ -616,26 +531,30 @@ pub fn listSublist(
     list: RocList,
     alignment: u32,
     element_width: usize,
+    elements_refcounted: bool,
     start_u64: u64,
     len_u64: u64,
     dec: Dec,
 ) callconv(.C) RocList {
     const size = list.len();
-    if (size == 0 or start_u64 >= @as(u64, @intCast(size))) {
-        // Decrement the reference counts of all elements.
-        if (list.bytes) |source_ptr| {
-            var i: usize = 0;
-            while (i < size) : (i += 1) {
-                const element = source_ptr + i * element_width;
-                dec(element);
-            }
-        }
+    if (size == 0 or len_u64 == 0 or start_u64 >= @as(u64, @intCast(size))) {
+        if (list.isUnique()) {
+            // Decrement the reference counts of all elements.
+            if (list.bytes) |source_ptr| {
+                if (elements_refcounted) {
+                    var i: usize = 0;
+                    while (i < size) : (i += 1) {
+                        const element = source_ptr + i * element_width;
+                        dec(element);
+                    }
+                }
+            }
+
+            var output = list;
+            output.length = 0;
+            return output;
+        }
-        list.decref(alignment);
+        list.decref(alignment, element_width, elements_refcounted, dec);
         return RocList.empty();
     }
 
@@ -643,7 +562,6 @@ pub fn listSublist(
         // This cast is lossless because we would have early-returned already
         // if `start_u64` were greater than `size`, and `size` fits in usize.
         const start: usize = @intCast(start_u64);
-        const drop_start_len = start;
 
         // (size - start) can't overflow because we would have early-returned already
         // if `start` were greater than `size`.
@@ -654,32 +572,25 @@ pub fn listSublist(
         // than something that fit in usize.
         const keep_len = @as(usize, @intCast(@min(len_u64, @as(u64, @intCast(size_minus_start)))));
 
-        // This can't overflow because if len > size_minus_start,
-        // then keep_len == size_minus_start and this will be 0.
-        // Alternatively, if len <= size_minus_start, then keep_len will
-        // be equal to len, meaning keep_len <= size_minus_start too,
-        // which in turn means this won't overflow.
-        const drop_end_len = size_minus_start - keep_len;
-
-        // Decrement the reference counts of elements before `start`.
-        var i: usize = 0;
-        while (i < drop_start_len) : (i += 1) {
-            const element = source_ptr + i * element_width;
-            dec(element);
-        }
-
-        // Decrement the reference counts of elements after `start + keep_len`.
-        i = 0;
-        while (i < drop_end_len) : (i += 1) {
-            const element = source_ptr + (start + keep_len + i) * element_width;
-            dec(element);
-        }
-
+        if (start == 0 and list.isUnique()) {
+            // The list is unique, so we actually have to decrement the refcounts of the elements we aren't keeping around.
+            // Decrement the reference counts of elements after `start + keep_len`.
+            if (elements_refcounted) {
+                const drop_end_len = size_minus_start - keep_len;
+                var i: usize = 0;
+                while (i < drop_end_len) : (i += 1) {
+                    const element = source_ptr + (start + keep_len + i) * element_width;
+                    dec(element);
+                }
+            }
+
+            var output = list;
+            output.length = keep_len;
+            return output;
+        } else {
+            if (list.isUnique()) {
+                list.setAllocationElementCount(elements_refcounted);
+            }
             const list_alloc_ptr = (@intFromPtr(source_ptr) >> 1) | SEAMLESS_SLICE_BIT;
             const slice_alloc_ptr = list.capacity_or_alloc_ptr;
             const slice_mask = list.seamlessSliceMask();
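Note: before a unique list is turned into a seamless slice, setAllocationElementCount records the list's length in the heap header, because a slice only remembers its own window; without the stored count, a later decref of the slice could not know how many elements of the underlying allocation to decrement. The allocation pointer itself is carried in capacity_or_alloc_ptr, shifted right by one with the high bit marking "this is a slice". A small sketch of that encoding (names mirror the diff, but this is a simplification; the real code also applies seamlessSliceMask to handle the non-slice case):

const std = @import("std");

const SEAMLESS_SLICE_BIT: usize = @as(usize, 1) << (@bitSizeOf(usize) - 1);

fn storeAllocPtr(alloc_ptr: usize) usize {
    // Alignment guarantees the low bit of alloc_ptr is zero, so no data is lost.
    return (alloc_ptr >> 1) | SEAMLESS_SLICE_BIT;
}

fn loadAllocPtr(capacity_or_alloc_ptr: usize) usize {
    // Shifting left by one restores the pointer and drops the slice bit.
    return capacity_or_alloc_ptr << 1;
}

test "round-trip the allocation pointer through capacity_or_alloc_ptr" {
    const p: usize = 0x1000;
    try std.testing.expectEqual(p, loadAllocPtr(storeAllocPtr(p)));
}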
@@ -699,7 +610,9 @@ pub fn listDropAt(
     list: RocList,
     alignment: u32,
     element_width: usize,
+    elements_refcounted: bool,
     drop_index_u64: u64,
+    inc: Inc,
     dec: Dec,
 ) callconv(.C) RocList {
     const size = list.len();
@@ -708,11 +621,11 @@ pub fn listDropAt(
     // For simplicity, do this by calling listSublist.
     // In the future, we can test if it is faster to manually inline the important parts here.
     if (drop_index_u64 == 0) {
-        return listSublist(list, alignment, element_width, 1, size -| 1, dec);
+        return listSublist(list, alignment, element_width, elements_refcounted, 1, size -| 1, dec);
     } else if (drop_index_u64 == size_u64 - 1) { // It's fine if (size - 1) wraps on size == 0 here,
         // because if size is 0 then it's always fine for this branch to be taken; no
         // matter what drop_index was, we're size == 0, so empty list will always be returned.
-        return listSublist(list, alignment, element_width, 0, size -| 1, dec);
+        return listSublist(list, alignment, element_width, elements_refcounted, 0, size -| 1, dec);
     }
 
     if (list.bytes) |source_ptr| {
@@ -724,15 +637,17 @@ pub fn listDropAt(
         // were >= than `size`, and we know `size` fits in usize.
         const drop_index: usize = @intCast(drop_index_u64);
 
-        const element = source_ptr + drop_index * element_width;
-        dec(element);
+        if (elements_refcounted) {
+            const element = source_ptr + drop_index * element_width;
+            dec(element);
+        }
 
         // NOTE
         // we need to return an empty list explicitly,
         // because we rely on the pointer field being null if the list is empty
         // which also requires duplicating the utils.decref call to spend the RC token
         if (size < 2) {
-            list.decref(alignment);
+            list.decref(alignment, element_width, elements_refcounted, dec);
             return RocList.empty();
         }
 
@@ -751,7 +666,7 @@ pub fn listDropAt(
             return new_list;
         }
 
-        const output = RocList.allocate(alignment, size - 1, element_width);
+        const output = RocList.allocate(alignment, size - 1, element_width, elements_refcounted);
         const target_ptr = output.bytes orelse unreachable;
 
         const head_size = drop_index * element_width;
@@ -762,7 +677,15 @@ pub fn listDropAt(
         const tail_size = (size - drop_index - 1) * element_width;
         @memcpy(tail_target[0..tail_size], tail_source[0..tail_size]);
 
-        list.decref(alignment);
+        if (elements_refcounted) {
+            var i: usize = 0;
+            while (i < output.len()) : (i += 1) {
+                const cloned_elem = target_ptr + i * element_width;
+                inc(cloned_elem);
+            }
+        }
+
+        list.decref(alignment, element_width, elements_refcounted, dec);
 
         return output;
     } else {
@@ -812,8 +735,11 @@ pub fn listSortWith(
     data_is_owned: bool,
     alignment: u32,
     element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+    dec: Dec,
 ) callconv(.C) RocList {
-    var list = input.makeUnique(alignment, element_width);
+    var list = input.makeUnique(alignment, element_width, elements_refcounted, inc, dec);
 
     if (data_is_owned) {
         inc_n_data(data, list.len());
@@ -845,7 +771,7 @@ fn swap(width_initial: usize, p1: [*]u8, p2: [*]u8) void {
     var ptr2 = p2;
 
     var buffer_actual: [threshold]u8 = undefined;
-    var buffer: [*]u8 = buffer_actual[0..];
+    const buffer: [*]u8 = buffer_actual[0..];
 
     while (true) {
         if (width < threshold) {
@@ -863,43 +789,60 @@ fn swap(width_initial: usize, p1: [*]u8, p2: [*]u8) void {
 }
 
 fn swapElements(source_ptr: [*]u8, element_width: usize, index_1: usize, index_2: usize) void {
-    var element_at_i = source_ptr + (index_1 * element_width);
-    var element_at_j = source_ptr + (index_2 * element_width);
+    const element_at_i = source_ptr + (index_1 * element_width);
+    const element_at_j = source_ptr + (index_2 * element_width);
 
     return swap(element_width, element_at_i, element_at_j);
 }
 
-pub fn listConcat(list_a: RocList, list_b: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
+pub fn listConcat(
+    list_a: RocList,
+    list_b: RocList,
+    alignment: u32,
+    element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+    dec: Dec,
+) callconv(.C) RocList {
     // NOTE we always use list_a! because it is owned, we must consume it, and it may have unused capacity
     if (list_b.isEmpty()) {
         if (list_a.getCapacity() == 0) {
             // a could be a seamless slice, so we still need to decref.
-            list_a.decref(alignment);
+            list_a.decref(alignment, element_width, elements_refcounted, dec);
             return list_b;
         } else {
             // we must consume this list. Even though it has no elements, it could still have capacity
-            list_b.decref(alignment);
+            list_b.decref(alignment, element_width, elements_refcounted, dec);
 
             return list_a;
         }
     } else if (list_a.isUnique()) {
         const total_length: usize = list_a.len() + list_b.len();
 
-        const resized_list_a = list_a.reallocate(alignment, total_length, element_width);
+        const resized_list_a = list_a.reallocate(alignment, total_length, element_width, elements_refcounted, inc);
 
         // These must exist, otherwise, the lists would have been empty.
        const source_a = resized_list_a.bytes orelse unreachable;
        const source_b = list_b.bytes orelse unreachable;
        @memcpy(source_a[(list_a.len() * element_width)..(total_length * element_width)], source_b[0..(list_b.len() * element_width)]);
 
+        // Increment refcount of all cloned elements.
+        if (elements_refcounted) {
+            var i: usize = 0;
+            while (i < list_b.len()) : (i += 1) {
+                const cloned_elem = source_b + i * element_width;
+                inc(cloned_elem);
+            }
+        }
+
         // decrement list b.
-        list_b.decref(alignment);
+        list_b.decref(alignment, element_width, elements_refcounted, dec);
 
         return resized_list_a;
     } else if (list_b.isUnique()) {
         const total_length: usize = list_a.len() + list_b.len();
 
-        const resized_list_b = list_b.reallocate(alignment, total_length, element_width);
+        const resized_list_b = list_b.reallocate(alignment, total_length, element_width, elements_refcounted, inc);
 
         // These must exist, otherwise, the lists would have been empty.
         const source_a = list_a.bytes orelse unreachable;
@@ -913,14 +856,23 @@ pub fn listConcat(
         mem.copyBackwards(u8, source_b[byte_count_a .. byte_count_a + byte_count_b], source_b[0..byte_count_b]);
         @memcpy(source_b[0..byte_count_a], source_a[0..byte_count_a]);
 
+        // Increment refcount of all cloned elements.
+        if (elements_refcounted) {
+            var i: usize = 0;
+            while (i < list_a.len()) : (i += 1) {
+                const cloned_elem = source_a + i * element_width;
+                inc(cloned_elem);
+            }
+        }
+
         // decrement list a.
-        list_a.decref(alignment);
+        list_a.decref(alignment, element_width, elements_refcounted, dec);
 
         return resized_list_b;
     }
     const total_length: usize = list_a.len() + list_b.len();
 
-    const output = RocList.allocate(alignment, total_length, element_width);
+    const output = RocList.allocate(alignment, total_length, element_width, elements_refcounted);
 
     // These must exist, otherwise, the lists would have been empty.
     const target = output.bytes orelse unreachable;
@@ -930,9 +882,23 @@ pub fn listConcat(
     @memcpy(target[0..(list_a.len() * element_width)], source_a[0..(list_a.len() * element_width)]);
     @memcpy(target[(list_a.len() * element_width)..(total_length * element_width)], source_b[0..(list_b.len() * element_width)]);
 
+    // Increment refcount of all cloned elements.
+    if (elements_refcounted) {
+        var i: usize = 0;
+        while (i < list_a.len()) : (i += 1) {
+            const cloned_elem = source_a + i * element_width;
+            inc(cloned_elem);
+        }
+        i = 0;
+        while (i < list_b.len()) : (i += 1) {
+            const cloned_elem = source_b + i * element_width;
+            inc(cloned_elem);
+        }
+    }
+
     // decrement list a and b.
-    list_a.decref(alignment);
-    list_b.decref(alignment);
+    list_a.decref(alignment, element_width, elements_refcounted, dec);
+    list_b.decref(alignment, element_width, elements_refcounted, dec);
 
     return output;
 }
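Note: the same ownership discipline recurs throughout this diff (listConcat, listDropAt, makeUnique, listReleaseExcessCapacity): copy the element bytes, increment the refcount of every element now reachable from the new list, then decref the source list, which decrements its elements and frees the allocation only if it was uniquely owned. A hypothetical condensed form of that pattern (cloneElements is not a function in the diff):

// Hypothetical helper condensing the clone-and-transfer pattern.
fn cloneElements(
    dest: [*]u8,
    source: [*]u8,
    count: usize,
    element_width: usize,
    elements_refcounted: bool,
    inc: *const fn (?[*]u8) callconv(.C) void,
) void {
    const num_bytes = count * element_width;
    @memcpy(dest[0..num_bytes], source[0..num_bytes]);
    // Each element is now reachable from one more list; the caller still
    // owes a decref on the source list afterwards.
    if (elements_refcounted) {
        var i: usize = 0;
        while (i < count) : (i += 1) {
            inc(dest + i * element_width);
        }
    }
}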
@@ -960,6 +926,9 @@ pub fn listReplace(
     index: u64,
     element: Opaque,
     element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+    dec: Dec,
     out_element: ?[*]u8,
 ) callconv(.C) RocList {
     // INVARIANT: bounds checking happens on the roc side
@@ -969,7 +938,8 @@ pub fn listReplace(
     // so we don't do a bounds check here. Hence, the list is also non-empty,
-    // because inserting into an empty list is always out of bounds,
-    // and it's always safe to cast index to usize.
-    return listReplaceInPlaceHelp(list.makeUnique(alignment, element_width), @as(usize, @intCast(index)), element, element_width, out_element);
+    // because inserting into an empty list is always out of bounds
+    return listReplaceInPlaceHelp(list.makeUnique(alignment, element_width, elements_refcounted, inc, dec), @as(usize, @intCast(index)), element, element_width, out_element);
 }
 
 inline fn listReplaceInPlaceHelp(
@@ -1001,8 +971,11 @@ pub fn listClone(
     list: RocList,
     alignment: u32,
     element_width: usize,
+    elements_refcounted: bool,
+    inc: Inc,
+    dec: Dec,
 ) callconv(.C) RocList {
-    return list.makeUnique(alignment, element_width);
+    return list.makeUnique(alignment, element_width, elements_refcounted, inc, dec);
 }
 
 pub fn listCapacity(
@@ -1014,23 +987,25 @@ pub fn listCapacity(
 pub fn listAllocationPtr(
     list: RocList,
 ) callconv(.C) ?[*]u8 {
-    return list.getAllocationPtr();
+    return list.getAllocationDataPtr();
 }
 
+fn rcNone(_: ?[*]u8) callconv(.C) void {}
+
 test "listConcat: non-unique with unique overlapping" {
-    var nonUnique = RocList.fromSlice(u8, ([_]u8{1})[0..]);
-    var bytes: [*]u8 = @as([*]u8, @ptrCast(nonUnique.bytes));
+    var nonUnique = RocList.fromSlice(u8, ([_]u8{1})[0..], false);
+    const bytes: [*]u8 = @as([*]u8, @ptrCast(nonUnique.bytes));
     const ptr_width = @sizeOf(usize);
     const refcount_ptr = @as([*]isize, @ptrCast(@as([*]align(ptr_width) u8, @alignCast(bytes)) - ptr_width));
     utils.increfRcPtrC(&refcount_ptr[0], 1);
-    defer nonUnique.decref(@sizeOf(u8)); // listConcat will dec the other refcount
+    defer nonUnique.decref(@alignOf(u8), @sizeOf(u8), false, rcNone); // listConcat will dec the other refcount
 
-    var unique = RocList.fromSlice(u8, ([_]u8{ 2, 3, 4 })[0..]);
-    defer unique.decref(@sizeOf(u8));
+    var unique = RocList.fromSlice(u8, ([_]u8{ 2, 3, 4 })[0..], false);
+    defer unique.decref(@alignOf(u8), @sizeOf(u8), false, rcNone);
 
-    var concatted = listConcat(nonUnique, unique, 1, 1);
-    var wanted = RocList.fromSlice(u8, ([_]u8{ 1, 2, 3, 4 })[0..]);
-    defer wanted.decref(@sizeOf(u8));
+    var concatted = listConcat(nonUnique, unique, 1, 1, false, rcNone, rcNone);
+    var wanted = RocList.fromSlice(u8, ([_]u8{ 1, 2, 3, 4 })[0..], false);
+    defer wanted.decref(@alignOf(u8), @sizeOf(u8), false, rcNone);
 
     try expect(concatted.eql(wanted));
 }
@@ -1045,7 +1020,7 @@ pub fn listConcatUtf8(
     const combined_length = list.len() + string.len();
 
     // List U8 has alignment 1 and element_width 1
-    var result = list.reallocate(1, combined_length, 1);
+    const result = list.reallocate(1, combined_length, 1, false, &rcNone);
     // We just allocated combined_length, which is > 0 because string.len() > 0
     var bytes = result.bytes orelse unreachable;
     @memcpy(bytes[list.len()..combined_length], string.asU8ptr()[0..string.len()]);
@@ -1055,13 +1030,13 @@ pub fn listConcatUtf8(
 }
 
 test "listConcatUtf8" {
-    const list = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4 });
-    defer list.decref(1);
+    const list = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4 }, false);
+    defer list.decref(1, 1, false, &rcNone);
     const string_bytes = "🐦";
     const string = str.RocStr.init(string_bytes, string_bytes.len);
     defer string.decref();
     const ret = listConcatUtf8(list, string);
-    const expected = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4, 240, 159, 144, 166 });
-    defer expected.decref(1);
+    const expected = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4, 240, 159, 144, 166 }, false);
+    defer expected.decref(1, 1, false, &rcNone);
     try expect(ret.eql(expected));
 }
@@ -65,10 +65,6 @@ comptime {
 const list = @import("list.zig");
 
 comptime {
-    exportListFn(list.listMap, "map");
-    exportListFn(list.listMap2, "map2");
-    exportListFn(list.listMap3, "map3");
-    exportListFn(list.listMap4, "map4");
     exportListFn(list.listAppendUnsafe, "append_unsafe");
     exportListFn(list.listReserve, "reserve");
     exportListFn(list.listPrepend, "prepend");
@@ -86,6 +82,8 @@ comptime {
     exportListFn(list.listAllocationPtr, "allocation_ptr");
     exportListFn(list.listReleaseExcessCapacity, "release_excess_capacity");
     exportListFn(list.listConcatUtf8, "concat_utf8");
+    exportListFn(list.listIncref, "incref");
+    exportListFn(list.listDecref, "decref");
 }
 
 // Num Module
@@ -96,7 +96,7 @@ pub const RocStr = extern struct {
     }
 
     fn allocateBig(length: usize, capacity: usize) RocStr {
-        const first_element = utils.allocateWithRefcount(capacity, @sizeOf(usize));
+        const first_element = utils.allocateWithRefcount(capacity, @sizeOf(usize), false);
 
         return RocStr{
             .bytes = first_element,
@@ -172,7 +172,7 @@ pub const RocStr = extern struct {
 
     pub fn decref(self: RocStr) void {
         if (!self.isSmallStr()) {
-            utils.decref(self.getAllocationPtr(), self.capacity_or_alloc_ptr, RocStr.alignment);
+            utils.decref(self.getAllocationPtr(), self.capacity_or_alloc_ptr, RocStr.alignment, false);
         }
     }
 
@@ -247,6 +247,7 @@ pub const RocStr = extern struct {
             old_capacity,
             new_capacity,
             element_width,
+            false,
         );
 
         return RocStr{ .bytes = new_source, .length = new_length, .capacity_or_alloc_ptr = new_capacity };
@@ -600,7 +601,7 @@ fn strFromFloatHelp(comptime T: type, float: T) RocStr {
 // Str.split
 pub fn strSplit(string: RocStr, delimiter: RocStr) callconv(.C) RocList {
     const segment_count = countSegments(string, delimiter);
-    const list = RocList.allocate(@alignOf(RocStr), segment_count, @sizeOf(RocStr));
+    const list = RocList.allocate(@alignOf(RocStr), segment_count, @sizeOf(RocStr), true);
 
     if (list.bytes) |bytes| {
         const strings = @as([*]RocStr, @ptrCast(@alignCast(bytes)));
@@ -1427,7 +1428,7 @@ inline fn strToBytes(arg: RocStr) RocList {
     if (length == 0) {
         return RocList.empty();
     } else if (arg.isSmallStr()) {
-        const ptr = utils.allocateWithRefcount(length, RocStr.alignment);
+        const ptr = utils.allocateWithRefcount(length, RocStr.alignment, false);
 
         @memcpy(ptr[0..length], arg.asU8ptr()[0..length]);
 
@@ -1457,7 +1458,7 @@ pub fn fromUtf8(
     update_mode: UpdateMode,
 ) FromUtf8Result {
     if (list.len() == 0) {
-        list.decref(1); // Alignment 1 for List U8
+        list.decref(@alignOf(u8), @sizeOf(u8), false, rcNone);
         return FromUtf8Result{
             .is_ok = true,
             .string = RocStr.empty(),
@@ -1479,7 +1480,7 @@ pub fn fromUtf8(
     } else {
         const temp = errorToProblem(bytes);
 
-        list.decref(1); // Alignment 1 for List U8
+        list.decref(@alignOf(u8), @sizeOf(u8), false, rcNone);
 
         return FromUtf8Result{
             .is_ok = false,
@@ -1603,7 +1604,7 @@ fn expectOk(result: FromUtf8Result) !void {
 }
 
 fn sliceHelp(bytes: [*]const u8, length: usize) RocList {
-    var list = RocList.allocate(RocStr.alignment, length, @sizeOf(u8));
+    var list = RocList.allocate(RocStr.alignment, length, @sizeOf(u8), false);
     var list_bytes = list.bytes orelse unreachable;
     @memcpy(list_bytes[0..length], bytes[0..length]);
     list.length = length;
@@ -1971,6 +1972,13 @@ fn countTrailingWhitespaceBytes(string: RocStr) usize {
     return byte_count;
 }
 
+fn rcNone(_: ?[*]u8) callconv(.C) void {}
+
+fn decStr(ptr: ?[*]u8) callconv(.C) void {
+    const str_ptr = @as(*RocStr, @ptrCast(@alignCast(ptr orelse unreachable)));
+    str_ptr.decref();
+}
+
 /// A backwards version of Utf8View from std.unicode
 const ReverseUtf8View = struct {
     bytes: []const u8,
@@ -219,6 +219,7 @@ pub fn increfRcPtrC(ptr_to_refcount: *isize, amount: isize) callconv(.C) void {
 pub fn decrefRcPtrC(
     bytes_or_null: ?[*]isize,
     alignment: u32,
+    elements_refcounted: bool,
 ) callconv(.C) void {
     // IMPORTANT: bytes_or_null in this case is expected to be a pointer to the refcount
     // (NOT the start of the data, or the start of the allocation)
@@ -226,22 +227,24 @@ pub fn decrefRcPtrC(
     // this is of course unsafe, but we trust what we get from the llvm side
     var bytes = @as([*]isize, @ptrCast(bytes_or_null));
 
-    return @call(.always_inline, decref_ptr_to_refcount, .{ bytes, alignment });
+    return @call(.always_inline, decref_ptr_to_refcount, .{ bytes, alignment, elements_refcounted });
 }
 
 pub fn decrefCheckNullC(
     bytes_or_null: ?[*]u8,
     alignment: u32,
+    elements_refcounted: bool,
 ) callconv(.C) void {
     if (bytes_or_null) |bytes| {
         const isizes: [*]isize = @as([*]isize, @ptrCast(@alignCast(bytes)));
-        return @call(.always_inline, decref_ptr_to_refcount, .{ isizes - 1, alignment });
+        return @call(.always_inline, decref_ptr_to_refcount, .{ isizes - 1, alignment, elements_refcounted });
     }
 }
 
 pub fn decrefDataPtrC(
     bytes_or_null: ?[*]u8,
     alignment: u32,
+    elements_refcounted: bool,
 ) callconv(.C) void {
     var bytes = bytes_or_null orelse return;
 
@@ -252,7 +255,7 @@ pub fn decrefDataPtrC(
     const isizes: [*]isize = @as([*]isize, @ptrFromInt(unmasked_ptr));
     const rc_ptr = isizes - 1;
 
-    return decrefRcPtrC(rc_ptr, alignment);
+    return decrefRcPtrC(rc_ptr, alignment, elements_refcounted);
 }
 
 pub fn increfDataPtrC(
@@ -273,6 +276,7 @@ pub fn increfDataPtrC(
 pub fn freeDataPtrC(
     bytes_or_null: ?[*]u8,
     alignment: u32,
+    elements_refcounted: bool,
 ) callconv(.C) void {
     var bytes = bytes_or_null orelse return;
 
@@ -283,21 +287,23 @@ pub fn freeDataPtrC(
     const isizes: [*]isize = @as([*]isize, @ptrFromInt(masked_ptr));
 
     // we always store the refcount right before the data
-    return freeRcPtrC(isizes - 1, alignment);
+    return freeRcPtrC(isizes - 1, alignment, elements_refcounted);
 }
 
 pub fn freeRcPtrC(
     bytes_or_null: ?[*]isize,
     alignment: u32,
+    elements_refcounted: bool,
 ) callconv(.C) void {
     var bytes = bytes_or_null orelse return;
-    return free_ptr_to_refcount(bytes, alignment);
+    return free_ptr_to_refcount(bytes, alignment, elements_refcounted);
 }
 
 pub fn decref(
     bytes_or_null: ?[*]u8,
     data_bytes: usize,
     alignment: u32,
+    elements_refcounted: bool,
 ) void {
     if (data_bytes == 0) {
         return;
@@ -307,15 +313,18 @@ pub fn decref(
 
     const isizes: [*]isize = @as([*]isize, @ptrCast(@alignCast(bytes)));
 
-    decref_ptr_to_refcount(isizes - 1, alignment);
+    decref_ptr_to_refcount(isizes - 1, alignment, elements_refcounted);
 }
 
 inline fn free_ptr_to_refcount(
     refcount_ptr: [*]isize,
     alignment: u32,
+    elements_refcounted: bool,
 ) void {
     if (RC_TYPE == Refcount.none) return;
-    const extra_bytes = @max(alignment, @sizeOf(usize));
+    const ptr_width = @sizeOf(usize);
+    const required_space: usize = if (elements_refcounted) (2 * ptr_width) else ptr_width;
+    const extra_bytes = @max(required_space, alignment);
     const allocation_ptr = @as([*]u8, @ptrCast(refcount_ptr)) - (extra_bytes - @sizeOf(usize));
 
     // NOTE: we don't even check whether the refcount is "infinity" here!
@@ -328,7 +337,8 @@ inline fn free_ptr_to_refcount(
 
 inline fn decref_ptr_to_refcount(
     refcount_ptr: [*]isize,
-    alignment: u32,
+    element_alignment: u32,
+    elements_refcounted: bool,
 ) void {
     if (RC_TYPE == Refcount.none) return;
 
@@ -336,6 +346,10 @@ inline fn decref_ptr_to_refcount(
         std.debug.print("| decrement {*}: ", .{refcount_ptr});
     }
 
+    // Due to RC alignment, we must take into account pointer size.
+    const ptr_width = @sizeOf(usize);
+    const alignment = @max(ptr_width, element_alignment);
+
     // Ensure that the refcount is not whole program lifetime.
     const refcount: isize = refcount_ptr[0];
     if (refcount != REFCOUNT_MAX_ISIZE) {
@@ -353,13 +367,13 @@ inline fn decref_ptr_to_refcount(
             }
 
             if (refcount == REFCOUNT_ONE_ISIZE) {
-                free_ptr_to_refcount(refcount_ptr, alignment);
+                free_ptr_to_refcount(refcount_ptr, alignment, elements_refcounted);
             }
         },
         Refcount.atomic => {
             var last = @atomicRmw(isize, &refcount_ptr[0], std.builtin.AtomicRmwOp.Sub, 1, Monotonic);
             if (last == REFCOUNT_ONE_ISIZE) {
-                free_ptr_to_refcount(refcount_ptr, alignment);
+                free_ptr_to_refcount(refcount_ptr, alignment, elements_refcounted);
             }
         },
         Refcount.none => unreachable,
@@ -438,17 +452,23 @@ pub inline fn calculateCapacity(
 pub fn allocateWithRefcountC(
     data_bytes: usize,
     element_alignment: u32,
+    elements_refcounted: bool,
 ) callconv(.C) [*]u8 {
-    return allocateWithRefcount(data_bytes, element_alignment);
+    return allocateWithRefcount(data_bytes, element_alignment, elements_refcounted);
 }
 
 pub fn allocateWithRefcount(
     data_bytes: usize,
     element_alignment: u32,
+    elements_refcounted: bool,
 ) [*]u8 {
+    // If the element type is refcounted, we need to also allocate space to store the element count on the heap.
+    // This is used so that a seamless slice can de-allocate the underlying list type.
     const ptr_width = @sizeOf(usize);
     const alignment = @max(ptr_width, element_alignment);
-    const length = alignment + data_bytes;
+    const required_space: usize = if (elements_refcounted) (2 * ptr_width) else ptr_width;
+    const extra_bytes = @max(required_space, element_alignment);
+    const length = extra_bytes + data_bytes;
 
     var new_bytes: [*]u8 = alloc(length, alignment) orelse unreachable;
 
@@ -456,7 +476,7 @@ pub fn allocateWithRefcount(
         std.debug.print("+ allocated {*} ({} bytes with alignment {})\n", .{ new_bytes, data_bytes, alignment });
     }
 
-    const data_ptr = new_bytes + alignment;
+    const data_ptr = new_bytes + extra_bytes;
     const refcount_ptr = @as([*]usize, @ptrCast(@as([*]align(ptr_width) u8, @alignCast(data_ptr)) - ptr_width));
     refcount_ptr[0] = if (RC_TYPE == Refcount.none) REFCOUNT_MAX_ISIZE else REFCOUNT_ONE;
 
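Note: the allocation header grows from a single refcount word to @max(required_space, element_alignment) bytes, where required_space is two pointer widths when elements are refcounted (element count plus refcount) and one pointer width otherwise. A small sketch of that arithmetic, assuming a 64-bit target (extraBytes is a restatement of the logic above, not the builtins' API):

const std = @import("std");

fn extraBytes(element_alignment: u32, elements_refcounted: bool) usize {
    const ptr_width: usize = @sizeOf(usize);
    const required_space: usize = if (elements_refcounted) 2 * ptr_width else ptr_width;
    return @max(required_space, element_alignment);
}

test "header sizes on a 64-bit target" {
    // Plain List U8: one word for the refcount.
    try std.testing.expectEqual(@as(usize, 8), extraBytes(1, false));
    // List of RocStr (alignment 8, refcounted): element count plus refcount.
    try std.testing.expectEqual(@as(usize, 16), extraBytes(8, true));
    // Refcounted elements with 16-byte alignment: the alignment dominates.
    try std.testing.expectEqual(@as(usize, 16), extraBytes(16, true));
}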
@@ -474,11 +494,14 @@ pub fn unsafeReallocate(
     old_length: usize,
     new_length: usize,
     element_width: usize,
+    elements_refcounted: bool,
 ) [*]u8 {
-    const align_width: usize = @max(alignment, @sizeOf(usize));
+    const ptr_width: usize = @sizeOf(usize);
+    const required_space: usize = if (elements_refcounted) (2 * ptr_width) else ptr_width;
+    const extra_bytes = @max(required_space, alignment);
 
-    const old_width = align_width + old_length * element_width;
-    const new_width = align_width + new_length * element_width;
+    const old_width = extra_bytes + old_length * element_width;
+    const new_width = extra_bytes + new_length * element_width;
 
     if (old_width >= new_width) {
         return source_ptr;
|
|||
|
||||
// TODO handle out of memory
|
||||
// NOTE realloc will dealloc the original allocation
|
||||
const old_allocation = source_ptr - align_width;
|
||||
const old_allocation = source_ptr - extra_bytes;
|
||||
const new_allocation = realloc(old_allocation, new_width, old_width, alignment);
|
||||
|
||||
const new_source = @as([*]u8, @ptrCast(new_allocation)) + align_width;
|
||||
const new_source = @as([*]u8, @ptrCast(new_allocation)) + extra_bytes;
|
||||
return new_source;
|
||||
}
|
||||
|
||||
|
|
|
@@ -746,6 +746,15 @@ keepErrs = \list, toResult ->
 ## expect List.map ["", "a", "bc"] Str.isEmpty == [Bool.true, Bool.false, Bool.false]
 ## ```
 map : List a, (a -> b) -> List b
+map = \list, mapper ->
+    # TODO: allow checking the refcounting and running the map inplace.
+    # Preferably allow it even if the types are different (must be same size with padding though).
+    length = List.len list
+    List.walk
+        list
+        (List.withCapacity length)
+        \state, elem ->
+            List.appendUnsafe state (mapper elem)
 
 ## Run a transformation function on the first element of each list,
 ## and use that as the first element in the returned list.
@@ -757,16 +766,56 @@ map : List a, (a -> b) -> List b
 ## zipped = List.map2 ["a", "b", "c"] [1, 2, 3] Pair
 ## ```
 map2 : List a, List b, (a, b -> c) -> List c
+map2 = \listA, listB, mapper ->
+    length = Num.min (List.len listA) (List.len listB)
+    map2Help listA listB (List.withCapacity length) mapper 0 length
+
+map2Help : List a, List b, List c, (a, b -> c), U64, U64 -> List c
+map2Help = \listA, listB, out, mapper, index, length ->
+    if index < length then
+        mapped = mapper (List.getUnsafe listA index) (List.getUnsafe listB index)
+
+        map2Help listA listB (List.appendUnsafe out mapped) mapper (Num.addWrap index 1) length
+    else
+        out
 
 ## Run a transformation function on the first element of each list,
 ## and use that as the first element in the returned list.
 ## Repeat until a list runs out of elements.
 map3 : List a, List b, List c, (a, b, c -> d) -> List d
+map3 = \listA, listB, listC, mapper ->
+    length = Num.min
+        (Num.min (List.len listA) (List.len listB))
+        (List.len listC)
+    map3Help listA listB listC (List.withCapacity length) mapper 0 length
+
+map3Help : List a, List b, List c, List d, (a, b, c -> d), U64, U64 -> List d
+map3Help = \listA, listB, listC, out, mapper, index, length ->
+    if index < length then
+        mapped = mapper (List.getUnsafe listA index) (List.getUnsafe listB index) (List.getUnsafe listC index)
+
+        map3Help listA listB listC (List.appendUnsafe out mapped) mapper (Num.addWrap index 1) length
+    else
+        out
 
 ## Run a transformation function on the first element of each list,
 ## and use that as the first element in the returned list.
 ## Repeat until a list runs out of elements.
 map4 : List a, List b, List c, List d, (a, b, c, d -> e) -> List e
+map4 = \listA, listB, listC, listD, mapper ->
+    length = Num.min
+        (Num.min (List.len listA) (List.len listB))
+        (Num.min (List.len listC) (List.len listD))
+    map4Help listA listB listC listD (List.withCapacity length) mapper 0 length
+
+map4Help : List a, List b, List c, List d, List e, (a, b, c, d -> e), U64, U64 -> List e
+map4Help = \listA, listB, listC, listD, out, mapper, index, length ->
+    if index < length then
+        mapped = mapper (List.getUnsafe listA index) (List.getUnsafe listB index) (List.getUnsafe listC index) (List.getUnsafe listD index)
+
+        map4Help listA listB listC listD (List.append out mapped) mapper (Num.addWrap index 1) length
+    else
+        out
 
 ## This works like [List.map], except it also passes the index
 ## of the element to the conversion function.
@@ -391,6 +391,8 @@ pub const LIST_CAPACITY: &str = "roc_builtins.list.capacity";
 pub const LIST_ALLOCATION_PTR: &str = "roc_builtins.list.allocation_ptr";
 pub const LIST_RELEASE_EXCESS_CAPACITY: &str = "roc_builtins.list.release_excess_capacity";
 pub const LIST_CONCAT_UTF8: &str = "roc_builtins.list.concat_utf8";
+pub const LIST_INCREF: &str = "roc_builtins.list.incref";
+pub const LIST_DECREF: &str = "roc_builtins.list.decref";
 
 pub const DEC_ABS: &str = "roc_builtins.dec.abs";
 pub const DEC_ACOS: &str = "roc_builtins.dec.acos";