Rearrange arg order for roc_alloc etc

commit 0b3715ebee
parent 7d7588ca19
Author: Richard Feldman
Date:   2021-05-24 20:27:04 -04:00

22 changed files with 166 additions and 161 deletions
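After this commit, every allocator entrypoint takes the pointer or size first and the alignment last, and the alignment parameter narrows from usize to u32 (see the utils.zig externs below). As a sketch only, here is what a libc-backed host might plausibly provide under the new signatures. These host-side bodies are illustrative assumptions, not part of this commit; they lean on malloc's default alignment covering Roc's 8- and 16-byte requests, and per the contract in utils.zig they must throw rather than return null on failure.

    const std = @import("std");

    // Illustrative host implementations matching the reordered externs.
    // Assumption: malloc's default alignment (16 on common 64-bit targets)
    // covers every alignment Roc requests, so `alignment` can be ignored.
    export fn roc_alloc(size: usize, alignment: u32) *c_void {
        _ = alignment;
        // A real host must throw/abort on failure instead of returning null.
        return std.c.malloc(size) orelse unreachable;
    }

    export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) *c_void {
        _ = old_size;
        _ = alignment;
        return std.c.realloc(c_ptr, new_size) orelse unreachable;
    }

    export fn roc_dealloc(c_ptr: *c_void, alignment: u32) void {
        _ = alignment;
        std.c.free(c_ptr);
    }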

dict.zig

@@ -73,7 +73,7 @@ const Alignment = packed enum(u8) {
     Align8KeyFirst,
     Align8ValueFirst,

-    fn toUsize(self: Alignment) usize {
+    fn toU32(self: Alignment) u32 {
         switch (self) {
             .Align16KeyFirst => return 16,
             .Align16ValueFirst => return 16,
@@ -97,14 +97,14 @@ pub fn decref(
     bytes_or_null: ?[*]u8,
     data_bytes: usize,
 ) void {
-    return utils.decref(alignment.toUsize(), bytes_or_null, data_bytes);
+    return utils.decref(alignment.toU32(), bytes_or_null, data_bytes);
 }

 pub fn allocateWithRefcount(
-    alignment: Alignment,
     data_bytes: usize,
+    alignment: Alignment,
 ) [*]u8 {
-    return utils.allocateWithRefcount(alignment.toUsize(), data_bytes);
+    return utils.allocateWithRefcount(data_bytes, alignment.toU32());
 }

 pub const RocDict = extern struct {
@@ -132,7 +132,7 @@ pub const RocDict = extern struct {
         const data_bytes = number_of_slots * slot_size;

         return RocDict{
-            .dict_bytes = allocateWithRefcount(alignment, data_bytes),
+            .dict_bytes = allocateWithRefcount(data_bytes, alignment),
             .number_of_levels = number_of_levels,
             .dict_entries_len = number_of_entries,
         };
@@ -152,7 +152,7 @@ pub const RocDict = extern struct {
         const delta_capacity = new_capacity - old_capacity;

         const data_bytes = new_capacity * slot_size;
-        const first_slot = allocateWithRefcount(alignment, data_bytes);
+        const first_slot = allocateWithRefcount(data_bytes, alignment);

         // transfer the memory
@@ -570,7 +570,7 @@ pub fn dictKeys(dict: RocDict, alignment: Alignment, key_width: usize, value_wid
     }

     const data_bytes = length * key_width;
-    var ptr = allocateWithRefcount(alignment, data_bytes);
+    var ptr = allocateWithRefcount(data_bytes, alignment);

     var offset = blk: {
         if (alignment.keyFirst()) {
@@ -619,7 +619,7 @@ pub fn dictValues(dict: RocDict, alignment: Alignment, key_width: usize, value_w
     }

     const data_bytes = length * value_width;
-    var ptr = allocateWithRefcount(alignment, data_bytes);
+    var ptr = allocateWithRefcount(data_bytes, alignment);

     var offset = blk: {
         if (alignment.keyFirst()) {
@@ -772,10 +772,10 @@ pub fn dictWalk(
     inc_value: Inc,
     output: Opaque,
 ) callconv(.C) void {
-    const alignment_usize = alignment.toUsize();
+    const alignment_u32 = alignment.toU32();

     // allocate space to write the result of the stepper into
     // experimentally aliasing the accum and output pointers is not a good idea
-    const bytes_ptr: [*]u8 = utils.alloc(alignment_usize, accum_width);
+    const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment_u32);

     var b1 = output orelse unreachable;
     var b2 = bytes_ptr;
@@ -804,5 +804,5 @@ pub fn dictWalk(
     }

     @memcpy(output orelse unreachable, b2, accum_width);
-    utils.dealloc(alignment_usize, bytes_ptr);
+    utils.dealloc(bytes_ptr, alignment_u32);
 }
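
The dict's Alignment enum packs two facts into one byte: the numeric alignment and whether keys or values come first in a slot. The renamed toU32 (formerly toUsize) extracts just the numeric part for the generic utils calls. A tiny illustrative test, assuming it sits in the same file as the enum diffed above:

    const std = @import("std");

    // Hypothetical test, for illustration only: the Align16*/Align8*
    // variants above map to numeric alignments of 16 and 8 respectively.
    test "toU32 extracts the numeric alignment" {
        std.debug.assert(Alignment.Align16KeyFirst.toU32() == 16);
        std.debug.assert(Alignment.Align8ValueFirst.toU32() == 8);
    }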

list.zig

@@ -41,19 +41,19 @@ pub const RocList = extern struct {
     }

     pub fn allocate(
-        alignment: usize,
+        alignment: u32,
         length: usize,
         element_size: usize,
     ) RocList {
         const data_bytes = length * element_size;

         return RocList{
-            .bytes = utils.allocateWithRefcount(alignment, data_bytes),
+            .bytes = utils.allocateWithRefcount(data_bytes, alignment),
             .length = length,
         };
     }

-    pub fn makeUnique(self: RocList, alignment: usize, element_width: usize) RocList {
+    pub fn makeUnique(self: RocList, alignment: u32, element_width: usize) RocList {
         if (self.isEmpty()) {
             return self;
         }
@@ -80,7 +80,7 @@ pub const RocList = extern struct {

     pub fn reallocate(
         self: RocList,
-        alignment: usize,
+        alignment: u32,
         new_length: usize,
         element_width: usize,
     ) RocList {
@@ -98,7 +98,7 @@ pub const RocList = extern struct {
     /// reallocate by explicitly making a new allocation and copying elements over
     fn reallocateFresh(
         self: RocList,
-        alignment: usize,
+        alignment: u32,
         new_length: usize,
         element_width: usize,
     ) RocList {
@@ -106,7 +106,7 @@ pub const RocList = extern struct {
         const delta_length = new_length - old_length;

         const data_bytes = new_length * element_width;
-        const first_slot = utils.allocateWithRefcount(alignment, data_bytes);
+        const first_slot = utils.allocateWithRefcount(data_bytes, alignment);

         // transfer the memory
@@ -133,7 +133,7 @@ const Caller1 = fn (?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
 const Caller2 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
 const Caller3 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;

-pub fn listReverse(list: RocList, alignment: usize, element_width: usize) callconv(.C) RocList {
+pub fn listReverse(list: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
     if (list.bytes) |source_ptr| {
         const size = list.len();
@@ -177,7 +177,7 @@ pub fn listMap(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     old_element_width: usize,
     new_element_width: usize,
 ) callconv(.C) RocList {
@@ -207,7 +207,7 @@ pub fn listMapWithIndex(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     old_element_width: usize,
     new_element_width: usize,
 ) callconv(.C) RocList {
@@ -248,7 +248,7 @@ pub fn listMap2(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     a_width: usize,
     b_width: usize,
     c_width: usize,
@@ -296,7 +296,7 @@ pub fn listMap3(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     a_width: usize,
     b_width: usize,
     c_width: usize,
@@ -350,7 +350,7 @@ pub fn listKeepIf(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     element_width: usize,
     inc: Inc,
     dec: Dec,
@@ -401,7 +401,7 @@ pub fn listKeepOks(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     before_width: usize,
     result_width: usize,
     after_width: usize,
@@ -428,7 +428,7 @@ pub fn listKeepErrs(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     before_width: usize,
     result_width: usize,
     after_width: usize,
@@ -456,7 +456,7 @@ pub fn listKeepResult(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     before_width: usize,
     result_width: usize,
     after_width: usize,
@@ -468,7 +468,7 @@ pub fn listKeepResult(
     var output = RocList.allocate(alignment, list.len(), list.len() * after_width);
     const target_ptr = output.bytes orelse unreachable;

-    var temporary = @ptrCast([*]u8, utils.alloc(alignment, result_width));
+    var temporary = @ptrCast([*]u8, utils.alloc(result_width, alignment));

     if (data_is_owned) {
         inc_n_data(data, size);
@@ -490,7 +490,7 @@ pub fn listKeepResult(
         }
     }

-    utils.dealloc(alignment, temporary);
+    utils.dealloc(temporary, alignment);

     if (kept == 0) {
         utils.decref(alignment, output.bytes, size * after_width);
@@ -511,7 +511,7 @@ pub fn listWalk(
     inc_n_data: IncN,
     data_is_owned: bool,
     accum: Opaque,
-    alignment: usize,
+    alignment: u32,
     element_width: usize,
     accum_width: usize,
     output: Opaque,
@@ -529,7 +529,7 @@ pub fn listWalk(
         inc_n_data(data, list.len());
     }

-    const bytes_ptr: [*]u8 = utils.alloc(alignment, accum_width);
+    const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment);

     var b1 = output orelse unreachable;
     var b2 = bytes_ptr;
@@ -549,7 +549,7 @@ pub fn listWalk(
     }

     @memcpy(output orelse unreachable, b2, accum_width);
-    utils.dealloc(alignment, bytes_ptr);
+    utils.dealloc(bytes_ptr, alignment);
 }

 pub fn listWalkBackwards(
@@ -559,7 +559,7 @@ pub fn listWalkBackwards(
     inc_n_data: IncN,
     data_is_owned: bool,
     accum: Opaque,
-    alignment: usize,
+    alignment: u32,
     element_width: usize,
     accum_width: usize,
     output: Opaque,
@@ -577,7 +577,7 @@ pub fn listWalkBackwards(
         inc_n_data(data, list.len());
     }

-    const bytes_ptr: [*]u8 = utils.alloc(alignment, accum_width);
+    const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment);

     var b1 = output orelse unreachable;
     var b2 = bytes_ptr;
@@ -598,7 +598,7 @@ pub fn listWalkBackwards(
     }

     @memcpy(output orelse unreachable, b2, accum_width);
-    utils.dealloc(alignment, bytes_ptr);
+    utils.dealloc(bytes_ptr, alignment);
 }

 pub fn listWalkUntil(
@@ -608,7 +608,7 @@ pub fn listWalkUntil(
     inc_n_data: IncN,
     data_is_owned: bool,
     accum: Opaque,
-    alignment: usize,
+    alignment: u32,
     element_width: usize,
     accum_width: usize,
     dec: Dec,
@@ -626,7 +626,7 @@ pub fn listWalkUntil(
         return;
     }

-    const bytes_ptr: [*]u8 = utils.alloc(alignment, TAG_WIDTH + accum_width);
+    const bytes_ptr: [*]u8 = utils.alloc(TAG_WIDTH + accum_width, alignment);

     @memcpy(bytes_ptr + TAG_WIDTH, accum orelse unreachable, accum_width);
@@ -655,7 +655,7 @@ pub fn listWalkUntil(
     }

     @memcpy(output orelse unreachable, bytes_ptr + TAG_WIDTH, accum_width);
-    utils.dealloc(alignment, bytes_ptr);
+    utils.dealloc(bytes_ptr, alignment);
 }

 // List.contains : List k, k -> Bool
@@ -674,7 +674,7 @@ pub fn listContains(list: RocList, key: Opaque, key_width: usize, is_eq: EqFn) c
     return false;
 }

-pub fn listRepeat(count: usize, alignment: usize, element: Opaque, element_width: usize, inc_n_element: IncN) callconv(.C) RocList {
+pub fn listRepeat(count: usize, alignment: u32, element: Opaque, element_width: usize, inc_n_element: IncN) callconv(.C) RocList {
     if (count == 0) {
         return RocList.empty();
     }
@@ -697,7 +697,7 @@ pub fn listRepeat(count: usize, alignment: usize, element: Opaque, element_width
     }
 }

-pub fn listSingle(alignment: usize, element: Opaque, element_width: usize) callconv(.C) RocList {
+pub fn listSingle(alignment: u32, element: Opaque, element_width: usize) callconv(.C) RocList {
     var output = RocList.allocate(alignment, 1, element_width);

     if (output.bytes) |target| {
@@ -709,7 +709,7 @@ pub fn listSingle(alignment: usize, element: Opaque, element_width: usize) callc
     return output;
 }

-pub fn listAppend(list: RocList, alignment: usize, element: Opaque, element_width: usize) callconv(.C) RocList {
+pub fn listAppend(list: RocList, alignment: u32, element: Opaque, element_width: usize) callconv(.C) RocList {
     const old_length = list.len();
     var output = list.reallocate(alignment, old_length + 1, element_width);
@@ -724,7 +724,7 @@ pub fn listAppend(list: RocList, alignment: usize, element: Opaque, element_widt

 pub fn listDrop(
     list: RocList,
-    alignment: usize,
+    alignment: u32,
     element_width: usize,
     drop_count: usize,
     dec: Dec,
@@ -883,7 +883,7 @@ pub fn listSortWith(
     data: Opaque,
     inc_n_data: IncN,
     data_is_owned: bool,
-    alignment: usize,
+    alignment: u32,
     element_width: usize,
 ) callconv(.C) RocList {
     var list = input.makeUnique(alignment, element_width);
@@ -947,7 +947,7 @@ fn swapElements(source_ptr: [*]u8, element_width: usize, index_1: usize, index_2
     return swap(element_width, element_at_i, element_at_j);
 }

-pub fn listJoin(list_of_lists: RocList, alignment: usize, element_width: usize) callconv(.C) RocList {
+pub fn listJoin(list_of_lists: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
     var total_length: usize = 0;

     const slice_of_lists = @ptrCast([*]RocList, @alignCast(@alignOf(RocList), list_of_lists.bytes));
@@ -975,7 +975,7 @@ pub fn listJoin(list_of_lists: RocList, alignment: usize, element_width: usize)
     return output;
 }

-pub fn listConcat(list_a: RocList, list_b: RocList, alignment: usize, element_width: usize) callconv(.C) RocList {
+pub fn listConcat(list_a: RocList, list_b: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
     if (list_a.isEmpty()) {
         return list_b;
     } else if (list_b.isEmpty()) {
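
listWalk, listWalkBackwards, and listWalkUntil all share one scratch-buffer dance, which reads more naturally after the reorder because the buffer pointer leads in both the allocation and the free. A condensed sketch of that pattern (simplified from listWalk above; the fold loop is elided):

    const utils = @import("utils.zig"); // the module diffed below

    // Condensed from listWalk: allocate scratch for the accumulator,
    // fold into it, copy the final value out, then free the scratch.
    fn walkPattern(alignment: u32, accum_width: usize, output: [*]u8) void {
        const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment); // (size, alignment)

        // ... ping-pong between output and bytes_ptr while folding ...

        @memcpy(output, bytes_ptr, accum_width);
        utils.dealloc(bytes_ptr, alignment); // (pointer, alignment)
    }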

str.zig

@@ -53,7 +53,7 @@ pub const RocStr = extern struct {
     }

     pub fn initBig(in_place: InPlace, number_of_chars: u64) RocStr {
-        const first_element = utils.allocateWithRefcount(@sizeOf(usize), number_of_chars);
+        const first_element = utils.allocateWithRefcount(number_of_chars, @sizeOf(usize));

         return RocStr{
             .str_bytes = first_element,
@@ -99,7 +99,7 @@ pub const RocStr = extern struct {
         if (length < roc_str_size) {
             return RocStr.empty();
         } else {
-            var new_bytes: []T = utils.alloc(RocStr.alignment, length) catch unreachable;
+            var new_bytes: []T = utils.alloc(length, RocStr.alignment) catch unreachable;

             var new_bytes_ptr: [*]u8 = @ptrCast([*]u8, &new_bytes);
@@ -1070,7 +1070,7 @@ fn strToBytes(arg: RocStr) RocList {
         return RocList.empty();
     } else if (arg.isSmallStr()) {
         const length = arg.len();
-        const ptr = utils.allocateWithRefcount(RocStr.alignment, length);
+        const ptr = utils.allocateWithRefcount(length, RocStr.alignment);

         @memcpy(ptr, arg.asU8ptr(), length);

utils.zig

@@ -2,14 +2,14 @@ const std = @import("std");
 const always_inline = std.builtin.CallOptions.Modifier.always_inline;

 // If allocation fails, this must cxa_throw - it must not return a null pointer!
-extern fn roc_alloc(alignment: usize, size: usize) callconv(.C) *c_void;
+extern fn roc_alloc(size: usize, alignment: u32) callconv(.C) *c_void;

 // This should never be passed a null pointer.
 // If allocation fails, this must cxa_throw - it must not return a null pointer!
-extern fn roc_realloc(alignment: usize, c_ptr: *c_void, old_size: usize, new_size: usize) callconv(.C) *c_void;
+extern fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) *c_void;

 // This should never be passed a null pointer.
-extern fn roc_dealloc(alignment: usize, c_ptr: *c_void) callconv(.C) void;
+extern fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void;

 comptime {
     // During tests, use the testing allocators to satisfy these functions.
@@ -20,33 +20,33 @@ comptime {
     }
 }

-fn testing_roc_alloc(alignment: usize, size: usize) callconv(.C) *c_void {
+fn testing_roc_alloc(size: usize, alignment: u32) callconv(.C) *c_void {
     return @ptrCast(*c_void, std.testing.allocator.alloc(u8, size) catch unreachable);
 }

-fn testing_roc_realloc(alignment: usize, c_ptr: *c_void, old_size: usize, new_size: usize) callconv(.C) *c_void {
+fn testing_roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) *c_void {
     const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
     const slice = ptr[0..old_size];

     return @ptrCast(*c_void, std.testing.allocator.realloc(slice, new_size) catch unreachable);
 }

-fn testing_roc_dealloc(alignment: usize, c_ptr: *c_void) callconv(.C) void {
+fn testing_roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
     const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));

     std.testing.allocator.destroy(ptr);
 }

-pub fn alloc(alignment: usize, size: usize) [*]u8 {
-    return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_alloc, .{ alignment, size }));
+pub fn alloc(size: usize, alignment: u32) [*]u8 {
+    return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_alloc, .{ size, alignment }));
 }

-pub fn realloc(alignment: usize, c_ptr: [*]u8, old_size: usize, new_size: usize) [*]u8 {
-    return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_realloc, .{ alignment, c_ptr, old_size, new_size }));
+pub fn realloc(c_ptr: [*]u8, new_size: usize, old_size: usize, alignment: u32) [*]u8 {
+    return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_realloc, .{ c_ptr, new_size, old_size, alignment }));
 }

-pub fn dealloc(alignment: usize, c_ptr: [*]u8) void {
-    return @call(.{ .modifier = always_inline }, roc_dealloc, .{ alignment, c_ptr });
+pub fn dealloc(c_ptr: [*]u8, alignment: u32) void {
+    return @call(.{ .modifier = always_inline }, roc_dealloc, .{ c_ptr, alignment });
 }

 pub const Inc = fn (?[*]u8) callconv(.C) void;
@@ -110,7 +110,7 @@ pub fn intWidth(width: IntWidth) anytype {
 }

 pub fn decref(
-    alignment: usize,
+    alignment: u32,
     bytes_or_null: ?[*]u8,
     data_bytes: usize,
 ) void {
@@ -128,7 +128,7 @@ pub fn decref(
     switch (alignment) {
         16 => {
             if (refcount == REFCOUNT_ONE_ISIZE) {
-                dealloc(alignment, bytes - 16);
+                dealloc(bytes - 16, alignment);
             } else if (refcount_isize < 0) {
                 (isizes - 1)[0] = refcount - 1;
             }
@@ -136,7 +136,7 @@ pub fn decref(
         else => {
             // NOTE enums can currently have an alignment of < 8
             if (refcount == REFCOUNT_ONE_ISIZE) {
-                dealloc(alignment, bytes - 8);
+                dealloc(bytes - 8, alignment);
             } else if (refcount_isize < 0) {
                 (isizes - 1)[0] = refcount - 1;
             }
@@ -145,8 +145,8 @@ pub fn decref(
 }

 pub fn allocateWithRefcount(
-    alignment: usize,
     data_bytes: usize,
+    alignment: u32,
 ) [*]u8 {
     comptime const result_in_place = false;
@@ -154,7 +154,7 @@ pub fn allocateWithRefcount(
         16 => {
             const length = 2 * @sizeOf(usize) + data_bytes;

-            var new_bytes: [*]align(16) u8 = @alignCast(16, alloc(alignment, length));
+            var new_bytes: [*]align(16) u8 = @alignCast(16, alloc(length, alignment));

             var as_usize_array = @ptrCast([*]usize, new_bytes);
             if (result_in_place) {
@@ -173,7 +173,7 @@ pub fn allocateWithRefcount(
         else => {
             const length = @sizeOf(usize) + data_bytes;

-            var new_bytes: [*]align(8) u8 = @alignCast(8, alloc(alignment, length));
+            var new_bytes: [*]align(8) u8 = @alignCast(8, alloc(length, alignment));

             var as_isize_array = @ptrCast([*]isize, new_bytes);
             if (result_in_place) {
@@ -192,7 +192,7 @@ pub fn allocateWithRefcount(

 pub fn unsafeReallocate(
     source_ptr: [*]u8,
-    alignment: usize,
+    alignment: u32,
     old_length: usize,
     new_length: usize,
     element_width: usize,
@@ -211,7 +211,7 @@ pub fn unsafeReallocate(
     // TODO handle out of memory
     // NOTE realloc will dealloc the original allocation
     const old_allocation = source_ptr - align_width;
-    const new_allocation = realloc(alignment, old_allocation, old_width, new_width);
+    const new_allocation = realloc(old_allocation, new_width, old_width, alignment);

     const new_source = @ptrCast([*]u8, new_allocation) + align_width;
     return new_source;
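
The fixed -16 and -8 offsets that decref passes to dealloc mirror the header allocateWithRefcount writes: the refcount occupies the word immediately before the data, padded to two words under 16-byte alignment so the data itself stays aligned. A worked sketch of that offset math, assuming a 64-bit target where @sizeOf(usize) == 8; the helper name is hypothetical, for illustration only:

    // Layout written by allocateWithRefcount (assumed 64-bit target):
    //   alignment == 16: [ padding (8) ][ refcount (8) ][ data ... ]  => free at data - 16
    //   alignment <= 8:  [ refcount (8) ][ data ... ]                 => free at data - 8
    fn allocationStart(data: [*]u8, alignment: u32) [*]u8 {
        return switch (alignment) {
            16 => data - 2 * @sizeOf(usize), // padding word + refcount word
            else => data - @sizeOf(usize), // just the refcount word
        };
    }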