use FixedBufferAllocator in builtins

Folkert 2021-04-04 22:01:22 +02:00
parent 4c9accb2ba
commit 0c2da8686a
2 changed files with 29 additions and 10 deletions
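Every hunk below makes the same substitution: the scratch accumulator that used to come from malloc via std.heap.c_allocator is now carved out of a 64-byte array on the stack, wrapped in a std.heap.FixedBufferAllocator. A minimal sketch of that pattern, written against the Zig std of this era (where the Allocator interface is a struct field, not the allocator() method of later releases):

    const std = @import("std");

    pub fn main() !void {
        // Stack-backed scratch space: no heap traffic, and the memory
        // is reclaimed automatically when this frame returns.
        var buffer: [64]u8 = undefined;
        var fba = std.heap.FixedBufferAllocator.init(&buffer);
        const allocator = &fba.allocator; // field-style API, circa Zig 0.7

        // Fails with error.OutOfMemory once the 64 bytes are exhausted.
        const scratch = try allocator.alloc(u8, 16);
        std.mem.set(u8, scratch, 0);
    }

One stylistic difference from the diff: binding the FixedBufferAllocator to a named variable, rather than taking &FixedBufferAllocator.init(&buffer).allocator in a single expression, makes the lifetime of the allocator state explicit.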

@@ -763,7 +763,11 @@ const StepperCaller = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
 pub fn dictWalk(dict: RocDict, stepper: Opaque, stepper_caller: StepperCaller, accum: Opaque, alignment: Alignment, key_width: usize, value_width: usize, accum_width: usize, inc_key: Inc, inc_value: Inc, output: Opaque) callconv(.C) void {
     // allocate space to write the result of the stepper into
     // experimentally aliasing the accum and output pointers is not a good idea
-    const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable);
+    const threshold: comptime usize = 64;
+    var buffer: [threshold]u8 = undefined;
+    const buffer_allocator = &std.heap.FixedBufferAllocator.init(&buffer).allocator;
+    const alloc: [*]u8 = @ptrCast([*]u8, buffer_allocator.alloc(u8, accum_width) catch unreachable);
     var b1 = output orelse unreachable;
     var b2 = alloc;
@@ -788,7 +792,7 @@ pub fn dictWalk(dict: RocDict, stepper: Opaque, stepper_caller: StepperCaller, a
     }
     @memcpy(output orelse unreachable, b2, accum_width);
-    std.heap.c_allocator.free(alloc[0..accum_width]);
+    buffer_allocator.free(alloc[0..accum_width]);
     const data_bytes = dict.capacity() * slotSize(key_width, value_width);
     decref(std.heap.c_allocator, alignment, dict.dict_bytes, data_bytes);
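The b1/b2 pair is the double buffer the comment at the top of dictWalk alludes to: on each iteration the stepper reads the current accumulator from one buffer and writes the next one into the other, so its input and output never alias, and the two buffers then swap roles. A self-contained sketch of that shape (the names and the u32 fold are illustrative, not the builtin's actual signature):

    const std = @import("std");

    // Double-buffered fold: the step never reads and writes the same
    // accumulator, mirroring the b1/b2 swap in dictWalk and listWalk.
    fn walkSum(items: []const u32, out: *u32) void {
        var scratch: u32 = undefined;
        var write: *u32 = out; // b1 in the diff
        var read: *u32 = &scratch; // b2 in the diff

        read.* = 0; // seed the accumulator
        for (items) |item| {
            write.* = read.* + item; // step: read one buffer, write the other
            const tmp = write; // swap roles for the next iteration
            write = read;
            read = tmp;
        }
        // The latest value ends up in the "read" buffer after the final
        // swap, hence the closing @memcpy from b2 to output in the diff.
        out.* = read.*;
    }

    pub fn main() void {
        var result: u32 = undefined;
        walkSum(&[_]u32{ 1, 2, 3, 4 }, &result);
        std.debug.assert(result == 10);
    }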

@@ -433,7 +433,10 @@ pub fn listKeepResult(list: RocList, is_good_constructor: fn (RocResult) bool, t
     var output = RocList.allocate(std.heap.c_allocator, alignment, list.len(), list.len() * after_width);
     const target_ptr = output.bytes orelse unreachable;
-    var temporary = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, result_width) catch unreachable);
+    const threshold: comptime usize = 64;
+    var buffer: [threshold]u8 = undefined;
+    const buffer_allocator = &std.heap.FixedBufferAllocator.init(&buffer).allocator;
+    var temporary = @ptrCast([*]u8, buffer_allocator.alloc(u8, result_width) catch unreachable);
     var kept: usize = 0;
     while (i < size) : (i += 1) {
@@ -453,7 +456,7 @@ pub fn listKeepResult(list: RocList, is_good_constructor: fn (RocResult) bool, t
     }
     utils.decref(std.heap.c_allocator, alignment, list.bytes, size * before_width);
-    std.heap.c_allocator.free(temporary[0..result_width]);
+    buffer_allocator.free(temporary[0..result_width]);
     if (kept == 0) {
         utils.decref(std.heap.c_allocator, alignment, output.bytes, size * after_width);
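Note that threshold only sizes the stack array; none of these hunks compare it against the requested width. When accum_width (or result_width here) exceeds 64 bytes, the FixedBufferAllocator reports error.OutOfMemory and the catch unreachable turns that into a panic in safe build modes rather than a fallback. A guarded variant could branch on the threshold; the allocScratch helper below is hypothetical, sketched only to make the role of threshold concrete, and its caller would need to mirror the same branch when freeing:

    const std = @import("std");

    const threshold: usize = 64;

    // Hypothetical helper, not part of this commit: serve small scratch
    // requests from the stack buffer, fall back to the C heap otherwise.
    fn allocScratch(fba: *std.heap.FixedBufferAllocator, width: usize) [*]u8 {
        if (width <= threshold) {
            return (fba.allocator.alloc(u8, width) catch unreachable).ptr;
        }
        return (std.heap.c_allocator.alloc(u8, width) catch unreachable).ptr;
    }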
@@ -477,7 +480,11 @@ pub fn listWalk(list: RocList, stepper: Opaque, stepper_caller: Caller2, accum:
         return;
     }
-    const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable);
+    const threshold: comptime usize = 64;
+    var buffer: [threshold]u8 = undefined;
+    const buffer_allocator = &std.heap.FixedBufferAllocator.init(&buffer).allocator;
+    const alloc: [*]u8 = @ptrCast([*]u8, buffer_allocator.alloc(u8, accum_width) catch unreachable);
     var b1 = output orelse unreachable;
     var b2 = alloc;
@@ -497,7 +504,7 @@ pub fn listWalk(list: RocList, stepper: Opaque, stepper_caller: Caller2, accum:
     }
     @memcpy(output orelse unreachable, b2, accum_width);
-    std.heap.c_allocator.free(alloc[0..accum_width]);
+    buffer_allocator.free(alloc[0..accum_width]);
     const data_bytes = list.len() * element_width;
     utils.decref(std.heap.c_allocator, alignment, list.bytes, data_bytes);
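The buffer_allocator.free(...) calls that replace the c_allocator.free(...) calls are now essentially bookkeeping: a FixedBufferAllocator hands out slices of the fixed array and can only rewind its bump index for the most recently allocated slice, and the backing memory is reclaimed with the stack frame regardless. A small sketch, again assuming the field-style API of this era:

    const std = @import("std");

    pub fn main() !void {
        var buffer: [64]u8 = undefined;
        var fba = std.heap.FixedBufferAllocator.init(&buffer);
        const a = &fba.allocator;

        const scratch = try a.alloc(u8, 16);
        std.mem.set(u8, scratch, 0);
        // Cheap: just rewinds the bump index. The 64-byte `buffer`
        // itself lives and dies with this stack frame either way.
        a.free(scratch);
    }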
@@ -513,7 +520,11 @@ pub fn listWalkBackwards(list: RocList, stepper: Opaque, stepper_caller: Caller2
         return;
     }
-    const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable);
+    const threshold: comptime usize = 64;
+    var buffer: [threshold]u8 = undefined;
+    const buffer_allocator = &std.heap.FixedBufferAllocator.init(&buffer).allocator;
+    const alloc: [*]u8 = @ptrCast([*]u8, buffer_allocator.alloc(u8, accum_width) catch unreachable);
     var b1 = output orelse unreachable;
     var b2 = alloc;
@@ -537,7 +548,7 @@ pub fn listWalkBackwards(list: RocList, stepper: Opaque, stepper_caller: Caller2
     }
     @memcpy(output orelse unreachable, b2, accum_width);
-    std.heap.c_allocator.free(alloc[0..accum_width]);
+    buffer_allocator.free(alloc[0..accum_width]);
     const data_bytes = list.len() * element_width;
     utils.decref(std.heap.c_allocator, alignment, list.bytes, data_bytes);
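listWalkBackwards receives exactly the same treatment as listWalk; the only difference between the two builtins is the direction of iteration. With Zig's unsigned indices, a reverse walk is conventionally written decrement-before-use so the counter can never wrap below zero; a sketch of the idiom, not the builtin's exact loop:

    const std = @import("std");

    pub fn main() void {
        const items = [_]u8{ 'a', 'b', 'c' };
        // Reverse iteration over an unsigned index: test, then decrement,
        // then use, so `i` never underflows.
        var i: usize = items.len;
        while (i > 0) {
            i -= 1;
            std.debug.print("{c}\n", .{items[i]});
        }
    }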
@@ -556,7 +567,11 @@ pub fn listWalkUntil(list: RocList, stepper: Opaque, stepper_caller: Caller2, ac
         return;
     }
-    const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, TAG_WIDTH + accum_width) catch unreachable);
+    const threshold: comptime usize = 64;
+    var buffer: [threshold]u8 = undefined;
+    const buffer_allocator = &std.heap.FixedBufferAllocator.init(&buffer).allocator;
+    const alloc: [*]u8 = @ptrCast([*]u8, buffer_allocator.alloc(u8, TAG_WIDTH + accum_width) catch unreachable);
     @memcpy(alloc + TAG_WIDTH, accum orelse unreachable, accum_width);
@@ -580,7 +595,7 @@ pub fn listWalkUntil(list: RocList, stepper: Opaque, stepper_caller: Caller2, ac
     }
     @memcpy(output orelse unreachable, alloc + TAG_WIDTH, accum_width);
-    std.heap.c_allocator.free(alloc[0 .. TAG_WIDTH + accum_width]);
+    buffer_allocator.free(alloc[0 .. TAG_WIDTH + accum_width]);
     const data_bytes = list.len() * element_width;
     utils.decref(std.heap.c_allocator, alignment, list.bytes, data_bytes);
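listWalkUntil is the one variant with a different scratch layout: the stepper returns a tagged result, so the block is TAG_WIDTH + accum_width bytes, a continue/stop tag up front followed by the accumulator, which is why the seed and final copies above address alloc + TAG_WIDTH, and why the loop can bail out early when the tag says so. A sketch of reading such a block; the value of TAG_WIDTH and the tag encoding here are assumptions, the real definitions live elsewhere in the builtins:

    const std = @import("std");

    // Assumed layout, mirroring the diff: [ tag | accumulator bytes ].
    const TAG_WIDTH: usize = 8; // assumption for this sketch

    fn shouldStop(scratch: [*]const u8) bool {
        // Treat any nonzero tag byte as "stop" (assumed encoding).
        for (scratch[0..TAG_WIDTH]) |byte| {
            if (byte != 0) return true;
        }
        return false;
    }

    pub fn main() void {
        var block = [_]u8{0} ** (TAG_WIDTH + 4);
        std.debug.assert(!shouldStop(&block));
        block[0] = 1; // the stepper flips the tag to request an early stop
        std.debug.assert(shouldStop(&block));
    }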