Do not require allocating Layouts in the arena before interning

This should reduce memory usage; the interner effectively has its own arena anyway.
Ayaz Hafiz 2023-01-03 15:43:18 -06:00
parent 4652661a5c
commit ce717dca8b
GPG key ID: 0E2A37416A25EF58
19 changed files with 161 additions and 171 deletions
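
The substance of the change is the LayoutInterner API: insert now takes a Layout<'a> by value and get returns one by value, so call sites no longer need to arena-allocate a layout just to hand the interner a &'a Layout<'a>. The signatures below are copied from the interner hunks further down; the rest of the trait is elided:

// Before: the caller had to supply an arena-allocated reference.
fn insert(&mut self, value: &'a Layout<'a>) -> InLayout<'a>;
fn get(&self, key: InLayout<'a>) -> &'a Layout<'a>;

// After: layouts move into (and copy out of) the interner's own storage.
fn insert(&mut self, value: Layout<'a>) -> InLayout<'a>;
fn get(&self, key: InLayout<'a>) -> Layout<'a>;

Accordingly, call sites like interner.insert(env.arena.alloc(layout)) become interner.insert(layout), and code that previously dereferenced the &Layout returned by get (or passed it on by reference) flips the * and & as the hunks below show.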

View file

@ -895,7 +895,7 @@ fn call_spec<'a>(
&WhenRecursive::Unreachable,
)?;
let return_layout = interner.insert(return_layout);
let return_layout = interner.insert(*return_layout);
let state_layout = Layout::Builtin(Builtin::List(return_layout));
let state_type = layout_spec(
@ -928,7 +928,7 @@ fn call_spec<'a>(
with_new_heap_cell(builder, block, bag)
};
let arg0_layout = interner.insert(&argument_layouts[0]);
let arg0_layout = interner.insert(argument_layouts[0]);
let state_layout = Layout::Builtin(Builtin::List(arg0_layout));
let state_type = layout_spec(
@ -969,7 +969,7 @@ fn call_spec<'a>(
&WhenRecursive::Unreachable,
)?;
let return_layout = interner.insert(return_layout);
let return_layout = interner.insert(*return_layout);
let state_layout = Layout::Builtin(Builtin::List(return_layout));
let state_type = layout_spec(
@ -1016,7 +1016,7 @@ fn call_spec<'a>(
&WhenRecursive::Unreachable,
)?;
let return_layout = interner.insert(return_layout);
let return_layout = interner.insert(*return_layout);
let state_layout = Layout::Builtin(Builtin::List(return_layout));
let state_type = layout_spec(
@ -1069,7 +1069,7 @@ fn call_spec<'a>(
&WhenRecursive::Unreachable,
)?;
let return_layout = interner.insert(return_layout);
let return_layout = interner.insert(*return_layout);
let state_layout = Layout::Builtin(Builtin::List(return_layout));
let state_type = layout_spec(
@ -1246,7 +1246,7 @@ fn lowlevel_spec<'a>(
env,
builder,
interner,
element_layout,
&element_layout,
&WhenRecursive::Unreachable,
)?;
new_list(builder, block, type_id)
@ -1585,7 +1585,7 @@ fn expr_spec<'a>(
env,
builder,
interner,
element_layout,
&element_layout,
&WhenRecursive::Unreachable,
)?;
new_list(builder, block, type_id)
@ -1731,7 +1731,7 @@ fn layout_spec_help<'a>(
Boxed(inner_layout) => {
let inner_layout = interner.get(*inner_layout);
let inner_type =
layout_spec_help(env, builder, interner, inner_layout, when_recursive)?;
layout_spec_help(env, builder, interner, &inner_layout, when_recursive)?;
let cell_type = builder.add_heap_cell_type();
builder.add_tuple_type(&[cell_type, inner_type])
@ -1772,7 +1772,7 @@ fn builtin_spec<'a>(
List(element_layout) => {
let element_layout = interner.get(*element_layout);
let element_type =
layout_spec_help(env, builder, interner, element_layout, when_recursive)?;
layout_spec_help(env, builder, interner, &element_layout, when_recursive)?;
let cell = builder.add_heap_cell_type();
let bag = builder.add_bag_type(element_type)?;

View file

@ -118,8 +118,8 @@ pub(crate) fn list_with_capacity<'a, 'ctx, 'env>(
&[],
&[
capacity.into(),
env.alignment_intvalue(layout_interner, element_layout),
layout_width(env, layout_interner, element_layout),
env.alignment_intvalue(layout_interner, &element_layout),
layout_width(env, layout_interner, &element_layout),
],
BitcodeReturns::List,
bitcode::LIST_WITH_CAPACITY,
@ -137,7 +137,7 @@ pub(crate) fn list_get_unsafe<'a, 'ctx, 'env>(
let builder = env.builder;
let element_layout = layout_interner.get(element_layout);
let elem_type = basic_type_from_layout(env, layout_interner, element_layout);
let elem_type = basic_type_from_layout(env, layout_interner, &element_layout);
let ptr_type = elem_type.ptr_type(AddressSpace::Generic);
// Load the pointer to the array data
let array_data_ptr = load_list_ptr(builder, wrapper_struct, ptr_type);
@ -156,12 +156,12 @@ pub(crate) fn list_get_unsafe<'a, 'ctx, 'env>(
let result = load_roc_value(
env,
layout_interner,
*element_layout,
element_layout,
elem_ptr,
"list_get_load_element",
);
increment_refcount_layout(env, layout_interner, layout_ids, 1, result, element_layout);
increment_refcount_layout(env, layout_interner, layout_ids, 1, result, &element_layout);
result
}
@ -180,9 +180,9 @@ pub(crate) fn list_reserve<'a, 'ctx, 'env>(
env,
list.into_struct_value(),
&[
env.alignment_intvalue(layout_interner, element_layout),
env.alignment_intvalue(layout_interner, &element_layout),
spare,
layout_width(env, layout_interner, element_layout),
layout_width(env, layout_interner, &element_layout),
pass_update_mode(env, update_mode),
],
bitcode::LIST_RESERVE,
@ -243,8 +243,8 @@ pub(crate) fn list_swap<'a, 'ctx, 'env>(
env,
original_wrapper,
&[
env.alignment_intvalue(layout_interner, element_layout),
layout_width(env, layout_interner, element_layout),
env.alignment_intvalue(layout_interner, &element_layout),
layout_width(env, layout_interner, &element_layout),
index_1.into(),
index_2.into(),
pass_update_mode(env, update_mode),
@ -264,13 +264,13 @@ pub(crate) fn list_sublist<'a, 'ctx, 'env>(
element_layout: InLayout<'a>,
) -> BasicValueEnum<'ctx> {
let element_layout = layout_interner.get(element_layout);
let dec_element_fn = build_dec_wrapper(env, layout_interner, layout_ids, element_layout);
let dec_element_fn = build_dec_wrapper(env, layout_interner, layout_ids, &element_layout);
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
env.alignment_intvalue(layout_interner, element_layout),
layout_width(env, layout_interner, element_layout),
env.alignment_intvalue(layout_interner, &element_layout),
layout_width(env, layout_interner, &element_layout),
start.into(),
len.into(),
dec_element_fn.as_global_value().as_pointer_value().into(),
@ -289,13 +289,13 @@ pub(crate) fn list_drop_at<'a, 'ctx, 'env>(
element_layout: InLayout<'a>,
) -> BasicValueEnum<'ctx> {
let element_layout = layout_interner.get(element_layout);
let dec_element_fn = build_dec_wrapper(env, layout_interner, layout_ids, element_layout);
let dec_element_fn = build_dec_wrapper(env, layout_interner, layout_ids, &element_layout);
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
env.alignment_intvalue(layout_interner, element_layout),
layout_width(env, layout_interner, element_layout),
env.alignment_intvalue(layout_interner, &element_layout),
layout_width(env, layout_interner, &element_layout),
count.into(),
dec_element_fn.as_global_value().as_pointer_value().into(),
],
@ -624,8 +624,8 @@ pub(crate) fn list_concat<'a, 'ctx, 'env>(
env,
&[list1.into_struct_value(), list2.into_struct_value()],
&[
env.alignment_intvalue(layout_interner, element_layout),
layout_width(env, layout_interner, element_layout),
env.alignment_intvalue(layout_interner, &element_layout),
layout_width(env, layout_interner, &element_layout),
],
BitcodeReturns::List,
bitcode::LIST_CONCAT,

View file

@ -435,7 +435,7 @@ fn build_list_eq<'a, 'ctx, 'env>(
let symbol = Symbol::LIST_EQ;
let element_layout = layout_interner.get(element_layout);
let element_layout = when_recursive.unwrap_recursive_pointer(*element_layout);
let element_layout = when_recursive.unwrap_recursive_pointer(element_layout);
let fn_name = layout_ids
.get(symbol, &element_layout)
.to_symbol_string(symbol, &env.interns);
@ -1458,8 +1458,8 @@ fn build_box_eq_help<'a, 'ctx, 'env>(
let inner_layout = layout_interner.get(inner_layout);
let value1 = load_roc_value(env, layout_interner, *inner_layout, box1, "load_box1");
let value2 = load_roc_value(env, layout_interner, *inner_layout, box2, "load_box2");
let value1 = load_roc_value(env, layout_interner, inner_layout, box1, "load_box1");
let value2 = load_roc_value(env, layout_interner, inner_layout, box2, "load_box2");
let is_equal = build_eq(
env,
@ -1467,8 +1467,8 @@ fn build_box_eq_help<'a, 'ctx, 'env>(
layout_ids,
value1,
value2,
inner_layout,
inner_layout,
&inner_layout,
&inner_layout,
when_recursive,
);

View file

@ -45,7 +45,7 @@ pub fn basic_type_from_layout<'a, 'ctx, 'env>(
),
Boxed(inner_layout) => {
let inner_layout = layout_interner.get(*inner_layout);
let inner_type = basic_type_from_layout(env, layout_interner, inner_layout);
let inner_type = basic_type_from_layout(env, layout_interner, &inner_layout);
inner_type.ptr_type(AddressSpace::Generic).into()
}

View file

@ -353,7 +353,7 @@ fn build_clone<'a, 'ctx, 'env>(
let source = value.into_pointer_value();
let inner_layout = layout_interner.get(inner_layout);
let value = load_roc_value(env, layout_interner, *inner_layout, source, "inner");
let value = load_roc_value(env, layout_interner, inner_layout, source, "inner");
let inner_width = env.ptr_int().const_int(
inner_layout.stack_size(layout_interner, env.target_info) as u64,
@ -376,7 +376,7 @@ fn build_clone<'a, 'ctx, 'env>(
ptr,
cursors,
value,
*inner_layout,
inner_layout,
when_recursive,
)
}
@ -1063,7 +1063,7 @@ fn build_clone_builtin<'a, 'ctx, 'env>(
// We cloned the elements into the extra_offset address.
let elements_start_offset = cursors.extra_offset;
let element_type = basic_type_from_layout(env, layout_interner, elem);
let element_type = basic_type_from_layout(env, layout_interner, &elem);
let elements = bd.build_pointer_cast(
elements,
element_type.ptr_type(AddressSpace::Generic),
@ -1107,7 +1107,7 @@ fn build_clone_builtin<'a, 'ctx, 'env>(
ptr,
cursors,
element,
*elem,
elem,
when_recursive,
);
@ -1124,7 +1124,7 @@ fn build_clone_builtin<'a, 'ctx, 'env>(
env,
layout_interner,
parent,
*elem,
elem,
elements,
len,
"index",

View file

@ -2333,7 +2333,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
let element_layout = layout_interner.get(*element_layout);
let result_layout = layout_interner.get(*result_layout);
let argument_layouts = &[*element_layout];
let argument_layouts = &[element_layout];
let roc_function_call = roc_function_call(
env,
@ -2344,7 +2344,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
closure_layout,
function_owns_closure_data,
argument_layouts,
*result_layout,
result_layout,
);
list_map(
@ -2352,8 +2352,8 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
layout_interner,
roc_function_call,
list,
element_layout,
result_layout,
&element_layout,
&result_layout,
)
}
_ => unreachable!("invalid list layout"),
@ -2375,7 +2375,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
let element2_layout = layout_interner.get(*element2_layout);
let result_layout = layout_interner.get(*result_layout);
let argument_layouts = &[*element1_layout, *element2_layout];
let argument_layouts = &[element1_layout, element2_layout];
let roc_function_call = roc_function_call(
env,
@ -2386,7 +2386,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
closure_layout,
function_owns_closure_data,
argument_layouts,
*result_layout,
result_layout,
);
list_map2(
@ -2396,9 +2396,9 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
roc_function_call,
list1,
list2,
element1_layout,
element2_layout,
result_layout,
&element1_layout,
&element2_layout,
&result_layout,
)
}
_ => unreachable!("invalid list layout"),
@ -2423,7 +2423,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
let element3_layout = layout_interner.get(*element3_layout);
let result_layout = layout_interner.get(*result_layout);
let argument_layouts = &[*element1_layout, *element2_layout, *element3_layout];
let argument_layouts = &[element1_layout, element2_layout, element3_layout];
let roc_function_call = roc_function_call(
env,
@ -2434,7 +2434,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
closure_layout,
function_owns_closure_data,
argument_layouts,
*result_layout,
result_layout,
);
list_map3(
@ -2445,10 +2445,10 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
list1,
list2,
list3,
element1_layout,
element2_layout,
element3_layout,
result_layout,
&element1_layout,
&element2_layout,
&element3_layout,
&result_layout,
)
}
_ => unreachable!("invalid list layout"),
@ -2483,10 +2483,10 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
let result_layout = layout_interner.get(*result_layout);
let argument_layouts = &[
*element1_layout,
*element2_layout,
*element3_layout,
*element4_layout,
element1_layout,
element2_layout,
element3_layout,
element4_layout,
];
let roc_function_call = roc_function_call(
@ -2498,7 +2498,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
closure_layout,
function_owns_closure_data,
argument_layouts,
*result_layout,
result_layout,
);
list_map4(
@ -2510,11 +2510,11 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
list2,
list3,
list4,
element1_layout,
element2_layout,
element3_layout,
element4_layout,
result_layout,
&element1_layout,
&element2_layout,
&element3_layout,
&element4_layout,
&result_layout,
)
}
_ => unreachable!("invalid list layout"),
@ -2532,14 +2532,14 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
let element_layout = layout_interner.get(*element_layout);
let argument_layouts = &[*element_layout, *element_layout];
let argument_layouts = &[element_layout, element_layout];
let compare_wrapper = build_compare_wrapper(
env,
layout_interner,
function,
closure_layout,
element_layout,
&element_layout,
)
.as_global_value()
.as_pointer_value();
@ -2562,7 +2562,7 @@ pub(crate) fn run_higher_order_low_level<'a, 'ctx, 'env>(
roc_function_call,
compare_wrapper,
list,
element_layout,
&element_layout,
)
}
_ => unreachable!("invalid list layout"),

View file

@ -682,8 +682,8 @@ fn modify_refcount_list<'a, 'ctx, 'env>(
let di_location = env.builder.get_current_debug_location().unwrap();
let element_layout = layout_interner.get(element_layout);
let element_layout = when_recursive.unwrap_recursive_pointer(*element_layout);
let element_layout = layout_interner.insert(env.arena.alloc(element_layout));
let element_layout = when_recursive.unwrap_recursive_pointer(element_layout);
let element_layout = layout_interner.insert(element_layout);
let list_layout = &Layout::Builtin(Builtin::List(element_layout));
let (_, fn_name) = function_name_from_mode(
layout_ids,
@ -777,7 +777,7 @@ fn modify_refcount_list_help<'a, 'ctx, 'env>(
let element_layout = layout_interner.get(element_layout);
if element_layout.contains_refcounted(layout_interner) {
let ptr_type = basic_type_from_layout(env, layout_interner, element_layout)
let ptr_type = basic_type_from_layout(env, layout_interner, &element_layout)
.ptr_type(AddressSpace::Generic);
let (len, ptr) = load_list(env.builder, original_wrapper, ptr_type);
@ -790,7 +790,7 @@ fn modify_refcount_list_help<'a, 'ctx, 'env>(
mode.to_call_mode(fn_val),
when_recursive,
element,
element_layout,
&element_layout,
);
};
@ -798,7 +798,7 @@ fn modify_refcount_list_help<'a, 'ctx, 'env>(
env,
layout_interner,
parent,
*element_layout,
element_layout,
ptr,
len,
"modify_rc_index",

View file

@ -506,7 +506,7 @@ impl<'a, 'r> WasmBackend<'a, 'r> {
let heap_return_ptr_id = LocalId(wrapper_arg_layouts.len() as u32 - 1);
let inner_ret_layout = match wrapper_arg_layouts.last() {
Some(Layout::Boxed(inner)) => {
WasmLayout::new(self.layout_interner, self.layout_interner.get(*inner))
WasmLayout::new(self.layout_interner, &self.layout_interner.get(*inner))
}
x => internal_error!("Higher-order wrapper: invalid return layout {:?}", x),
};
@ -548,7 +548,7 @@ impl<'a, 'r> WasmBackend<'a, 'r> {
// Load the argument pointer. If it's a primitive value, dereference it too.
n_inner_wasm_args += 1;
self.code_builder.get_local(LocalId(i as u32));
self.dereference_boxed_value(inner_layout);
self.dereference_boxed_value(&inner_layout);
}
// If the inner function has closure data, it's the last arg of the inner fn
@ -638,9 +638,9 @@ impl<'a, 'r> WasmBackend<'a, 'r> {
x => internal_error!("Expected a Boxed layout, got {:?}", x),
};
self.code_builder.get_local(LocalId(1));
self.dereference_boxed_value(inner_layout);
self.dereference_boxed_value(&inner_layout);
self.code_builder.get_local(LocalId(2));
self.dereference_boxed_value(inner_layout);
self.dereference_boxed_value(&inner_layout);
// Call the wrapped inner function
let inner_wasm_fn_index = self.fn_index_offset + inner_lookup_idx as u32;

View file

@ -415,7 +415,7 @@ impl<'a> LowLevelCall<'a> {
Layout::Struct {
field_layouts: &[Layout::Builtin(Builtin::List(list_elem)), value_layout],
..
} if value_layout == *backend.layout_interner.get(list_elem) => {
} if value_layout == backend.layout_interner.get(list_elem) => {
let list_offset = 0;
let elem_offset = Layout::Builtin(Builtin::List(list_elem))
.stack_size(backend.layout_interner, TARGET_INFO);
@ -424,7 +424,7 @@ impl<'a> LowLevelCall<'a> {
Layout::Struct {
field_layouts: &[value_layout, Layout::Builtin(Builtin::List(list_elem))],
..
} if value_layout == *backend.layout_interner.get(list_elem) => {
} if value_layout == backend.layout_interner.get(list_elem) => {
let list_offset =
value_layout.stack_size(backend.layout_interner, TARGET_INFO);
let elem_offset = 0;
@ -591,7 +591,7 @@ impl<'a> LowLevelCall<'a> {
let elem_layout = backend.layout_interner.get(elem_layout);
let elem_width = elem_layout.stack_size(backend.layout_interner, TARGET_INFO);
let (elem_local, elem_offset, _) =
ensure_symbol_is_in_memory(backend, elem, *elem_layout, backend.env.arena);
ensure_symbol_is_in_memory(backend, elem, elem_layout, backend.env.arena);
// Zig arguments Wasm types
// (return pointer) i32
@ -630,7 +630,7 @@ impl<'a> LowLevelCall<'a> {
let (elem_width, elem_align) =
elem_layout.stack_size_and_alignment(backend.layout_interner, TARGET_INFO);
let (elem_local, elem_offset, _) =
ensure_symbol_is_in_memory(backend, elem, *elem_layout, backend.env.arena);
ensure_symbol_is_in_memory(backend, elem, elem_layout, backend.env.arena);
// Zig arguments Wasm types
// (return pointer) i32
@ -677,7 +677,7 @@ impl<'a> LowLevelCall<'a> {
// This is the same as a Struct containing the element
let in_memory_layout = Layout::Struct {
field_order_hash: FieldOrderHash::from_ordered_fields(&[]),
field_layouts: backend.env.arena.alloc([*elem_layout]),
field_layouts: backend.env.arena.alloc([elem_layout]),
};
let dec_fn = backend.get_refcount_fn_index(in_memory_layout, HelperOp::Dec);
let dec_fn_ptr = backend.get_fn_ptr(dec_fn);
@ -723,7 +723,7 @@ impl<'a> LowLevelCall<'a> {
// This is the same as a Struct containing the element
let in_memory_layout = Layout::Struct {
field_order_hash: FieldOrderHash::from_ordered_fields(&[]),
field_layouts: backend.env.arena.alloc([*elem_layout]),
field_layouts: backend.env.arena.alloc([elem_layout]),
};
let dec_fn = backend.get_refcount_fn_index(in_memory_layout, HelperOp::Dec);
let dec_fn_ptr = backend.get_fn_ptr(dec_fn);
@ -2233,7 +2233,7 @@ pub fn call_higher_order_lowlevel<'a>(
let boxed_closure_arg_layouts =
argument_layouts.iter().take(n_non_closure_args).map(|lay| {
let lay_in = backend.layout_interner.insert(lay);
let lay_in = backend.layout_interner.insert(*lay);
Layout::Boxed(lay_in)
});
@ -2243,7 +2243,7 @@ pub fn call_higher_order_lowlevel<'a>(
match helper_proc_source {
ProcSource::HigherOrderMapper(_) => {
// Our convention for mappers is that they write to the heap via the last argument
let result_layout = backend.layout_interner.insert(result_layout);
let result_layout = backend.layout_interner.insert(*result_layout);
wrapper_arg_layouts.push(Layout::Boxed(result_layout));
ProcLayout {
arguments: wrapper_arg_layouts.into_bump_slice(),
@ -2392,7 +2392,7 @@ fn list_map_n<'a>(
let arg_elem_layouts = Vec::from_iter_in(
arg_symbols.iter().map(|sym| {
let lay = unwrap_list_elem_layout(backend.storage.symbol_layouts[sym]);
*backend.layout_interner.get(lay)
backend.layout_interner.get(lay)
}),
backend.env.arena,
);

View file

@ -592,7 +592,7 @@ fn eq_boxed<'a>(
ident_ids,
ctx,
layout_interner,
*inner_layout,
inner_layout,
root.arena.alloc([a, b]),
)
.unwrap();
@ -600,13 +600,13 @@ fn eq_boxed<'a>(
Stmt::Let(
a,
a_expr,
*inner_layout,
inner_layout,
root.arena.alloc(
//
Stmt::Let(
b,
b_expr,
*inner_layout,
inner_layout,
root.arena.alloc(
//
Stmt::Let(
@ -641,7 +641,7 @@ fn eq_list<'a>(
let elem_layout = layout_interner.get(elem_layout);
// A "Box" layout (heap pointer to a single list element)
let box_union_layout = UnionLayout::NonNullableUnwrapped(root.arena.alloc([*elem_layout]));
let box_union_layout = UnionLayout::NonNullableUnwrapped(root.arena.alloc([elem_layout]));
let box_layout = Layout::Union(box_union_layout);
// Compare lengths
@ -754,14 +754,14 @@ fn eq_list<'a>(
tag_id: 0,
index: 0,
};
let elem1_stmt = |next| Stmt::Let(elem1, elem1_expr, *elem_layout, next);
let elem2_stmt = |next| Stmt::Let(elem2, elem2_expr, *elem_layout, next);
let elem1_stmt = |next| Stmt::Let(elem1, elem1_expr, elem_layout, next);
let elem2_stmt = |next| Stmt::Let(elem2, elem2_expr, elem_layout, next);
// Compare the two current elements
let eq_elems = root.create_symbol(ident_ids, "eq_elems");
let eq_elems_args = root.arena.alloc([elem1, elem2]);
let eq_elems_expr = root
.call_specialized_op(ident_ids, ctx, layout_interner, *elem_layout, eq_elems_args)
.call_specialized_op(ident_ids, ctx, layout_interner, elem_layout, eq_elems_args)
.unwrap();
let eq_elems_stmt = |next| Stmt::Let(eq_elems, eq_elems_expr, LAYOUT_BOOL, next);

View file

@ -446,8 +446,8 @@ impl<'a> CodeGenHelp<'a> {
) -> Layout<'a> {
match layout {
Layout::Builtin(Builtin::List(v)) => {
let v = self.replace_rec_ptr(ctx, layout_interner, *layout_interner.get(v));
let v = layout_interner.insert(self.arena.alloc(v));
let v = self.replace_rec_ptr(ctx, layout_interner, layout_interner.get(v));
let v = layout_interner.insert(v);
Layout::Builtin(Builtin::List(v))
}
@ -487,9 +487,7 @@ impl<'a> CodeGenHelp<'a> {
Layout::Boxed(inner) => {
let inner = layout_interner.get(inner);
let inner = self
.arena
.alloc(self.replace_rec_ptr(ctx, layout_interner, *inner));
let inner = self.replace_rec_ptr(ctx, layout_interner, inner);
let inner = layout_interner.insert(inner);
Layout::Boxed(inner)
}

View file

@ -187,7 +187,7 @@ pub fn refcount_generic<'a>(
ctx,
layout_interner,
&layout,
inner_layout,
&inner_layout,
structure,
)
}
@ -429,7 +429,7 @@ where
match layout {
Layout::Builtin(Builtin::List(elem_layout)) => {
let elem_layout = interner.get(*elem_layout);
is_rc_implemented_yet(interner, elem_layout)
is_rc_implemented_yet(interner, &elem_layout)
}
Layout::Builtin(_) => true,
Layout::Struct { field_layouts, .. } => field_layouts
@ -775,7 +775,7 @@ fn refcount_list<'a>(
let elem_layout = layout_interner.get(elem_layout);
// A "Box" layout (heap pointer to a single list element)
let box_union_layout = UnionLayout::NonNullableUnwrapped(arena.alloc([*elem_layout]));
let box_union_layout = UnionLayout::NonNullableUnwrapped(arena.alloc([elem_layout]));
let box_layout = Layout::Union(box_union_layout);
//
@ -843,7 +843,7 @@ fn refcount_list<'a>(
ident_ids,
ctx,
layout_interner,
elem_layout,
&elem_layout,
LAYOUT_UNIT,
box_union_layout,
len,
@ -1684,8 +1684,8 @@ fn refcount_boxed<'a>(
ident_ids: &mut IdentIds,
ctx: &mut Context<'a>,
layout_interner: &mut STLayoutInterner<'a>,
layout: &Layout,
inner_layout: &'a Layout,
layout: &Layout<'a>,
inner_layout: &Layout<'a>,
outer: Symbol,
) -> Stmt<'a> {
let arena = root.arena;

View file

@ -176,10 +176,6 @@ struct Ctx<'a, 'r> {
}
impl<'a, 'r> Ctx<'a, 'r> {
fn alloc<T>(&self, v: T) -> &'a T {
self.arena.alloc(v)
}
fn problem(&mut self, problem_kind: ProblemKind<'a>) {
self.problems.push(Problem {
proc: self.arena.alloc(self.proc.clone()),
@ -428,7 +424,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
}
}
}
let elem_layout = self.interner.insert(self.alloc(*elem_layout));
let elem_layout = self.interner.insert(*elem_layout);
Some(Layout::Builtin(Builtin::List(elem_layout)))
}
Expr::EmptyArray => {
@ -436,12 +432,12 @@ impl<'a, 'r> Ctx<'a, 'r> {
None
}
&Expr::ExprBox { symbol } => self.with_sym_layout(symbol, |ctx, _def_line, layout| {
let inner = ctx.interner.insert(ctx.alloc(layout));
let inner = ctx.interner.insert(layout);
Some(Layout::Boxed(inner))
}),
&Expr::ExprUnbox { symbol } => {
self.with_sym_layout(symbol, |ctx, def_line, layout| match ctx.resolve(layout) {
Layout::Boxed(inner) => Some(*ctx.interner.get(inner)),
Layout::Boxed(inner) => Some(ctx.interner.get(inner)),
_ => {
ctx.problem(ProblemKind::UnboxNotABox { symbol, def_line });
None
@ -675,8 +671,8 @@ fn resolve_recursive_layout<'a>(
}
},
Layout::Boxed(inner) => {
let inner = go!(*interner.get(inner));
Layout::Boxed(interner.insert(arena.alloc(inner)))
let inner = go!(interner.get(inner));
Layout::Boxed(interner.insert(inner))
}
Layout::Struct {
field_order_hash,
@ -693,13 +689,9 @@ fn resolve_recursive_layout<'a>(
}
Layout::Builtin(builtin) => match builtin {
Builtin::List(inner) => {
let inner = arena.alloc(resolve_recursive_layout(
arena,
interner,
*interner.get(inner),
when_recursive,
));
let inner = interner.insert(arena.alloc(inner));
let inner =
resolve_recursive_layout(arena, interner, interner.get(inner), when_recursive);
let inner = interner.insert(inner);
Layout::Builtin(Builtin::List(inner))
}
Builtin::Int(_)
@ -714,8 +706,8 @@ fn resolve_recursive_layout<'a>(
}) => {
let set = set.iter().map(|(symbol, captures)| {
let captures = captures.iter().map(|lay_in| {
let new_lay = go!(*interner.get(*lay_in));
interner.insert(arena.alloc(new_lay))
let new_lay = go!(interner.get(*lay_in));
interner.insert(new_lay)
});
let captures = &*arena.alloc_slice_fill_iter(captures);
(*symbol, captures)

View file

@ -1448,9 +1448,9 @@ fn path_to_expr_help<'a>(
let elem_layout = layout_interner.get(elem_layout);
stores.push((load_sym, *elem_layout, load_expr));
stores.push((load_sym, elem_layout, load_expr));
layout = *elem_layout;
layout = elem_layout;
symbol = load_sym;
}
_ => internal_error!("not a list"),

View file

@ -3592,7 +3592,7 @@ fn specialize_proc_help<'a>(
let ordered_field_layouts = Vec::from_iter_in(
combined
.iter()
.map(|(_, layout)| *layout_cache.get_in(**layout)),
.map(|(_, layout)| layout_cache.get_in(**layout)),
env.arena,
);
let ordered_field_layouts = ordered_field_layouts.into_bump_slice();
@ -3618,7 +3618,7 @@ fn specialize_proc_help<'a>(
specialized_body = Stmt::Let(
symbol,
expr,
*layout_cache.get_in(**layout),
layout_cache.get_in(**layout),
env.arena.alloc(specialized_body),
);
}
@ -4634,7 +4634,7 @@ pub fn with_hole<'a>(
Ok(elem_layout) => {
let expr = Expr::EmptyArray;
// TODO don't alloc once elem_layout is interned
let elem_layout = layout_cache.put_in(env.arena.alloc(elem_layout));
let elem_layout = layout_cache.put_in(elem_layout);
Stmt::Let(
assigned,
expr,
@ -4645,7 +4645,7 @@ pub fn with_hole<'a>(
Err(LayoutProblem::UnresolvedTypeVar(_)) => {
let expr = Expr::EmptyArray;
// TODO don't alloc once elem_layout is interned
let elem_layout = layout_cache.put_in(env.arena.alloc(Layout::VOID));
let elem_layout = layout_cache.put_in(Layout::VOID);
Stmt::Let(
assigned,
expr,
@ -4694,7 +4694,7 @@ pub fn with_hole<'a>(
elems: elements.into_bump_slice(),
};
let elem_layout = layout_cache.put_in(env.arena.alloc(elem_layout));
let elem_layout = layout_cache.put_in(elem_layout);
let stmt = Stmt::Let(
assigned,
@ -5791,7 +5791,7 @@ where
let symbols =
Vec::from_iter_in(combined.iter().map(|(a, _)| *a), env.arena).into_bump_slice();
let field_layouts = Vec::from_iter_in(
combined.iter().map(|(_, b)| *layout_cache.get_in(**b)),
combined.iter().map(|(_, b)| layout_cache.get_in(**b)),
env.arena,
)
.into_bump_slice();

View file

@ -312,11 +312,11 @@ impl<'a> LayoutCache<'a> {
}
}
pub fn get_in(&self, interned: InLayout<'a>) -> &Layout<'a> {
pub fn get_in(&self, interned: InLayout<'a>) -> Layout<'a> {
self.interner.get(interned)
}
pub fn put_in(&mut self, layout: &'a Layout<'a>) -> InLayout<'a> {
pub fn put_in(&mut self, layout: Layout<'a>) -> InLayout<'a> {
self.interner.insert(layout)
}
@ -1393,7 +1393,7 @@ impl<'a> LambdaSet<'a> {
where
I: LayoutInterner<'a>,
{
*interner.get(self.representation)
interner.get(self.representation)
}
/// Does the lambda set contain the given symbol?
@ -1407,7 +1407,7 @@ impl<'a> LambdaSet<'a> {
{
if self.has_unwrapped_capture_repr() {
let repr = interner.get(self.representation);
Some(*repr)
Some(repr)
} else if self.has_enum_dispatch_repr() {
None
} else {
@ -1416,7 +1416,7 @@ impl<'a> LambdaSet<'a> {
Layout::Struct {
field_layouts: &[], ..
} => None,
repr => Some(*repr),
repr => Some(repr),
}
}
}
@ -1516,11 +1516,11 @@ impl<'a> LambdaSet<'a> {
I: LayoutInterner<'a>,
{
let left = interner.get(*left);
if left == right {
if &left == right {
return true;
}
let left = if left == &Layout::RecursivePointer {
let left = if left == Layout::RecursivePointer {
let runtime_repr = self.runtime_representation(interner);
debug_assert!(matches!(
runtime_repr,
@ -1528,7 +1528,7 @@ impl<'a> LambdaSet<'a> {
));
Layout::LambdaSet(*self)
} else {
*left
left
};
let right = if right == &Layout::RecursivePointer {
@ -1578,7 +1578,7 @@ impl<'a> LambdaSet<'a> {
tag_id: index as TagIdIntType,
alphabetic_order_fields: fields,
closure_name,
union_layout: *union,
union_layout: union,
}
}
UnionLayout::Recursive(_) => {
@ -1595,7 +1595,7 @@ impl<'a> LambdaSet<'a> {
tag_id: index as TagIdIntType,
alphabetic_order_fields: fields,
closure_name,
union_layout: *union,
union_layout: union,
}
}
UnionLayout::NullableUnwrapped {
@ -1615,7 +1615,7 @@ impl<'a> LambdaSet<'a> {
tag_id: index as TagIdIntType,
alphabetic_order_fields: fields,
closure_name,
union_layout: *union,
union_layout: union,
}
}
UnionLayout::NonNullableUnwrapped(_) => todo!("recursive closures"),
@ -1665,16 +1665,16 @@ impl<'a> LambdaSet<'a> {
let repr = interner.get(self.representation);
if self.has_unwrapped_capture_repr() {
return ClosureCallOptions::UnwrappedCapture(*repr);
return ClosureCallOptions::UnwrappedCapture(repr);
}
match repr {
Layout::Union(union_layout) => {
if repr == &Layout::VOID {
if repr == Layout::VOID {
debug_assert!(self.set.is_empty());
return ClosureCallOptions::Void;
}
ClosureCallOptions::Union(*union_layout)
ClosureCallOptions::Union(union_layout)
}
Layout::Struct {
field_layouts,
@ -1683,7 +1683,7 @@ impl<'a> LambdaSet<'a> {
debug_assert_eq!(self.set.len(), 1);
ClosureCallOptions::Struct {
field_layouts,
field_order_hash: *field_order_hash,
field_order_hash,
}
}
layout => {
@ -1806,7 +1806,7 @@ impl<'a> LambdaSet<'a> {
// representation, so here the criteria doesn't matter.
let mut criteria = CACHEABLE;
let arg = cached!(Layout::from_var(env, *var), criteria);
let arg_in = env.cache.interner.insert(env.arena.alloc(arg));
let arg_in = env.cache.interner.insert(arg);
arguments.push(arg_in);
}
@ -1862,7 +1862,7 @@ impl<'a> LambdaSet<'a> {
set_with_variables,
opt_recursion_var.into_variable(),
);
let representation = env.cache.interner.insert(env.arena.alloc(representation));
let representation = env.cache.interner.insert(representation);
Cacheable(
Ok(LambdaSet {
@ -1877,7 +1877,7 @@ impl<'a> LambdaSet<'a> {
// See also https://github.com/roc-lang/roc/issues/3163.
cacheable(Ok(LambdaSet {
set: &[],
representation: env.cache.interner.insert(env.arena.alloc(Layout::UNIT)),
representation: env.cache.interner.insert(Layout::UNIT),
}))
}
}
@ -3119,7 +3119,7 @@ fn layout_from_flat_type<'a>(
let inner_var = args[0];
let inner_layout = cached!(Layout::from_var(env, inner_var), criteria);
let inner_layout = env.cache.put_in(env.arena.alloc(inner_layout));
let inner_layout = env.cache.put_in(inner_layout);
Cacheable(Ok(Layout::Boxed(inner_layout)), criteria)
}
@ -4229,7 +4229,7 @@ pub(crate) fn list_layout_from_elem<'a>(
cached!(Layout::from_var(env, element_var), criteria)
};
let element_layout = env.cache.put_in(env.arena.alloc(element_layout));
let element_layout = env.cache.put_in(element_layout);
Cacheable(Ok(Layout::Builtin(Builtin::List(element_layout))), criteria)
}
@ -4392,7 +4392,7 @@ mod test {
let lambda_set = LambdaSet {
set: &[(Symbol::LIST_MAP, &[])],
representation: interner.insert(&Layout::UNIT),
representation: interner.insert(Layout::UNIT),
};
let a = &[Layout::UNIT] as &[_];

View file

@ -27,7 +27,7 @@ macro_rules! cache_interned_layouts {
fn fill_reserved_layouts<'a>(interner: &mut STLayoutInterner<'a>) {
assert!(interner.is_empty());
$(
interner.insert(&$layout);
interner.insert($layout);
)*
}
@ -100,10 +100,10 @@ pub trait LayoutInterner<'a>: Sized {
/// must live at least as long as the interner lives.
// TODO: we should consider maintaining our own arena in the interner, to avoid redundant
// allocations when values already have interned representations.
fn insert(&mut self, value: &'a Layout<'a>) -> InLayout<'a>;
fn insert(&mut self, value: Layout<'a>) -> InLayout<'a>;
/// Retrieves a value from the interner.
fn get(&self, key: InLayout<'a>) -> &'a Layout<'a>;
fn get(&self, key: InLayout<'a>) -> Layout<'a>;
fn alignment_bytes(&self, target_info: TargetInfo, layout: InLayout<'a>) -> u32 {
self.get(layout).alignment_bytes(self, target_info)
@ -158,8 +158,8 @@ pub struct GlobalLayoutInterner<'a>(Arc<GlobalLayoutInternerInner<'a>>);
#[derive(Debug)]
struct GlobalLayoutInternerInner<'a> {
map: Mutex<BumpMap<&'a Layout<'a>, InLayout<'a>>>,
vec: RwLock<Vec<&'a Layout<'a>>>,
map: Mutex<BumpMap<Layout<'a>, InLayout<'a>>>,
vec: RwLock<Vec<Layout<'a>>>,
}
/// A derivative of a [GlobalLayoutInterner] interner that provides caching desirable for
@ -174,9 +174,9 @@ struct GlobalLayoutInternerInner<'a> {
#[derive(Debug)]
pub struct TLLayoutInterner<'a> {
parent: GlobalLayoutInterner<'a>,
map: BumpMap<&'a Layout<'a>, InLayout<'a>>,
map: BumpMap<Layout<'a>, InLayout<'a>>,
/// Cache of interned values from the parent for local access.
vec: RefCell<Vec<Option<&'a Layout<'a>>>>,
vec: RefCell<Vec<Option<Layout<'a>>>>,
}
/// A single-threaded interner, with no concurrency properties.
@ -185,8 +185,8 @@ pub struct TLLayoutInterner<'a> {
/// a [STLayoutInterner], via [GlobalLayoutInterner::unwrap].
#[derive(Debug)]
pub struct STLayoutInterner<'a> {
map: BumpMap<&'a Layout<'a>, InLayout<'a>>,
vec: Vec<&'a Layout<'a>>,
map: BumpMap<Layout<'a>, InLayout<'a>>,
vec: Vec<Layout<'a>>,
}
/// Generic hasher for a value, to be used by all interners.
@ -229,7 +229,7 @@ impl<'a> GlobalLayoutInterner<'a> {
/// Interns a value with a pre-computed hash.
/// Prefer calling this when possible, especially from [TLLayoutInterner], to avoid
/// re-computing hashes.
fn insert_hashed(&self, value: &'a Layout<'a>, hash: u64) -> InLayout<'a> {
fn insert_hashed(&self, value: Layout<'a>, hash: u64) -> InLayout<'a> {
let mut map = self.0.map.lock();
let (_, interned) = map
.raw_entry_mut()
@ -243,7 +243,7 @@ impl<'a> GlobalLayoutInterner<'a> {
*interned
}
fn get(&self, interned: InLayout<'a>) -> &'a Layout<'a> {
fn get(&self, interned: InLayout<'a>) -> Layout<'a> {
let InLayout(index, _) = interned;
self.0.vec.read()[index]
}
@ -255,7 +255,7 @@ impl<'a> GlobalLayoutInterner<'a> {
impl<'a> TLLayoutInterner<'a> {
/// Records an interned value in thread-specific storage, for faster access on lookups.
fn record(&self, key: &'a Layout<'a>, interned: InLayout<'a>) {
fn record(&self, key: Layout<'a>, interned: InLayout<'a>) {
let mut vec = self.vec.borrow_mut();
let len = vec.len().max(interned.0 + 1);
vec.resize(len, None);
@ -264,7 +264,7 @@ impl<'a> TLLayoutInterner<'a> {
}
impl<'a> LayoutInterner<'a> for TLLayoutInterner<'a> {
fn insert(&mut self, value: &'a Layout<'a>) -> InLayout<'a> {
fn insert(&mut self, value: Layout<'a>) -> InLayout<'a> {
let global = &self.parent;
let hash = hash(value);
let (&mut value, &mut interned) = self
@ -279,9 +279,9 @@ impl<'a> LayoutInterner<'a> for TLLayoutInterner<'a> {
interned
}
fn get(&self, key: InLayout<'a>) -> &'a Layout<'a> {
fn get(&self, key: InLayout<'a>) -> Layout<'a> {
if let Some(Some(value)) = self.vec.borrow().get(key.0) {
return value;
return *value;
}
let value = self.parent.get(key);
self.record(value, key);
@ -318,12 +318,12 @@ impl<'a> STLayoutInterner<'a> {
}
impl<'a> LayoutInterner<'a> for STLayoutInterner<'a> {
fn insert(&mut self, value: &'a Layout<'a>) -> InLayout<'a> {
fn insert(&mut self, value: Layout<'a>) -> InLayout<'a> {
let hash = hash(value);
let (_, interned) = self
.map
.raw_entry_mut()
.from_key_hashed_nocheck(hash, value)
.from_key_hashed_nocheck(hash, &value)
.or_insert_with(|| {
let interned = InLayout(self.vec.len(), Default::default());
self.vec.push(value);
@ -332,7 +332,7 @@ impl<'a> LayoutInterner<'a> for STLayoutInterner<'a> {
*interned
}
fn get(&self, key: InLayout<'a>) -> &'a Layout<'a> {
fn get(&self, key: InLayout<'a>) -> Layout<'a> {
let InLayout(index, _) = key;
self.vec[index]
}
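
For intuition, here is a minimal, self-contained sketch of the by-value interning pattern the diff above switches to. It is an illustration only, not the Roc interner: MiniLayout and MiniInterner are made-up stand-ins, and there is no precomputed hashing, thread-local caching, or bump allocation.

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum MiniLayout {
    Unit,
    Bool,
    List(usize), // index of the interned element layout (a stand-in for InLayout)
}

#[derive(Default)]
struct MiniInterner {
    map: HashMap<MiniLayout, usize>, // layout -> index, for deduplication
    vec: Vec<MiniLayout>,            // index -> layout; this is the "effective arena"
}

impl MiniInterner {
    /// Takes the layout by value; no caller-side allocation required.
    fn insert(&mut self, layout: MiniLayout) -> usize {
        if let Some(&index) = self.map.get(&layout) {
            return index; // already interned
        }
        let index = self.vec.len();
        self.vec.push(layout);
        self.map.insert(layout, index);
        index
    }

    /// Returns the layout by value (it is Copy), so lookups don't borrow the interner.
    fn get(&self, index: usize) -> MiniLayout {
        self.vec[index]
    }
}

fn main() {
    let mut interner = MiniInterner::default();
    let unit = interner.insert(MiniLayout::Unit);
    let list_of_unit = interner.insert(MiniLayout::List(unit));
    assert_eq!(interner.get(list_of_unit), MiniLayout::List(unit));
    assert_eq!(interner.insert(MiniLayout::Unit), unit); // deduplicated
    let _ = interner.insert(MiniLayout::Bool);
}

The point is that the interner's own vec is the effective arena: the real Layout<'a> appears to be a Copy value (the hunks above rely on that in `return *value` and `vec.resize(len, None)`), so storing it directly costs no more than storing an &'a Layout<'a>, and callers drop the env.arena.alloc(...) step entirely.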

View file

@ -1098,7 +1098,7 @@ fn add_builtin_type<'a>(
debug_assert_eq!(args.len(), 1);
let elem_layout = env.layout_cache.get_in(elem_layout);
let elem_id = add_type_help(env, *elem_layout, args[0], opt_name, types);
let elem_id = add_type_help(env, elem_layout, args[0], opt_name, types);
let list_id = types.add_anonymous(
&env.layout_cache.interner,
RocType::RocList(elem_id),
@ -1114,7 +1114,7 @@ fn add_builtin_type<'a>(
Alias(Symbol::DICT_DICT, _alias_variables, alias_var, AliasKind::Opaque),
) => {
match (
*env.layout_cache.get_in(elem_layout),
env.layout_cache.get_in(elem_layout),
env.subs.get_content_without_compacting(*alias_var),
) {
(
@ -1458,7 +1458,7 @@ fn add_tag_union<'a>(
Layout::Boxed(elem_layout) => {
let elem_layout = env.layout_cache.get_in(elem_layout);
let (tag_name, payload_fields) =
single_tag_payload_fields(union_tags, subs, &[*elem_layout], env, types);
single_tag_payload_fields(union_tags, subs, &[elem_layout], env, types);
RocTagUnion::SingleTagStruct {
name: name.clone(),

View file

@ -859,7 +859,7 @@ fn addr_to_ast<'a, M: ReplAppMemory>(
env,
mem,
addr_of_inner,
inner_layout,
&inner_layout,
WhenRecursive::Unreachable,
inner_var,
);
@ -911,7 +911,7 @@ fn list_to_ast<'a, M: ReplAppMemory>(
let arena = env.arena;
let mut output = Vec::with_capacity_in(len, arena);
let elem_layout = *env.layout_cache.get_in(elem_layout);
let elem_layout = env.layout_cache.get_in(elem_layout);
let elem_size = elem_layout.stack_size(&env.layout_cache.interner, env.target_info) as usize;
for index in 0..len {