Update Rust code to LLVM 16 and fix build

also update:
 - LLVM frame address intrinsic
 - erased function type
 - Dec passing to Zig
 - gen_dev storage size
Brendan Hansknecht 2023-09-21 13:12:48 -07:00
parent 2e2e609547
commit 398bf2f96c
No known key found for this signature in database
GPG key ID: 0EA784685083E75B
20 changed files with 1830 additions and 1432 deletions
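
Most of the LLVM-backend changes below are mechanical: direct calls to inkwell's build_* builder methods are replaced with new_build_* wrappers from a BuilderExt trait (imported in several hunks, e.g. `use super::build::{BuilderExt, Env};`). The sketch below illustrates that wrapper pattern only; FakeBuilder, BuilderError, and the method signatures are invented stand-ins, not inkwell's real API, and whether the real wrappers panic or handle errors some other way is not shown in this diff.

// Illustration only: invented types and signatures, not the inkwell API.
struct BuilderError;

struct FakeBuilder;

impl FakeBuilder {
    // Stand-in for a builder method that became fallible and now returns a Result.
    fn build_return(&self, value: Option<i64>) -> Result<(), BuilderError> {
        let _ = value;
        Ok(())
    }
}

// Extension trait adding an infallible new_build_* wrapper that panics on error,
// so call sites read the same as the old infallible API.
trait BuilderExt {
    fn new_build_return(&self, value: Option<i64>);
}

impl BuilderExt for FakeBuilder {
    fn new_build_return(&self, value: Option<i64>) {
        if self.build_return(value).is_err() {
            panic!("LLVM builder call failed");
        }
    }
}

fn main() {
    let builder = FakeBuilder;
    // The call site stays a one-liner, mirroring `env.builder.new_build_return(None)` in the diff.
    builder.new_build_return(None);
}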


@ -40,7 +40,9 @@ fn main() {
"builtins-windows-x86_64.obj",
);
generate_object_file(&bitcode_path, "wasm32-object", "builtins-wasm32.o");
if !DEBUG {
generate_object_file(&bitcode_path, "wasm32-object", "builtins-wasm32.o");
}
copy_zig_builtins_to_target_dir(&bitcode_path);
@ -68,27 +70,25 @@ fn generate_object_file(bitcode_path: &Path, zig_object: &str, object_file_name:
println!("Compiling zig object `{zig_object}` to: {src_obj}");
if !DEBUG {
let mut zig_cmd = zig();
let mut zig_cmd = zig();
zig_cmd
.current_dir(bitcode_path)
.args(["build", zig_object, "-Drelease=true"]);
zig_cmd
.current_dir(bitcode_path)
.args(["build", zig_object, "-Drelease=true"]);
run_command(zig_cmd, 0);
run_command(zig_cmd, 0);
println!("Moving zig object `{zig_object}` to: {dest_obj}");
println!("Moving zig object `{zig_object}` to: {dest_obj}");
// we store this .o file in rust's `target` folder (for wasm we need to leave a copy here too)
fs::copy(src_obj, dest_obj).unwrap_or_else(|err| {
internal_error!(
"Failed to copy object file {} to {}: {:?}",
src_obj,
dest_obj,
err
);
});
}
// we store this .o file in rust's `target` folder (for wasm we need to leave a copy here too)
fs::copy(src_obj, dest_obj).unwrap_or_else(|err| {
internal_error!(
"Failed to copy object file {} to {}: {:?}",
src_obj,
dest_obj,
err
);
});
}
pub fn get_lib_dir() -> PathBuf {


@ -1303,7 +1303,7 @@ impl<
layout: InLayout<'_>,
) -> i32 {
let (size, alignment) = layout_interner.stack_size_and_alignment(layout);
self.claim_stack_area_with_alignment(sym, size, Ord::min(alignment, 8))
self.claim_stack_area_with_alignment(sym, size, Ord::max(alignment, 8))
}
/// Claim space on the stack of a certain size and alignment.


@ -62,7 +62,9 @@ fn call_bitcode_fn_help<'ctx>(
.get_function(fn_name)
.unwrap_or_else(|| panic!("Unrecognized builtin function: {fn_name:?} - if you're working on the Roc compiler, do you need to rebuild the bitcode? See compiler/builtins/bitcode/README.md"));
let call = env.builder.build_call(fn_val, &arguments, "call_builtin");
let call = env
.builder
.new_build_call(fn_val, &arguments, "call_builtin");
// Attributes that we propagate from the zig builtin parameters, to the arguments we give to the
// call. It is undefined behavior in LLVM to have an attribute on a parameter, and then call
@ -120,7 +122,7 @@ pub fn call_bitcode_fn_fixing_for_convention<'a, 'ctx, 'env>(
// when we write an i128 into this (happens in NumToInt), zig expects this pointer to
// be 16-byte aligned. Not doing so is UB and will immediately fail on CI
let cc_return_value_ptr = env.builder.build_alloca(cc_return_type, "return_value");
let cc_return_value_ptr = env.builder.new_build_alloca(cc_return_type, "return_value");
cc_return_value_ptr
.as_instruction()
.unwrap()
@ -249,7 +251,7 @@ fn build_transform_caller_help<'a, 'ctx>(
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(*layout))
.ptr_type(AddressSpace::default());
let cast_ptr = env.builder.build_pointer_cast(
let cast_ptr = env.builder.new_build_pointer_cast(
argument_ptr.into_pointer_value(),
basic_type,
"cast_ptr_to_tag_build_transform_caller_help",
@ -280,9 +282,11 @@ fn build_transform_caller_help<'a, 'ctx>(
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout))
.ptr_type(AddressSpace::default());
let closure_cast =
env.builder
.build_pointer_cast(closure_ptr, closure_type, "cast_opaque_closure");
let closure_cast = env.builder.new_build_pointer_cast(
closure_ptr,
closure_type,
"cast_opaque_closure",
);
let closure_data = load_roc_value(
env,
@ -316,7 +320,7 @@ fn build_transform_caller_help<'a, 'ctx>(
result_u8_ptr,
result,
);
env.builder.build_return(None);
env.builder.new_build_return(None);
env.builder.position_at_end(block);
env.builder.set_current_debug_location(di_location);
@ -421,9 +425,11 @@ fn build_rc_wrapper<'a, 'ctx>(
let value_type =
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout));
let value_ptr_type = value_type.ptr_type(AddressSpace::default());
let value_ptr =
env.builder
.build_pointer_cast(generic_value_ptr, value_ptr_type, "load_opaque");
let value_ptr = env.builder.new_build_pointer_cast(
generic_value_ptr,
value_ptr_type,
"load_opaque",
);
// even though this looks like a `load_roc_value`, that gives segfaults in practice.
// I suspect it has something to do with the lifetime of the alloca that is created by
@ -451,7 +457,7 @@ fn build_rc_wrapper<'a, 'ctx>(
}
}
env.builder.build_return(None);
env.builder.new_build_return(None);
function_value
}
@ -513,13 +519,13 @@ pub fn build_eq_wrapper<'a, 'ctx>(
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout))
.ptr_type(AddressSpace::default());
let value_cast1 = env
.builder
.build_pointer_cast(value_ptr1, value_type, "load_opaque");
let value_cast1 =
env.builder
.new_build_pointer_cast(value_ptr1, value_type, "load_opaque");
let value_cast2 = env
.builder
.build_pointer_cast(value_ptr2, value_type, "load_opaque");
let value_cast2 =
env.builder
.new_build_pointer_cast(value_ptr2, value_type, "load_opaque");
// load_roc_value(env, *element_layout, elem_ptr, "get_elem")
let value1 = load_roc_value(
@ -547,7 +553,7 @@ pub fn build_eq_wrapper<'a, 'ctx>(
layout,
);
env.builder.build_return(Some(&result));
env.builder.new_build_return(Some(&result));
function_value
}
@ -618,11 +624,11 @@ pub fn build_compare_wrapper<'a, 'ctx>(
let value_cast1 =
env.builder
.build_pointer_cast(value_ptr1, value_ptr_type, "load_opaque");
.new_build_pointer_cast(value_ptr1, value_ptr_type, "load_opaque");
let value_cast2 =
env.builder
.build_pointer_cast(value_ptr2, value_ptr_type, "load_opaque");
.new_build_pointer_cast(value_ptr2, value_ptr_type, "load_opaque");
let value1 = load_roc_value(
env,
@ -659,7 +665,7 @@ pub fn build_compare_wrapper<'a, 'ctx>(
);
let closure_ptr_type = closure_type.ptr_type(AddressSpace::default());
let closure_cast = env.builder.build_pointer_cast(
let closure_cast = env.builder.new_build_pointer_cast(
closure_ptr,
closure_ptr_type,
"load_opaque",
@ -675,7 +681,7 @@ pub fn build_compare_wrapper<'a, 'ctx>(
}
};
let call = env.builder.build_call(
let call = env.builder.new_build_call(
roc_function,
arguments_cast,
"call_user_defined_compare_function",
@ -686,7 +692,7 @@ pub fn build_compare_wrapper<'a, 'ctx>(
// IMPORTANT! we call a user function, so it has the fast calling convention
call.set_call_convention(FAST_CALL_CONV);
env.builder.build_return(Some(&result));
env.builder.new_build_return(Some(&result));
function_value
}
@ -812,7 +818,7 @@ fn ptr_len_cap<'ctx>(
.into_int_value();
let upper_word = {
let shift = env.builder.build_right_shift(
let shift = env.builder.new_build_right_shift(
ptr_and_len,
env.context.i64_type().const_int(32, false),
false,
@ -820,16 +826,16 @@ fn ptr_len_cap<'ctx>(
);
env.builder
.build_int_cast(shift, env.context.i32_type(), "list_ptr_int")
.new_build_int_cast(shift, env.context.i32_type(), "list_ptr_int")
};
let lower_word = env
.builder
.build_int_cast(ptr_and_len, env.context.i32_type(), "list_len");
let lower_word =
env.builder
.new_build_int_cast(ptr_and_len, env.context.i32_type(), "list_len");
let len = upper_word;
let ptr = env.builder.build_int_to_ptr(
let ptr = env.builder.new_build_int_to_ptr(
lower_word,
env.context.i8_type().ptr_type(AddressSpace::default()),
"list_ptr",
@ -889,7 +895,7 @@ pub(crate) fn pass_list_to_zig_64bit<'ctx>(
let list_type = super::convert::zig_list_type(env);
let list_alloca = create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
env.builder.build_store(list_alloca, list);
env.builder.new_build_store(list_alloca, list);
list_alloca
}
@ -914,7 +920,7 @@ pub(crate) fn pass_list_or_string_to_zig_32bit<'ctx>(
let ptr = env
.builder
.build_ptr_to_int(ptr, env.context.i32_type(), "ptr_to_i32");
.new_build_ptr_to_int(ptr, env.context.i32_type(), "ptr_to_i32");
let len = env
.builder
@ -931,15 +937,15 @@ pub(crate) fn pass_list_or_string_to_zig_32bit<'ctx>(
let int_64_type = env.context.i64_type();
let len = env
.builder
.build_int_z_extend(len, int_64_type, "list_len_64");
.new_build_int_z_extend(len, int_64_type, "list_len_64");
let ptr = env
.builder
.build_int_z_extend(ptr, int_64_type, "list_ptr_64");
.new_build_int_z_extend(ptr, int_64_type, "list_ptr_64");
let len_shift =
env.builder
.build_left_shift(len, int_64_type.const_int(32, false), "list_len_shift");
let ptr_len = env.builder.build_or(len_shift, ptr, "list_ptr_len");
.new_build_left_shift(len, int_64_type.const_int(32, false), "list_len_shift");
let ptr_len = env.builder.new_build_or(len_shift, ptr, "list_ptr_len");
(ptr_len, cap)
}

File diff suppressed because it is too large


@ -44,7 +44,7 @@ pub(crate) fn list_symbol_to_c_abi<'a, 'ctx>(
let list_alloca = create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
let list = scope.load_symbol(&symbol);
env.builder.build_store(list_alloca, list);
env.builder.new_build_store(list_alloca, list);
list_alloca
}
@ -69,7 +69,7 @@ fn pass_element_as_opaque<'a, 'ctx>(
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout));
let element_ptr = env
.builder
.build_alloca(element_type, "element_to_pass_as_opaque");
.new_build_alloca(element_type, "element_to_pass_as_opaque");
store_roc_value(
env,
layout_interner,
@ -79,7 +79,7 @@ fn pass_element_as_opaque<'a, 'ctx>(
);
env.builder
.build_pointer_cast(
.new_build_pointer_cast(
element_ptr,
env.context.i8_type().ptr_type(AddressSpace::default()),
"pass_element_as_opaque",
@ -102,7 +102,7 @@ pub(crate) fn pass_as_opaque<'ctx>(
ptr: PointerValue<'ctx>,
) -> BasicValueEnum<'ctx> {
env.builder
.build_pointer_cast(
.new_build_pointer_cast(
ptr,
env.context.i8_type().ptr_type(AddressSpace::default()),
"pass_as_opaque",
@ -338,7 +338,7 @@ pub(crate) fn list_replace_unsafe<'a, 'ctx>(
);
let element_ptr = env
.builder
.build_alloca(element_type, "output_element_as_opaque");
.new_build_alloca(element_type, "output_element_as_opaque");
// Assume the bounds have already been checked earlier
// (e.g. by List.replace or List.set, which wrap List.#replaceUnsafe)
@ -420,7 +420,7 @@ fn bounds_check_comparison<'ctx>(
// to avoid misprediction. (In practice this should usually pass,
// and CPUs generally default to predicting that a forward jump
// shouldn't be taken; that is, they predict "else" won't be taken.)
builder.build_int_compare(IntPredicate::ULT, elem_index, len, "bounds_check")
builder.new_build_int_compare(IntPredicate::ULT, elem_index, len, "bounds_check")
}
/// List.len : List * -> Nat
@ -749,14 +749,14 @@ where
let zero = env.ptr_int().const_zero();
// allocate a stack slot for the current index
let index_alloca = builder.build_alloca(env.ptr_int(), index_name);
builder.build_store(index_alloca, zero);
let index_alloca = builder.new_build_alloca(env.ptr_int(), index_name);
builder.new_build_store(index_alloca, zero);
let loop_bb = ctx.append_basic_block(parent, "loop");
let after_loop_bb = ctx.append_basic_block(parent, "after_loop");
let loop_end_cond = bounds_check_comparison(builder, zero, end);
builder.build_conditional_branch(loop_end_cond, loop_bb, after_loop_bb);
builder.new_build_conditional_branch(loop_end_cond, loop_bb, after_loop_bb);
{
builder.position_at_end(loop_bb);
@ -764,8 +764,8 @@ where
let current_index = builder
.new_build_load(env.ptr_int(), index_alloca, "index")
.into_int_value();
let next_index = builder.build_int_add(current_index, one, "next_index");
builder.build_store(index_alloca, next_index);
let next_index = builder.new_build_int_add(current_index, one, "next_index");
builder.new_build_store(index_alloca, next_index);
// The body of the loop
loop_fn(layout_interner, current_index);
@ -773,7 +773,7 @@ where
// #index < end
let loop_end_cond = bounds_check_comparison(builder, next_index, end);
builder.build_conditional_branch(loop_end_cond, loop_bb, after_loop_bb);
builder.new_build_conditional_branch(loop_end_cond, loop_bb, after_loop_bb);
}
builder.position_at_end(after_loop_bb);
@ -831,7 +831,7 @@ pub(crate) fn allocate_list<'a, 'ctx>(
let elem_bytes = layout_interner.stack_size(elem_layout) as u64;
let bytes_per_element = len_type.const_int(elem_bytes, false);
let number_of_data_bytes =
builder.build_int_mul(bytes_per_element, number_of_elements, "data_length");
builder.new_build_int_mul(bytes_per_element, number_of_elements, "data_length");
let basic_type =
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(elem_layout));


@ -69,7 +69,7 @@ fn build_eq_builtin<'a, 'ctx>(
builtin: &Builtin<'a>,
) -> BasicValueEnum<'ctx> {
let int_cmp = |pred, label| {
let int_val = env.builder.build_int_compare(
let int_val = env.builder.new_build_int_compare(
pred,
lhs_val.into_int_value(),
rhs_val.into_int_value(),
@ -80,7 +80,7 @@ fn build_eq_builtin<'a, 'ctx>(
};
let float_cmp = |pred, label| {
let int_val = env.builder.build_float_compare(
let int_val = env.builder.new_build_float_compare(
pred,
lhs_val.into_float_value(),
rhs_val.into_float_value(),
@ -197,13 +197,13 @@ fn build_eq<'a, 'ctx>(
let bt = basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout));
// cast the i64 pointer to a pointer to block of memory
let field1_cast = env.builder.build_pointer_cast(
let field1_cast = env.builder.new_build_pointer_cast(
lhs_val.into_pointer_value(),
bt.into_pointer_type(),
"i64_to_opaque",
);
let field2_cast = env.builder.build_pointer_cast(
let field2_cast = env.builder.new_build_pointer_cast(
rhs_val.into_pointer_value(),
bt.into_pointer_type(),
"i64_to_opaque",
@ -240,7 +240,7 @@ fn build_neq_builtin<'a, 'ctx>(
builtin: &Builtin<'a>,
) -> BasicValueEnum<'ctx> {
let int_cmp = |pred, label| {
let int_val = env.builder.build_int_compare(
let int_val = env.builder.new_build_int_compare(
pred,
lhs_val.into_int_value(),
rhs_val.into_int_value(),
@ -251,7 +251,7 @@ fn build_neq_builtin<'a, 'ctx>(
};
let float_cmp = |pred, label| {
let int_val = env.builder.build_float_compare(
let int_val = env.builder.new_build_float_compare(
pred,
lhs_val.into_float_value(),
rhs_val.into_float_value(),
@ -297,7 +297,7 @@ fn build_neq_builtin<'a, 'ctx>(
Builtin::Str => {
let is_equal = str_equal(env, lhs_val, rhs_val).into_int_value();
let result: IntValue = env.builder.build_not(is_equal, "negate");
let result: IntValue = env.builder.new_build_not(is_equal, "negate");
result.into()
}
@ -313,7 +313,7 @@ fn build_neq_builtin<'a, 'ctx>(
)
.into_int_value();
let result: IntValue = env.builder.build_not(is_equal, "negate");
let result: IntValue = env.builder.new_build_not(is_equal, "negate");
result.into()
}
@ -358,7 +358,7 @@ fn build_neq<'a, 'ctx>(
)
.into_int_value();
let result: IntValue = env.builder.build_not(is_equal, "negate");
let result: IntValue = env.builder.new_build_not(is_equal, "negate");
result.into()
}
@ -375,7 +375,7 @@ fn build_neq<'a, 'ctx>(
)
.into_int_value();
let result: IntValue = env.builder.build_not(is_equal, "negate");
let result: IntValue = env.builder.new_build_not(is_equal, "negate");
result.into()
}
@ -392,7 +392,7 @@ fn build_neq<'a, 'ctx>(
)
.into_int_value();
let result: IntValue = env.builder.build_not(is_equal, "negate");
let result: IntValue = env.builder.new_build_not(is_equal, "negate");
result.into()
}
@ -456,7 +456,7 @@ fn build_list_eq<'a, 'ctx>(
env.builder.set_current_debug_location(di_location);
let call = env
.builder
.build_call(function, &[list1.into(), list2.into()], "list_eq");
.new_build_call(function, &[list1.into(), list2.into()], "list_eq");
call.set_call_convention(FAST_CALL_CONV);
@ -515,12 +515,12 @@ fn build_list_eq_help<'a, 'ctx>(
let length_equal: IntValue =
env.builder
.build_int_compare(IntPredicate::EQ, len1, len2, "bounds_check");
.new_build_int_compare(IntPredicate::EQ, len1, len2, "bounds_check");
let then_block = ctx.append_basic_block(parent, "then");
env.builder
.build_conditional_branch(length_equal, then_block, return_false);
.new_build_conditional_branch(length_equal, then_block, return_false);
{
// the length is equal; check elements pointwise
@ -536,15 +536,15 @@ fn build_list_eq_help<'a, 'ctx>(
let end = len1;
// allocate a stack slot for the current index
let index_alloca = builder.build_alloca(env.ptr_int(), "index");
builder.build_store(index_alloca, env.ptr_int().const_zero());
let index_alloca = builder.new_build_alloca(env.ptr_int(), "index");
builder.new_build_store(index_alloca, env.ptr_int().const_zero());
let loop_bb = ctx.append_basic_block(parent, "loop");
let body_bb = ctx.append_basic_block(parent, "body");
let increment_bb = ctx.append_basic_block(parent, "increment");
// the "top" of the loop
builder.build_unconditional_branch(loop_bb);
builder.new_build_unconditional_branch(loop_bb);
builder.position_at_end(loop_bb);
let curr_index = builder
@ -553,11 +553,11 @@ fn build_list_eq_help<'a, 'ctx>(
// #index < end
let loop_end_cond =
builder.build_int_compare(IntPredicate::ULT, curr_index, end, "bounds_check");
builder.new_build_int_compare(IntPredicate::ULT, curr_index, end, "bounds_check");
// if we're at the end, and all elements were equal so far, return true
// otherwise check the current elements for equality
builder.build_conditional_branch(loop_end_cond, body_bb, return_true);
builder.new_build_conditional_branch(loop_end_cond, body_bb, return_true);
{
// loop body
@ -590,7 +590,7 @@ fn build_list_eq_help<'a, 'ctx>(
// if the elements are equal, increment the index and check the next element
// otherwise, return false
builder.build_conditional_branch(are_equal, increment_bb, return_false);
builder.new_build_conditional_branch(are_equal, increment_bb, return_false);
}
{
@ -599,25 +599,25 @@ fn build_list_eq_help<'a, 'ctx>(
// constant 1isize
let one = env.ptr_int().const_int(1, false);
let next_index = builder.build_int_add(curr_index, one, "nextindex");
let next_index = builder.new_build_int_add(curr_index, one, "nextindex");
builder.build_store(index_alloca, next_index);
builder.new_build_store(index_alloca, next_index);
// jump back to the top of the loop
builder.build_unconditional_branch(loop_bb);
builder.new_build_unconditional_branch(loop_bb);
}
}
{
env.builder.position_at_end(return_true);
env.builder
.build_return(Some(&env.context.bool_type().const_int(1, false)));
.new_build_return(Some(&env.context.bool_type().const_int(1, false)));
}
{
env.builder.position_at_end(return_false);
env.builder
.build_return(Some(&env.context.bool_type().const_int(0, false)));
.new_build_return(Some(&env.context.bool_type().const_int(0, false)));
}
}
@ -667,7 +667,7 @@ fn build_struct_eq<'a, 'ctx>(
env.builder.set_current_debug_location(di_location);
let call = env
.builder
.build_call(function, &[struct1.into(), struct2.into()], "struct_eq");
.new_build_call(function, &[struct1.into(), struct2.into()], "struct_eq");
call.set_call_convention(FAST_CALL_CONV);
@ -717,7 +717,7 @@ fn build_struct_eq_help<'a, 'ctx>(
let entry = ctx.append_basic_block(parent, "entry");
let start = ctx.append_basic_block(parent, "start");
env.builder.position_at_end(entry);
env.builder.build_unconditional_branch(start);
env.builder.new_build_unconditional_branch(start);
let return_true = ctx.append_basic_block(parent, "return_true");
let return_false = ctx.append_basic_block(parent, "return_false");
@ -757,13 +757,13 @@ fn build_struct_eq_help<'a, 'ctx>(
);
// cast the i64 pointer to a pointer to block of memory
let field1_cast = env.builder.build_pointer_cast(
let field1_cast = env.builder.new_build_pointer_cast(
field1.into_pointer_value(),
bt.into_pointer_type(),
"i64_to_opaque",
);
let field2_cast = env.builder.build_pointer_cast(
let field2_cast = env.builder.new_build_pointer_cast(
field2.into_pointer_value(),
bt.into_pointer_type(),
"i64_to_opaque",
@ -795,22 +795,22 @@ fn build_struct_eq_help<'a, 'ctx>(
current = ctx.append_basic_block(parent, &format!("eq_step_{index}"));
env.builder
.build_conditional_branch(are_equal, current, return_false);
.new_build_conditional_branch(are_equal, current, return_false);
}
env.builder.position_at_end(current);
env.builder.build_unconditional_branch(return_true);
env.builder.new_build_unconditional_branch(return_true);
{
env.builder.position_at_end(return_true);
env.builder
.build_return(Some(&env.context.bool_type().const_int(1, false)));
.new_build_return(Some(&env.context.bool_type().const_int(1, false)));
}
{
env.builder.position_at_end(return_false);
env.builder
.build_return(Some(&env.context.bool_type().const_int(0, false)));
.new_build_return(Some(&env.context.bool_type().const_int(0, false)));
}
}
@ -859,7 +859,7 @@ fn build_tag_eq<'a, 'ctx>(
env.builder.set_current_debug_location(di_location);
let call = env
.builder
.build_call(function, &[tag1.into(), tag2.into()], "tag_eq");
.new_build_call(function, &[tag1.into(), tag2.into()], "tag_eq");
call.set_call_convention(FAST_CALL_CONV);
@ -913,13 +913,13 @@ fn build_tag_eq_help<'a, 'ctx>(
{
env.builder.position_at_end(return_false);
env.builder
.build_return(Some(&env.context.bool_type().const_int(0, false)));
.new_build_return(Some(&env.context.bool_type().const_int(0, false)));
}
{
env.builder.position_at_end(return_true);
env.builder
.build_return(Some(&env.context.bool_type().const_int(1, false)));
.new_build_return(Some(&env.context.bool_type().const_int(1, false)));
}
env.builder.position_at_end(entry);
@ -929,22 +929,22 @@ fn build_tag_eq_help<'a, 'ctx>(
match union_layout {
NonRecursive(&[]) => {
// we're comparing empty tag unions; this code is effectively unreachable
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
NonRecursive(tags) => {
let ptr_equal = env.builder.build_int_compare(
let ptr_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
env.builder
.build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
env.builder
.build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
"compare_pointers",
);
let compare_tag_ids = ctx.append_basic_block(parent, "compare_tag_ids");
env.builder
.build_conditional_branch(ptr_equal, return_true, compare_tag_ids);
.new_build_conditional_branch(ptr_equal, return_true, compare_tag_ids);
env.builder.position_at_end(compare_tag_ids);
@ -959,10 +959,10 @@ fn build_tag_eq_help<'a, 'ctx>(
let same_tag =
env.builder
.build_int_compare(IntPredicate::EQ, id1, id2, "compare_tag_id");
.new_build_int_compare(IntPredicate::EQ, id1, id2, "compare_tag_id");
env.builder
.build_conditional_branch(same_tag, compare_tag_fields, return_false);
.new_build_conditional_branch(same_tag, compare_tag_fields, return_false);
env.builder.position_at_end(compare_tag_fields);
@ -986,7 +986,7 @@ fn build_tag_eq_help<'a, 'ctx>(
tag2,
);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
cases.push((id1.get_type().const_int(tag_id as u64, false), block));
}
@ -995,28 +995,28 @@ fn build_tag_eq_help<'a, 'ctx>(
match cases.pop() {
Some((_, default)) => {
env.builder.build_switch(id1, default, &cases);
env.builder.new_build_switch(id1, default, &cases);
}
None => {
// we're comparing empty tag unions; this code is effectively unreachable
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
}
}
Recursive(tags) => {
let ptr_equal = env.builder.build_int_compare(
let ptr_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
env.builder
.build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
env.builder
.build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
"compare_pointers",
);
let compare_tag_ids = ctx.append_basic_block(parent, "compare_tag_ids");
env.builder
.build_conditional_branch(ptr_equal, return_true, compare_tag_ids);
.new_build_conditional_branch(ptr_equal, return_true, compare_tag_ids);
env.builder.position_at_end(compare_tag_ids);
@ -1031,10 +1031,10 @@ fn build_tag_eq_help<'a, 'ctx>(
let same_tag =
env.builder
.build_int_compare(IntPredicate::EQ, id1, id2, "compare_tag_id");
.new_build_int_compare(IntPredicate::EQ, id1, id2, "compare_tag_id");
env.builder
.build_conditional_branch(same_tag, compare_tag_fields, return_false);
.new_build_conditional_branch(same_tag, compare_tag_fields, return_false);
env.builder.position_at_end(compare_tag_fields);
@ -1058,7 +1058,7 @@ fn build_tag_eq_help<'a, 'ctx>(
tag2,
);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
cases.push((id1.get_type().const_int(tag_id as u64, false), block));
}
@ -1067,15 +1067,15 @@ fn build_tag_eq_help<'a, 'ctx>(
let default = cases.pop().unwrap().1;
env.builder.build_switch(id1, default, &cases);
env.builder.new_build_switch(id1, default, &cases);
}
NullableUnwrapped { other_fields, .. } => {
let ptr_equal = env.builder.build_int_compare(
let ptr_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
env.builder
.build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
env.builder
.build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
"compare_pointers",
);
@ -1083,7 +1083,7 @@ fn build_tag_eq_help<'a, 'ctx>(
let compare_other = ctx.append_basic_block(parent, "compare_other");
env.builder
.build_conditional_branch(ptr_equal, return_true, check_for_null);
.new_build_conditional_branch(ptr_equal, return_true, check_for_null);
// check for NULL
@ -1091,18 +1091,20 @@ fn build_tag_eq_help<'a, 'ctx>(
let is_null_1 = env
.builder
.build_is_null(tag1.into_pointer_value(), "is_null");
.new_build_is_null(tag1.into_pointer_value(), "is_null");
let is_null_2 = env
.builder
.build_is_null(tag2.into_pointer_value(), "is_null");
.new_build_is_null(tag2.into_pointer_value(), "is_null");
let either_null = env.builder.build_or(is_null_1, is_null_2, "either_null");
let either_null = env
.builder
.new_build_or(is_null_1, is_null_2, "either_null");
// logic: the pointers are not the same, if one is NULL, the other one is not
// therefore the two tags are not equal
env.builder
.build_conditional_branch(either_null, return_false, compare_other);
.new_build_conditional_branch(either_null, return_false, compare_other);
// compare the non-null case
@ -1120,18 +1122,18 @@ fn build_tag_eq_help<'a, 'ctx>(
tag2.into_pointer_value(),
);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
}
NullableWrapped {
other_tags,
nullable_id,
} => {
let ptr_equal = env.builder.build_int_compare(
let ptr_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
env.builder
.build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
env.builder
.build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
"compare_pointers",
);
@ -1139,7 +1141,7 @@ fn build_tag_eq_help<'a, 'ctx>(
let compare_other = ctx.append_basic_block(parent, "compare_other");
env.builder
.build_conditional_branch(ptr_equal, return_true, check_for_null);
.new_build_conditional_branch(ptr_equal, return_true, check_for_null);
// check for NULL
@ -1147,11 +1149,11 @@ fn build_tag_eq_help<'a, 'ctx>(
let is_null_1 = env
.builder
.build_is_null(tag1.into_pointer_value(), "is_null");
.new_build_is_null(tag1.into_pointer_value(), "is_null");
let is_null_2 = env
.builder
.build_is_null(tag2.into_pointer_value(), "is_null");
.new_build_is_null(tag2.into_pointer_value(), "is_null");
// Logic:
//
@ -1162,15 +1164,15 @@ fn build_tag_eq_help<'a, 'ctx>(
let i8_type = env.context.i8_type();
let sum = env.builder.build_int_add(
let sum = env.builder.new_build_int_add(
env.builder
.build_int_cast_sign_flag(is_null_1, i8_type, false, "to_u8"),
.new_build_int_cast_sign_flag(is_null_1, i8_type, false, "to_u8"),
env.builder
.build_int_cast_sign_flag(is_null_2, i8_type, false, "to_u8"),
.new_build_int_cast_sign_flag(is_null_2, i8_type, false, "to_u8"),
"sum_is_null",
);
env.builder.build_switch(
env.builder.new_build_switch(
sum,
compare_other,
&[
@ -1194,10 +1196,10 @@ fn build_tag_eq_help<'a, 'ctx>(
let same_tag =
env.builder
.build_int_compare(IntPredicate::EQ, id1, id2, "compare_tag_id");
.new_build_int_compare(IntPredicate::EQ, id1, id2, "compare_tag_id");
env.builder
.build_conditional_branch(same_tag, compare_tag_fields, return_false);
.new_build_conditional_branch(same_tag, compare_tag_fields, return_false);
env.builder.position_at_end(compare_tag_fields);
@ -1224,7 +1226,7 @@ fn build_tag_eq_help<'a, 'ctx>(
tag2,
);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
cases.push((id1.get_type().const_int(tag_id as u64, false), block));
}
@ -1233,22 +1235,22 @@ fn build_tag_eq_help<'a, 'ctx>(
let default = cases.pop().unwrap().1;
env.builder.build_switch(id1, default, &cases);
env.builder.new_build_switch(id1, default, &cases);
}
NonNullableUnwrapped(field_layouts) => {
let ptr_equal = env.builder.build_int_compare(
let ptr_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
env.builder
.build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag1.into_pointer_value(), env.ptr_int(), "pti"),
env.builder
.build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(tag2.into_pointer_value(), env.ptr_int(), "pti"),
"compare_pointers",
);
let compare_fields = ctx.append_basic_block(parent, "compare_fields");
env.builder
.build_conditional_branch(ptr_equal, return_true, compare_fields);
.new_build_conditional_branch(ptr_equal, return_true, compare_fields);
env.builder.position_at_end(compare_fields);
@ -1264,7 +1266,7 @@ fn build_tag_eq_help<'a, 'ctx>(
tag2.into_pointer_value(),
);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
}
}
}
@ -1282,13 +1284,13 @@ fn eq_ptr_to_struct<'a, 'ctx>(
debug_assert!(wrapper_type.is_struct_type());
// cast the opaque pointer to a pointer of the correct shape
let struct1_ptr = env.builder.build_pointer_cast(
let struct1_ptr = env.builder.new_build_pointer_cast(
tag1,
wrapper_type.ptr_type(AddressSpace::default()),
"opaque_to_correct",
);
let struct2_ptr = env.builder.build_pointer_cast(
let struct2_ptr = env.builder.new_build_pointer_cast(
tag2,
wrapper_type.ptr_type(AddressSpace::default()),
"opaque_to_correct",
@ -1368,7 +1370,7 @@ fn build_box_eq<'a, 'ctx>(
env.builder.set_current_debug_location(di_location);
let call = env
.builder
.build_call(function, &[tag1.into(), tag2.into()], "box_eq");
.new_build_call(function, &[tag1.into(), tag2.into()], "box_eq");
call.set_call_convention(FAST_CALL_CONV);
@ -1420,23 +1422,23 @@ fn build_box_eq_help<'a, 'ctx>(
let return_true = ctx.append_basic_block(parent, "return_true");
env.builder.position_at_end(return_true);
env.builder
.build_return(Some(&env.context.bool_type().const_all_ones()));
.new_build_return(Some(&env.context.bool_type().const_all_ones()));
env.builder.position_at_end(entry);
let ptr_equal = env.builder.build_int_compare(
let ptr_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
env.builder
.build_ptr_to_int(box1.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(box1.into_pointer_value(), env.ptr_int(), "pti"),
env.builder
.build_ptr_to_int(box2.into_pointer_value(), env.ptr_int(), "pti"),
.new_build_ptr_to_int(box2.into_pointer_value(), env.ptr_int(), "pti"),
"compare_pointers",
);
let check_null_then_compare_inner_values =
ctx.append_basic_block(parent, "check_null_then_compare_inner_values");
env.builder.build_conditional_branch(
env.builder.new_build_conditional_branch(
ptr_equal,
return_true,
check_null_then_compare_inner_values,
@ -1449,27 +1451,27 @@ fn build_box_eq_help<'a, 'ctx>(
let box1_is_null = env
.builder
.build_is_null(box1.into_pointer_value(), "box1_is_null");
.new_build_is_null(box1.into_pointer_value(), "box1_is_null");
let check_box2_is_null = ctx.append_basic_block(parent, "check_if_box2_is_null");
let return_false = ctx.append_basic_block(parent, "return_false");
let compare_inner_values = ctx.append_basic_block(parent, "compare_inner_values");
env.builder
.build_conditional_branch(box1_is_null, return_false, check_box2_is_null);
.new_build_conditional_branch(box1_is_null, return_false, check_box2_is_null);
{
env.builder.position_at_end(check_box2_is_null);
let box2_is_null = env
.builder
.build_is_null(box2.into_pointer_value(), "box2_is_null");
.new_build_is_null(box2.into_pointer_value(), "box2_is_null");
env.builder
.build_conditional_branch(box2_is_null, return_false, compare_inner_values);
.new_build_conditional_branch(box2_is_null, return_false, compare_inner_values);
}
{
env.builder.position_at_end(return_false);
env.builder
.build_return(Some(&env.context.bool_type().const_zero()));
.new_build_return(Some(&env.context.bool_type().const_zero()));
}
// Compare the inner values.
@ -1504,5 +1506,5 @@ fn build_box_eq_help<'a, 'ctx>(
layout_interner.get_repr(inner_layout),
);
env.builder.build_return(Some(&is_equal));
env.builder.new_build_return(Some(&is_equal));
}


@ -428,30 +428,27 @@ impl<'ctx> RocUnion<'ctx> {
) {
debug_assert_eq!(tag_id.is_some(), self.tag_type.is_some());
let data_buffer = env
.builder
.new_build_struct_gep(
self.struct_type(),
tag_alloca,
Self::TAG_DATA_INDEX,
"data_buffer",
)
.unwrap();
let data_buffer = env.builder.new_build_struct_gep(
self.struct_type(),
tag_alloca,
Self::TAG_DATA_INDEX,
"data_buffer",
);
match data {
// NOTE: the data may be smaller than the buffer, so there might be uninitialized
// bytes in the buffer. We should never touch those, but e.g. valgrind might not
// realize that. If that comes up, the solution is to just fill it with zeros
RocStruct::ByValue(value) => {
let cast_pointer = env.builder.build_pointer_cast(
let cast_pointer = env.builder.new_build_pointer_cast(
data_buffer,
value.get_type().ptr_type(AddressSpace::default()),
"to_data_ptr",
);
env.builder.build_store(cast_pointer, value);
env.builder.new_build_store(cast_pointer, value);
}
RocStruct::ByReference(payload_data_ptr) => {
let cast_tag_pointer = env.builder.build_pointer_cast(
let cast_tag_pointer = env.builder.new_build_pointer_cast(
data_buffer,
payload_data_ptr.get_type(),
"to_data_ptr",
@ -477,19 +474,16 @@ impl<'ctx> RocUnion<'ctx> {
TagType::I16 => env.context.i16_type(),
};
let tag_id_ptr = env
.builder
.new_build_struct_gep(
self.struct_type(),
tag_alloca,
Self::TAG_ID_INDEX,
"tag_id_ptr",
)
.unwrap();
let tag_id_ptr = env.builder.new_build_struct_gep(
self.struct_type(),
tag_alloca,
Self::TAG_ID_INDEX,
"tag_id_ptr",
);
let tag_id = tag_id_type.const_int(tag_id as u64, false);
env.builder.build_store(tag_id_ptr, tag_id);
env.builder.new_build_store(tag_id_ptr, tag_id);
}
}
}


@ -5,7 +5,7 @@ use inkwell::{
};
use roc_mono::ir::ErasedField;
use super::build::Env;
use super::build::{BuilderExt, Env};
pub fn opaque_ptr_type<'ctx>(env: &Env<'_, 'ctx, '_>) -> PointerType<'ctx> {
env.context.i8_type().ptr_type(AddressSpace::default())
@ -50,7 +50,7 @@ fn bitcast_to_opaque_ptr<'ctx>(
value: PointerValue<'ctx>,
) -> PointerValue<'ctx> {
env.builder
.build_bitcast(
.new_build_bitcast(
value,
env.context.i8_type().ptr_type(AddressSpace::default()),
"to_opaque_ptr",
@ -108,7 +108,7 @@ pub fn load<'ctx>(
let value = env
.builder
.build_bitcast(value, as_type, "bitcast_to_type")
.new_build_bitcast(value, as_type, "bitcast_to_type")
.into_pointer_value();
value


@ -37,7 +37,7 @@ impl<'ctx> SharedMemoryPointer<'ctx> {
let call_result = env
.builder
.build_call(func, &[], "call_expect_start_failed");
.new_build_call(func, &[], "call_expect_start_failed");
let ptr = call_result
.try_as_basic_value()
@ -99,7 +99,7 @@ fn read_state<'ctx>(
ptr: PointerValue<'ctx>,
) -> (IntValue<'ctx>, IntValue<'ctx>) {
let ptr_type = env.ptr_int().ptr_type(AddressSpace::default());
let ptr = env.builder.build_pointer_cast(ptr, ptr_type, "");
let ptr = env.builder.new_build_pointer_cast(ptr, ptr_type, "");
let one = env.ptr_int().const_int(1, false);
let offset_ptr = pointer_at_offset(env.builder, env.ptr_int(), ptr, one);
@ -119,13 +119,13 @@ fn write_state<'ctx>(
offset: IntValue<'ctx>,
) {
let ptr_type = env.ptr_int().ptr_type(AddressSpace::default());
let ptr = env.builder.build_pointer_cast(ptr, ptr_type, "");
let ptr = env.builder.new_build_pointer_cast(ptr, ptr_type, "");
let one = env.ptr_int().const_int(1, false);
let offset_ptr = pointer_at_offset(env.builder, env.ptr_int(), ptr, one);
env.builder.build_store(ptr, count);
env.builder.build_store(offset_ptr, offset);
env.builder.new_build_store(ptr, count);
env.builder.new_build_store(offset_ptr, offset);
}
fn offset_add<'ctx>(
@ -134,7 +134,7 @@ fn offset_add<'ctx>(
extra: u32,
) -> IntValue<'ctx> {
let intval = current.get_type().const_int(extra as _, false);
builder.build_int_add(current, intval, "offset_add")
builder.new_build_int_add(current, intval, "offset_add")
}
pub(crate) fn notify_parent_expect(env: &Env, shared_memory: &SharedMemoryPointer) {
@ -143,7 +143,7 @@ pub(crate) fn notify_parent_expect(env: &Env, shared_memory: &SharedMemoryPointe
.get_function(bitcode::NOTIFY_PARENT_EXPECT)
.unwrap();
env.builder.build_call(
env.builder.new_build_call(
func,
&[shared_memory.0.into()],
"call_expect_failed_finalize",
@ -153,7 +153,7 @@ pub(crate) fn notify_parent_expect(env: &Env, shared_memory: &SharedMemoryPointe
pub(crate) fn notify_parent_dbg(env: &Env, shared_memory: &SharedMemoryPointer) {
let func = env.module.get_function(bitcode::NOTIFY_PARENT_DBG).unwrap();
env.builder.build_call(
env.builder.new_build_call(
func,
&[shared_memory.0.into()],
"call_expect_failed_finalize",
@ -203,7 +203,7 @@ pub(crate) fn clone_to_shared_memory<'a, 'ctx>(
offset = env
.builder
.build_int_add(offset, space_for_offsets, "offset");
.new_build_int_add(offset, space_for_offsets, "offset");
for lookup in lookups.iter() {
lookup_starts.push(offset);
@ -214,7 +214,7 @@ pub(crate) fn clone_to_shared_memory<'a, 'ctx>(
.ptr_int()
.const_int(layout_interner.stack_size(layout) as u64, false);
let mut extra_offset = env.builder.build_int_add(offset, stack_size, "offset");
let mut extra_offset = env.builder.new_build_int_add(offset, stack_size, "offset");
let cursors = Cursors {
offset,
@ -246,7 +246,7 @@ pub(crate) fn clone_to_shared_memory<'a, 'ctx>(
.ptr_int()
.const_int(env.target_info.ptr_size() as _, false);
offset = env.builder.build_int_add(offset, ptr_width, "offset");
offset = env.builder.new_build_int_add(offset, ptr_width, "offset");
}
// Store the specialized variable of the value
@ -263,26 +263,26 @@ pub(crate) fn clone_to_shared_memory<'a, 'ctx>(
let u32_ptr = env.context.i32_type().ptr_type(AddressSpace::default());
let ptr = env
.builder
.build_pointer_cast(ptr, u32_ptr, "cast_ptr_type");
.new_build_pointer_cast(ptr, u32_ptr, "cast_ptr_type");
let var_value = env
.context
.i32_type()
.const_int(lookup_var.index() as _, false);
env.builder.build_store(ptr, var_value);
env.builder.new_build_store(ptr, var_value);
let var_size = env
.ptr_int()
.const_int(std::mem::size_of::<u32>() as _, false);
offset = env.builder.build_int_add(offset, var_size, "offset");
offset = env.builder.new_build_int_add(offset, var_size, "offset");
}
}
}
let one = env.ptr_int().const_int(1, false);
let new_count = env.builder.build_int_add(count, one, "inc");
let new_count = env.builder.new_build_int_add(count, one, "inc");
write_state(env, original_ptr, new_count, offset)
}
@ -335,7 +335,7 @@ fn build_clone<'a, 'ctx>(
let ptr_type = value.get_type().ptr_type(AddressSpace::default());
let ptr = env
.builder
.build_pointer_cast(ptr, ptr_type, "cast_ptr_type");
.new_build_pointer_cast(ptr, ptr_type, "cast_ptr_type");
store_roc_value(env, layout_interner, layout, ptr, value);
@ -363,7 +363,7 @@ fn build_clone<'a, 'ctx>(
let bt = basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout));
// cast the i64 pointer to a pointer to block of memory
let field1_cast = env.builder.build_pointer_cast(
let field1_cast = env.builder.new_build_pointer_cast(
value.into_pointer_value(),
bt.into_pointer_type(),
"i64_to_opaque",
@ -429,7 +429,7 @@ fn build_clone_struct<'a, 'ctx>(
cursors.extra_offset = new_extra;
cursors.offset = env
.builder
.build_int_add(cursors.offset, field_width, "offset");
.new_build_int_add(cursors.offset, field_width, "offset");
}
cursors.extra_offset
@ -496,7 +496,7 @@ fn build_clone_tag<'a, 'ctx>(
}
};
let call = env.builder.build_call(
let call = env.builder.new_build_call(
function,
&[
ptr.into(),
@ -523,17 +523,14 @@ fn load_tag_data<'a, 'ctx>(
) -> BasicValueEnum<'ctx> {
let union_struct_type = struct_type_from_union_layout(env, layout_interner, &union_layout);
let raw_data_ptr = env
.builder
.new_build_struct_gep(
union_struct_type,
tag_value,
RocUnion::TAG_DATA_INDEX,
"tag_data",
)
.unwrap();
let raw_data_ptr = env.builder.new_build_struct_gep(
union_struct_type,
tag_value,
RocUnion::TAG_DATA_INDEX,
"tag_data",
);
let data_ptr = env.builder.build_pointer_cast(
let data_ptr = env.builder.new_build_pointer_cast(
raw_data_ptr,
tag_type.ptr_type(AddressSpace::default()),
"data_ptr",
@ -555,7 +552,7 @@ fn clone_tag_payload_and_id<'a, 'ctx>(
) -> IntValue<'ctx> {
let payload_type = basic_type_from_layout(env, layout_interner, payload_layout);
let payload_ptr = env.builder.build_pointer_cast(
let payload_ptr = env.builder.new_build_pointer_cast(
opaque_payload_ptr,
payload_type.ptr_type(AddressSpace::default()),
"cast_payload_ptr",
@ -636,13 +633,13 @@ fn build_clone_tag_help<'a, 'ctx>(
match union_layout {
NonRecursive(&[]) => {
// we're comparing empty tag unions; this code is effectively unreachable
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
NonRecursive(tags) => {
let id = get_tag_id(env, layout_interner, parent, &union_layout, tag_value);
let switch_block = env.context.append_basic_block(parent, "switch_block");
env.builder.build_unconditional_branch(switch_block);
env.builder.new_build_unconditional_branch(switch_block);
let mut cases = Vec::with_capacity_in(tags.len(), env.arena);
@ -655,15 +652,12 @@ fn build_clone_tag_help<'a, 'ctx>(
// load the tag payload (if any)
let payload_layout = LayoutRepr::struct_(field_layouts);
let opaque_payload_ptr = env
.builder
.new_build_struct_gep(
roc_union.struct_type(),
tag_value.into_pointer_value(),
RocUnion::TAG_DATA_INDEX,
"data_buffer",
)
.unwrap();
let opaque_payload_ptr = env.builder.new_build_struct_gep(
roc_union.struct_type(),
tag_value.into_pointer_value(),
RocUnion::TAG_DATA_INDEX,
"data_buffer",
);
let answer = clone_tag_payload_and_id(
env,
@ -677,7 +671,7 @@ fn build_clone_tag_help<'a, 'ctx>(
opaque_payload_ptr,
);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
cases.push((id.get_type().const_int(tag_id as u64, false), block));
}
@ -686,11 +680,11 @@ fn build_clone_tag_help<'a, 'ctx>(
match cases.pop() {
Some((_, default)) => {
env.builder.build_switch(id, default, &cases);
env.builder.new_build_switch(id, default, &cases);
}
None => {
// we're serializing an empty tag union; this code is effectively unreachable
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
}
}
@ -698,7 +692,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let id = get_tag_id(env, layout_interner, parent, &union_layout, tag_value);
let switch_block = env.context.append_basic_block(parent, "switch_block");
env.builder.build_unconditional_branch(switch_block);
env.builder.new_build_unconditional_branch(switch_block);
let mut cases = Vec::with_capacity_in(tags.len(), env.arena);
@ -728,7 +722,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let cursors = Cursors {
offset: extra_offset,
extra_offset: env.builder.build_int_add(
extra_offset: env.builder.new_build_int_add(
extra_offset,
env.ptr_int().const_int(width as _, false),
"new_offset",
@ -738,7 +732,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let answer =
build_clone(env, layout_interner, layout_ids, ptr, cursors, data, layout);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
cases.push((id.get_type().const_int(tag_id as u64, false), block));
}
@ -747,11 +741,11 @@ fn build_clone_tag_help<'a, 'ctx>(
match cases.pop() {
Some((_, default)) => {
env.builder.build_switch(id, default, &cases);
env.builder.new_build_switch(id, default, &cases);
}
None => {
// we're serializing an empty tag union; this code is effectively unreachable
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
}
}
@ -767,7 +761,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let cursors = Cursors {
offset: extra_offset,
extra_offset: env.builder.build_int_add(
extra_offset: env.builder.new_build_int_add(
extra_offset,
env.ptr_int().const_int(width as _, false),
"new_offset",
@ -778,7 +772,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let answer = build_clone(env, layout_interner, layout_ids, ptr, cursors, data, layout);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
}
NullableWrapped {
nullable_id,
@ -791,10 +785,10 @@ fn build_clone_tag_help<'a, 'ctx>(
let comparison = env
.builder
.build_is_null(tag_value.into_pointer_value(), "is_null");
.new_build_is_null(tag_value.into_pointer_value(), "is_null");
env.builder
.build_conditional_branch(comparison, null_block, switch_block);
.new_build_conditional_branch(comparison, null_block, switch_block);
{
let mut cases = Vec::with_capacity_in(other_tags.len(), env.arena);
@ -823,7 +817,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let cursors = Cursors {
offset: extra_offset,
extra_offset: env.builder.build_int_add(
extra_offset: env.builder.new_build_int_add(
extra_offset,
env.ptr_int().const_int(width as _, false),
"new_offset",
@ -837,7 +831,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let answer =
build_clone(env, layout_interner, layout_ids, ptr, cursors, data, layout);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
cases.push((id.get_type().const_int(i as u64, false), block));
}
@ -846,11 +840,11 @@ fn build_clone_tag_help<'a, 'ctx>(
match cases.pop() {
Some((_, default)) => {
env.builder.build_switch(id, default, &cases);
env.builder.new_build_switch(id, default, &cases);
}
None => {
// we're serializing an empty tag union; this code is effectively unreachable
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
}
}
@ -861,7 +855,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let value = env.ptr_int().const_zero();
build_copy(env, ptr, offset, value.into());
env.builder.build_return(Some(&extra_offset));
env.builder.new_build_return(Some(&extra_offset));
}
}
NullableUnwrapped { other_fields, .. } => {
@ -870,10 +864,10 @@ fn build_clone_tag_help<'a, 'ctx>(
let comparison = env
.builder
.build_is_null(tag_value.into_pointer_value(), "is_null");
.new_build_is_null(tag_value.into_pointer_value(), "is_null");
env.builder
.build_conditional_branch(comparison, null_block, other_block);
.new_build_conditional_branch(comparison, null_block, other_block);
{
env.builder.position_at_end(null_block);
@ -881,7 +875,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let value = env.ptr_int().const_zero();
build_copy(env, ptr, offset, value.into());
env.builder.build_return(Some(&extra_offset));
env.builder.new_build_return(Some(&extra_offset));
}
{
@ -895,7 +889,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let cursors = Cursors {
offset: extra_offset,
extra_offset: env.builder.build_int_add(
extra_offset: env.builder.new_build_int_add(
extra_offset,
env.ptr_int()
.const_int(layout.stack_size(layout_interner) as _, false),
@ -914,7 +908,7 @@ fn build_clone_tag_help<'a, 'ctx>(
let answer =
build_clone(env, layout_interner, layout_ids, ptr, cursors, data, layout);
env.builder.build_return(Some(&answer));
env.builder.new_build_return(Some(&answer));
}
}
}
@ -935,12 +929,12 @@ fn write_pointer_with_tag_id<'a, 'ctx>(
// increment offset by 4
let four = env.ptr_int().const_int(4, false);
let offset = env.builder.build_int_add(offset, four, "");
let offset = env.builder.new_build_int_add(offset, four, "");
// cast to u32
let extra_offset = env
.builder
.build_int_cast(extra_offset, env.context.i32_type(), "");
.new_build_int_cast(extra_offset, env.context.i32_type(), "");
build_copy(env, ptr, offset, extra_offset.into());
} else {
@ -966,12 +960,12 @@ fn build_copy<'ctx>(
let ptr_type = value.get_type().ptr_type(AddressSpace::default());
let ptr = env
.builder
.build_pointer_cast(ptr, ptr_type, "cast_ptr_type");
.new_build_pointer_cast(ptr, ptr_type, "cast_ptr_type");
env.builder.build_store(ptr, value);
env.builder.new_build_store(ptr, value);
let width = value.get_type().size_of().unwrap();
env.builder.build_int_add(offset, width, "new_offset")
env.builder.new_build_int_add(offset, width, "new_offset")
}
fn build_clone_builtin<'a, 'ctx>(
@ -1024,7 +1018,7 @@ fn build_clone_builtin<'a, 'ctx>(
let (element_width, _element_align) = layout_interner.stack_size_and_alignment(elem);
let element_width = env.ptr_int().const_int(element_width as _, false);
let elements_width = bd.build_int_mul(element_width, len, "elements_width");
let elements_width = bd.new_build_int_mul(element_width, len, "elements_width");
// We clone the elements into the extra_offset address.
let _ = offset;
@ -1033,41 +1027,44 @@ fn build_clone_builtin<'a, 'ctx>(
if layout_interner.safe_to_memcpy(elem) {
// NOTE we are not actually sure the dest is properly aligned
let dest = pointer_at_offset(bd, env.context.i8_type(), ptr, elements_start_offset);
let src = bd.build_pointer_cast(
let src = bd.new_build_pointer_cast(
elements,
env.context.i8_type().ptr_type(AddressSpace::default()),
"to_bytes_pointer",
);
bd.build_memcpy(dest, 1, src, 1, elements_width).unwrap();
bd.build_int_add(elements_start_offset, elements_width, "new_offset")
bd.new_build_int_add(elements_start_offset, elements_width, "new_offset")
} else {
let element_type =
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(elem));
let elements = bd.build_pointer_cast(
let elements = bd.new_build_pointer_cast(
elements,
element_type.ptr_type(AddressSpace::default()),
"elements",
);
// if the element has any pointers, we clone them to this offset
let rest_offset = bd.build_alloca(env.ptr_int(), "rest_offset");
let rest_offset = bd.new_build_alloca(env.ptr_int(), "rest_offset");
let element_stack_size = env
.ptr_int()
.const_int(layout_interner.stack_size(elem) as u64, false);
let rest_start_offset = bd.build_int_add(
let rest_start_offset = bd.new_build_int_add(
cursors.extra_offset,
bd.build_int_mul(len, element_stack_size, "elements_width"),
bd.new_build_int_mul(len, element_stack_size, "elements_width"),
"rest_start_offset",
);
bd.build_store(rest_offset, rest_start_offset);
bd.new_build_store(rest_offset, rest_start_offset);
let body = |layout_interner: &STLayoutInterner<'a>, index, element| {
let current_offset =
bd.build_int_mul(element_stack_size, index, "current_offset");
let current_offset =
bd.build_int_add(elements_start_offset, current_offset, "current_offset");
bd.new_build_int_mul(element_stack_size, index, "current_offset");
let current_offset = bd.new_build_int_add(
elements_start_offset,
current_offset,
"current_offset",
);
let current_extra_offset =
bd.new_build_load(env.ptr_int(), rest_offset, "element_offset");
@ -1090,7 +1087,7 @@ fn build_clone_builtin<'a, 'ctx>(
elem_layout,
);
bd.build_store(rest_offset, new_offset);
bd.new_build_store(rest_offset, new_offset);
};
let parent = env

View file

@ -59,7 +59,7 @@ pub fn add_default_roc_externs(env: &Env<'_, '_, '_>) {
.build_array_malloc(ctx.i8_type(), size_arg.into_int_value(), "call_malloc")
.unwrap();
builder.build_return(Some(&retval));
builder.new_build_return(Some(&retval));
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
@ -115,7 +115,7 @@ pub fn add_default_roc_externs(env: &Env<'_, '_, '_>) {
builder.position_at_end(entry);
// Call libc realloc()
let call = builder.build_call(
let call = builder.new_build_call(
libc_realloc_val,
&[ptr_arg.into(), new_size_arg.into()],
"call_libc_realloc",
@ -125,7 +125,7 @@ pub fn add_default_roc_externs(env: &Env<'_, '_, '_>) {
let retval = call.try_as_basic_value().left().unwrap();
builder.build_return(Some(&retval));
builder.new_build_return(Some(&retval));
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
@ -151,9 +151,9 @@ pub fn add_default_roc_externs(env: &Env<'_, '_, '_>) {
builder.position_at_end(entry);
// Call libc free()
builder.build_free(ptr_arg.into_pointer_value());
builder.new_build_free(ptr_arg.into_pointer_value());
builder.build_return(None);
builder.new_build_return(None);
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
@ -188,7 +188,7 @@ fn unreachable_function(env: &Env, name: &str) {
env.builder.position_at_end(entry);
env.builder.build_unreachable();
env.builder.new_build_unreachable();
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
@ -243,31 +243,32 @@ pub fn add_sjlj_roc_panic(env: &Env<'_, '_, '_>) {
};
env.builder
.build_store(get_panic_msg_ptr(env), loaded_roc_str);
.new_build_store(get_panic_msg_ptr(env), loaded_roc_str);
}
// write the panic tag.
// increment by 1, since the tag we'll get from the Roc program is 0-based,
// but we use 0 for marking a successful call.
{
let cast_tag_id = builder.build_int_z_extend(
let cast_tag_id = builder.new_build_int_z_extend(
tag_id_arg.into_int_value(),
env.context.i64_type(),
"zext_panic_tag",
);
let inc_tag_id = builder.build_int_add(
let inc_tag_id = builder.new_build_int_add(
cast_tag_id,
env.context.i64_type().const_int(1, false),
"inc_panic_tag",
);
env.builder.build_store(get_panic_tag_ptr(env), inc_tag_id);
env.builder
.new_build_store(get_panic_tag_ptr(env), inc_tag_id);
}
build_longjmp_call(env);
builder.build_unreachable();
builder.new_build_unreachable();
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
@ -284,7 +285,7 @@ pub fn build_longjmp_call(env: &Env) {
call_void_bitcode_fn(env, &[jmp_buf.into(), tag.into()], bitcode::UTILS_LONGJMP);
} else {
// Call the LLVM-intrinsic longjmp: `void @llvm.eh.sjlj.longjmp(i8* %setjmp_buf)`
let jmp_buf_i8p = env.builder.build_pointer_cast(
let jmp_buf_i8p = env.builder.new_build_pointer_cast(
jmp_buf,
env.context.i8_type().ptr_type(AddressSpace::default()),
"jmp_buf i8*",


@ -7,7 +7,9 @@ use inkwell::{
use roc_mono::layout::{InLayout, LambdaName, LayoutInterner, STLayoutInterner};
use super::{
build::{function_value_by_func_spec, Env, FuncBorrowSpec, FunctionSpec, RocReturn},
build::{
function_value_by_func_spec, BuilderExt, Env, FuncBorrowSpec, FunctionSpec, RocReturn,
},
convert::{argument_type_from_layout, basic_type_from_layout},
};
@ -43,6 +45,6 @@ pub fn cast_to_function_ptr_type<'ctx>(
function_pointer_type: PointerType<'ctx>,
) -> PointerValue<'ctx> {
env.builder
.build_bitcast(pointer, function_pointer_type, "cast_to_function_ptr")
.new_build_bitcast(pointer, function_pointer_type, "cast_to_function_ptr")
.into_pointer_value()
}


@ -142,7 +142,7 @@ pub(crate) fn add_intrinsics<'ctx>(ctx: &'ctx Context, module: &Module<'ctx>) {
pub static LLVM_MEMSET_I64: &str = "llvm.memset.p0i8.i64";
pub static LLVM_MEMSET_I32: &str = "llvm.memset.p0i8.i32";
pub static LLVM_FRAME_ADDRESS: &str = "llvm.frameaddress.p0i8";
pub static LLVM_FRAME_ADDRESS: &str = "llvm.frameaddress.p0";
pub static LLVM_STACK_SAVE: &str = "llvm.stacksave";
pub static LLVM_SETJMP: &str = "llvm.eh.sjlj.setjmp";


@ -4,7 +4,7 @@ use inkwell::{
types::{BasicType, BasicTypeEnum, IntType},
values::{
BasicValue, BasicValueEnum, FloatValue, FunctionValue, InstructionOpcode, IntValue,
PointerValue, StructValue,
StructValue,
},
AddressSpace, IntPredicate,
};
@ -20,7 +20,7 @@ use roc_mono::{
},
list_element_layout,
};
use roc_target::PtrWidth;
use roc_target::{PtrWidth, TargetInfo};
use crate::llvm::{
bitcode::{
@ -270,7 +270,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
)
.ptr_type(AddressSpace::default());
let roc_return_alloca = env.builder.build_pointer_cast(
let roc_return_alloca = env.builder.new_build_pointer_cast(
zig_return_alloca,
roc_return_type,
"cast_to_roc",
@ -389,7 +389,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
let result_type = env.module.get_struct_type("str.FromUtf8Result").unwrap();
let result_ptr = env
.builder
.build_alloca(result_type, "alloca_utf8_validate_bytes_result");
.new_build_alloca(result_type, "alloca_utf8_validate_bytes_result");
match env.target_info.ptr_width() {
PtrWidth::Bytes4 => {
@ -482,11 +482,14 @@ pub(crate) fn run_low_level<'a, 'ctx>(
.into_int_value();
// cast the length to the usize width of the current build target
let byte_count =
env.builder
.build_int_cast_sign_flag(length, env.ptr_int(), false, "len_as_usize");
let byte_count = env.builder.new_build_int_cast_sign_flag(
length,
env.ptr_int(),
false,
"len_as_usize",
);
let is_zero = env.builder.build_int_compare(
let is_zero = env.builder.new_build_int_compare(
IntPredicate::EQ,
byte_count,
env.ptr_int().const_zero(),
@ -510,21 +513,16 @@ pub(crate) fn run_low_level<'a, 'ctx>(
// Str.getScalarUnsafe : Str, Nat -> { bytesParsed : Nat, scalar : U32 }
arguments!(string, index);
let roc_return_type =
basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout));
use roc_target::OperatingSystem::*;
match env.target_info.operating_system {
Windows => {
let zig_return_type = env
.module
.get_function(bitcode::STR_GET_SCALAR_UNSAFE)
.unwrap()
.get_type()
.get_param_types()[0]
.into_pointer_type()
.get_element_type();
let result = env
.builder
.build_alloca(BasicTypeEnum::try_from(zig_return_type).unwrap(), "result");
let result = env.builder.new_build_alloca(
BasicTypeEnum::try_from(roc_return_type).unwrap(),
"result",
);
call_void_bitcode_fn(
env,
@ -532,12 +530,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
bitcode::STR_GET_SCALAR_UNSAFE,
);
let roc_return_type = basic_type_from_layout(
env,
layout_interner,
layout_interner.get_repr(layout),
);
let cast_result = env.builder.build_pointer_cast(
let cast_result = env.builder.new_build_pointer_cast(
result,
roc_return_type.ptr_type(AddressSpace::default()),
"cast",
@ -555,18 +548,15 @@ pub(crate) fn run_low_level<'a, 'ctx>(
bitcode::STR_GET_SCALAR_UNSAFE,
);
// on 32-bit targets, zig bitpacks the struct
match env.target_info.ptr_width() {
PtrWidth::Bytes8 => result,
PtrWidth::Bytes4 => {
let to = basic_type_from_layout(
env,
layout_interner,
layout_interner.get_repr(layout),
);
complex_bitcast_check_size(env, result, to, "to_roc_record")
}
}
// zig will pad the struct to the alignment boundary, or bitpack it on 32-bit
// targets. So we have to cast it to the format that the roc code expects
let alloca = env
.builder
.new_build_alloca(result.get_type(), "to_roc_record");
env.builder.new_build_store(alloca, result);
env.builder
.new_build_load(roc_return_type, alloca, "to_roc_record")
}
Wasi => unimplemented!(),
}
@ -1057,7 +1047,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
match lhs_builtin {
Int(int_width) => {
let are_equal = env.builder.build_int_compare(
let are_equal = env.builder.new_build_int_compare(
IntPredicate::EQ,
lhs_arg.into_int_value(),
rhs_arg.into_int_value(),
@ -1070,18 +1060,21 @@ pub(crate) fn run_low_level<'a, 'ctx>(
IntPredicate::ULT
};
let is_less_than = env.builder.build_int_compare(
let is_less_than = env.builder.new_build_int_compare(
predicate,
lhs_arg.into_int_value(),
rhs_arg.into_int_value(),
"int_compare",
);
let step1 =
env.builder
.build_select(is_less_than, tag_lt, tag_gt, "lt_or_gt");
let step1 = env.builder.new_build_select(
is_less_than,
tag_lt,
tag_gt,
"lt_or_gt",
);
env.builder.build_select(
env.builder.new_build_select(
are_equal,
tag_eq,
step1.into_int_value(),
@ -1089,24 +1082,27 @@ pub(crate) fn run_low_level<'a, 'ctx>(
)
}
Float(_) => {
let are_equal = env.builder.build_float_compare(
let are_equal = env.builder.new_build_float_compare(
FloatPredicate::OEQ,
lhs_arg.into_float_value(),
rhs_arg.into_float_value(),
"float_eq",
);
let is_less_than = env.builder.build_float_compare(
let is_less_than = env.builder.new_build_float_compare(
FloatPredicate::OLT,
lhs_arg.into_float_value(),
rhs_arg.into_float_value(),
"float_compare",
);
let step1 =
env.builder
.build_select(is_less_than, tag_lt, tag_gt, "lt_or_gt");
let step1 = env.builder.new_build_select(
is_less_than,
tag_lt,
tag_gt,
"lt_or_gt",
);
env.builder.build_select(
env.builder.new_build_select(
are_equal,
tag_eq,
step1.into_int_value(),
@ -1177,7 +1173,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
// LLVM shift intrinsics expect the left and right sides to have the same type, so
// here we cast up `rhs` to the lhs type. Since the rhs was checked to be a U8,
// this cast isn't lossy.
let rhs_arg = env.builder.build_int_cast(
let rhs_arg = env.builder.new_build_int_cast(
rhs_arg.into_int_value(),
lhs_arg.get_type().into_int_type(),
"cast_for_shift",
@ -1205,7 +1201,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
let to_signed = intwidth_from_layout(layout).is_signed();
env.builder
.build_int_cast_sign_flag(arg.into_int_value(), to, to_signed, "inc_cast")
.new_build_int_cast_sign_flag(arg.into_int_value(), to, to_signed, "inc_cast")
.into()
}
NumToFloatCast => {
@ -1224,11 +1220,11 @@ pub(crate) fn run_low_level<'a, 'ctx>(
if width.is_signed() {
env.builder
.build_signed_int_to_float(int_val, dest, "signed_int_to_float")
.new_build_signed_int_to_float(int_val, dest, "signed_int_to_float")
.into()
} else {
env.builder
.build_unsigned_int_to_float(int_val, dest, "unsigned_int_to_float")
.new_build_unsigned_int_to_float(int_val, dest, "unsigned_int_to_float")
.into()
}
}
@ -1242,7 +1238,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
.into_float_type();
env.builder
.build_float_cast(arg.into_float_value(), dest, "cast_float_to_float")
.new_build_float_cast(arg.into_float_value(), dest, "cast_float_to_float")
.into()
}
LayoutRepr::Builtin(Builtin::Decimal) => {
@ -1292,7 +1288,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
// The (&&) operator
arguments!(lhs_arg, rhs_arg);
let bool_val = env.builder.build_and(
let bool_val = env.builder.new_build_and(
lhs_arg.into_int_value(),
rhs_arg.into_int_value(),
"bool_and",
@ -1304,7 +1300,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
// The (||) operator
arguments!(lhs_arg, rhs_arg);
let bool_val = env.builder.build_or(
let bool_val = env.builder.new_build_or(
lhs_arg.into_int_value(),
rhs_arg.into_int_value(),
"bool_or",
@ -1316,7 +1312,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
// The (!) operator
arguments!(arg);
let bool_val = env.builder.build_not(arg.into_int_value(), "bool_not");
let bool_val = env.builder.new_build_not(arg.into_int_value(), "bool_not");
BasicValueEnum::IntValue(bool_val)
}
Hash => {
@ -1341,14 +1337,14 @@ pub(crate) fn run_low_level<'a, 'ctx>(
debug_assert!(data_ptr.is_pointer_value());
env.builder
.build_pointer_cast(data_ptr.into_pointer_value(), target_type, "ptr_cast")
.new_build_pointer_cast(data_ptr.into_pointer_value(), target_type, "ptr_cast")
.into()
}
PtrStore => {
arguments!(ptr, value);
env.builder.build_store(ptr.into_pointer_value(), value);
env.builder.new_build_store(ptr.into_pointer_value(), value);
// ptr
env.context.struct_type(&[], false).const_zero().into()
@ -1377,7 +1373,7 @@ pub(crate) fn run_low_level<'a, 'ctx>(
RefCountIsUnique => {
arguments_with_layouts!((data_ptr, data_layout));
let ptr = env.builder.build_pointer_cast(
let ptr = env.builder.new_build_pointer_cast(
data_ptr.into_pointer_value(),
env.context.i8_type().ptr_type(AddressSpace::default()),
"cast_to_i8_ptr",
@ -1413,8 +1409,10 @@ pub(crate) fn run_low_level<'a, 'ctx>(
layout_interner,
layout_interner.get_repr(layout),
);
let ptr = env.builder.build_alloca(basic_type, "unreachable_alloca");
env.builder.build_store(ptr, basic_type.const_zero());
let ptr = env
.builder
.new_build_alloca(basic_type, "unreachable_alloca");
env.builder.new_build_store(ptr, basic_type.const_zero());
ptr.into()
}
@ -1459,7 +1457,7 @@ fn build_int_binop<'ctx>(
throw_on_overflow(env, parent, result, "integer addition overflowed!")
}
NumAddWrap => bd.build_int_add(lhs, rhs, "add_int_wrap").into(),
NumAddWrap => bd.new_build_int_add(lhs, rhs, "add_int_wrap").into(),
NumAddChecked => {
let with_overflow = env.call_intrinsic(
&LLVM_ADD_WITH_OVERFLOW[int_width],
@ -1490,7 +1488,7 @@ fn build_int_binop<'ctx>(
throw_on_overflow(env, parent, result, "integer subtraction overflowed!")
}
NumSubWrap => bd.build_int_sub(lhs, rhs, "sub_int").into(),
NumSubWrap => bd.new_build_int_sub(lhs, rhs, "sub_int").into(),
NumSubChecked => {
let with_overflow = env.call_intrinsic(
&LLVM_SUB_WITH_OVERFLOW[int_width],
@ -1521,7 +1519,7 @@ fn build_int_binop<'ctx>(
throw_on_overflow(env, parent, result, "integer multiplication overflowed!")
}
NumMulWrap => bd.build_int_mul(lhs, rhs, "mul_int").into(),
NumMulWrap => bd.new_build_int_mul(lhs, rhs, "mul_int").into(),
NumMulSaturated => call_bitcode_fn(
env,
&[lhs.into(), rhs.into()],
@ -1546,37 +1544,37 @@ fn build_int_binop<'ctx>(
}
NumGt => {
if int_width.is_signed() {
bd.build_int_compare(SGT, lhs, rhs, "gt_int").into()
bd.new_build_int_compare(SGT, lhs, rhs, "gt_int").into()
} else {
bd.build_int_compare(UGT, lhs, rhs, "gt_uint").into()
bd.new_build_int_compare(UGT, lhs, rhs, "gt_uint").into()
}
}
NumGte => {
if int_width.is_signed() {
bd.build_int_compare(SGE, lhs, rhs, "gte_int").into()
bd.new_build_int_compare(SGE, lhs, rhs, "gte_int").into()
} else {
bd.build_int_compare(UGE, lhs, rhs, "gte_uint").into()
bd.new_build_int_compare(UGE, lhs, rhs, "gte_uint").into()
}
}
NumLt => {
if int_width.is_signed() {
bd.build_int_compare(SLT, lhs, rhs, "lt_int").into()
bd.new_build_int_compare(SLT, lhs, rhs, "lt_int").into()
} else {
bd.build_int_compare(ULT, lhs, rhs, "lt_uint").into()
bd.new_build_int_compare(ULT, lhs, rhs, "lt_uint").into()
}
}
NumLte => {
if int_width.is_signed() {
bd.build_int_compare(SLE, lhs, rhs, "lte_int").into()
bd.new_build_int_compare(SLE, lhs, rhs, "lte_int").into()
} else {
bd.build_int_compare(ULE, lhs, rhs, "lte_uint").into()
bd.new_build_int_compare(ULE, lhs, rhs, "lte_uint").into()
}
}
NumRemUnchecked => {
if int_width.is_signed() {
bd.build_int_signed_rem(lhs, rhs, "rem_int").into()
bd.new_build_int_signed_rem(lhs, rhs, "rem_int").into()
} else {
bd.build_int_unsigned_rem(lhs, rhs, "rem_uint").into()
bd.new_build_int_unsigned_rem(lhs, rhs, "rem_uint").into()
}
}
NumIsMultipleOf => {
@ -1606,44 +1604,44 @@ fn build_int_binop<'ctx>(
let cont_block = env.context.append_basic_block(parent, "branchcont");
if is_signed {
bd.build_switch(
bd.new_build_switch(
rhs,
default_block,
&[(zero, special_block), (neg_1, special_block)],
)
} else {
bd.build_switch(rhs, default_block, &[(zero, special_block)])
bd.new_build_switch(rhs, default_block, &[(zero, special_block)])
};
let condition_rem = {
bd.position_at_end(default_block);
let rem = if is_signed {
bd.build_int_signed_rem(lhs, rhs, "int_rem")
bd.new_build_int_signed_rem(lhs, rhs, "int_rem")
} else {
bd.build_int_unsigned_rem(lhs, rhs, "uint_rem")
bd.new_build_int_unsigned_rem(lhs, rhs, "uint_rem")
};
let result = bd.build_int_compare(IntPredicate::EQ, rem, zero, "is_zero_rem");
let result = bd.new_build_int_compare(IntPredicate::EQ, rem, zero, "is_zero_rem");
bd.build_unconditional_branch(cont_block);
bd.new_build_unconditional_branch(cont_block);
result
};
let condition_special = {
bd.position_at_end(special_block);
let is_zero = bd.build_int_compare(IntPredicate::EQ, lhs, zero, "is_zero_lhs");
let is_zero = bd.new_build_int_compare(IntPredicate::EQ, lhs, zero, "is_zero_lhs");
let result = if is_signed {
let is_neg_one =
bd.build_int_compare(IntPredicate::EQ, rhs, neg_1, "is_neg_one_rhs");
bd.new_build_int_compare(IntPredicate::EQ, rhs, neg_1, "is_neg_one_rhs");
bd.build_or(is_neg_one, is_zero, "cond")
bd.new_build_or(is_neg_one, is_zero, "cond")
} else {
is_zero
};
bd.build_unconditional_branch(cont_block);
bd.new_build_unconditional_branch(cont_block);
result
};
@ -1651,7 +1649,7 @@ fn build_int_binop<'ctx>(
{
bd.position_at_end(cont_block);
let phi = bd.build_phi(env.context.bool_type(), "branch");
let phi = bd.new_build_phi(env.context.bool_type(), "branch");
phi.add_incoming(&[
(&condition_rem, default_block),
@ -1668,9 +1666,9 @@ fn build_int_binop<'ctx>(
),
NumDivTruncUnchecked => {
if int_width.is_signed() {
bd.build_int_signed_div(lhs, rhs, "div_int").into()
bd.new_build_int_signed_div(lhs, rhs, "div_int").into()
} else {
bd.build_int_unsigned_div(lhs, rhs, "div_uint").into()
bd.new_build_int_unsigned_div(lhs, rhs, "div_uint").into()
}
}
NumDivCeilUnchecked => call_bitcode_fn(
@ -1678,15 +1676,15 @@ fn build_int_binop<'ctx>(
&[lhs.into(), rhs.into()],
&bitcode::NUM_DIV_CEIL[int_width],
),
NumBitwiseAnd => bd.build_and(lhs, rhs, "int_bitwise_and").into(),
NumBitwiseXor => bd.build_xor(lhs, rhs, "int_bitwise_xor").into(),
NumBitwiseOr => bd.build_or(lhs, rhs, "int_bitwise_or").into(),
NumShiftLeftBy => bd.build_left_shift(lhs, rhs, "int_shift_left").into(),
NumBitwiseAnd => bd.new_build_and(lhs, rhs, "int_bitwise_and").into(),
NumBitwiseXor => bd.new_build_xor(lhs, rhs, "int_bitwise_xor").into(),
NumBitwiseOr => bd.new_build_or(lhs, rhs, "int_bitwise_or").into(),
NumShiftLeftBy => bd.new_build_left_shift(lhs, rhs, "int_shift_left").into(),
NumShiftRightBy => bd
.build_right_shift(lhs, rhs, true, "int_shift_right")
.new_build_right_shift(lhs, rhs, true, "int_shift_right")
.into(),
NumShiftRightZfBy => bd
.build_right_shift(lhs, rhs, false, "int_shift_right_zf")
.new_build_right_shift(lhs, rhs, false, "int_shift_right_zf")
.into(),
_ => {
@ -1767,16 +1765,16 @@ fn build_float_binop<'ctx>(
let bd = env.builder;
match op {
NumAdd => bd.build_float_add(lhs, rhs, "add_float").into(),
NumAdd => bd.new_build_float_add(lhs, rhs, "add_float").into(),
NumAddChecked => {
let context = env.context;
let result = bd.build_float_add(lhs, rhs, "add_float");
let result = bd.new_build_float_add(lhs, rhs, "add_float");
let is_finite =
call_bitcode_fn(env, &[result.into()], &bitcode::NUM_IS_FINITE[float_width])
.into_int_value();
let is_infinite = bd.build_not(is_finite, "negate");
let is_infinite = bd.new_build_not(is_finite, "negate");
let struct_type = context.struct_type(
&[context.f64_type().into(), context.bool_type().into()],
@ -1796,16 +1794,16 @@ fn build_float_binop<'ctx>(
struct_value.into()
}
NumAddWrap => unreachable!("wrapping addition is not defined on floats"),
NumSub => bd.build_float_sub(lhs, rhs, "sub_float").into(),
NumSub => bd.new_build_float_sub(lhs, rhs, "sub_float").into(),
NumSubChecked => {
let context = env.context;
let result = bd.build_float_sub(lhs, rhs, "sub_float");
let result = bd.new_build_float_sub(lhs, rhs, "sub_float");
let is_finite =
call_bitcode_fn(env, &[result.into()], &bitcode::NUM_IS_FINITE[float_width])
.into_int_value();
let is_infinite = bd.build_not(is_finite, "negate");
let is_infinite = bd.new_build_not(is_finite, "negate");
let struct_type = context.struct_type(
&[context.f64_type().into(), context.bool_type().into()],
@ -1825,17 +1823,17 @@ fn build_float_binop<'ctx>(
struct_value.into()
}
NumSubWrap => unreachable!("wrapping subtraction is not defined on floats"),
NumMul => bd.build_float_mul(lhs, rhs, "mul_float").into(),
NumMulSaturated => bd.build_float_mul(lhs, rhs, "mul_float").into(),
NumMul => bd.new_build_float_mul(lhs, rhs, "mul_float").into(),
NumMulSaturated => bd.new_build_float_mul(lhs, rhs, "mul_float").into(),
NumMulChecked => {
let context = env.context;
let result = bd.build_float_mul(lhs, rhs, "mul_float");
let result = bd.new_build_float_mul(lhs, rhs, "mul_float");
let is_finite =
call_bitcode_fn(env, &[result.into()], &bitcode::NUM_IS_FINITE[float_width])
.into_int_value();
let is_infinite = bd.build_not(is_finite, "negate");
let is_infinite = bd.new_build_not(is_finite, "negate");
let struct_type = context.struct_type(
&[context.f64_type().into(), context.bool_type().into()],
@ -1855,11 +1853,15 @@ fn build_float_binop<'ctx>(
struct_value.into()
}
NumMulWrap => unreachable!("wrapping multiplication is not defined on floats"),
NumGt => bd.build_float_compare(OGT, lhs, rhs, "float_gt").into(),
NumGte => bd.build_float_compare(OGE, lhs, rhs, "float_gte").into(),
NumLt => bd.build_float_compare(OLT, lhs, rhs, "float_lt").into(),
NumLte => bd.build_float_compare(OLE, lhs, rhs, "float_lte").into(),
NumDivFrac => bd.build_float_div(lhs, rhs, "div_float").into(),
NumGt => bd.new_build_float_compare(OGT, lhs, rhs, "float_gt").into(),
NumGte => bd
.new_build_float_compare(OGE, lhs, rhs, "float_gte")
.into(),
NumLt => bd.new_build_float_compare(OLT, lhs, rhs, "float_lt").into(),
NumLte => bd
.new_build_float_compare(OLE, lhs, rhs, "float_lte")
.into(),
NumDivFrac => bd.new_build_float_div(lhs, rhs, "div_float").into(),
NumPow => call_bitcode_fn(
env,
&[lhs.into(), rhs.into()],
@ -1882,7 +1884,7 @@ fn throw_on_overflow<'ctx>(
let has_overflowed = bd.build_extract_value(result, 1, "has_overflowed").unwrap();
let condition = bd.build_int_compare(
let condition = bd.new_build_int_compare(
IntPredicate::EQ,
has_overflowed.into_int_value(),
context.bool_type().const_zero(),
@ -1892,7 +1894,7 @@ fn throw_on_overflow<'ctx>(
let then_block = context.append_basic_block(parent, "then_block");
let throw_block = context.append_basic_block(parent, "throw_block");
bd.build_conditional_branch(condition, then_block, throw_block);
bd.new_build_conditional_branch(condition, then_block, throw_block);
bd.position_at_end(throw_block);
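// Aside (rough Rust analog, hypothetical names): the IR above extracts the
// overflow flag from an LLVM *.with.overflow result and branches to a throw
// block when it is set, which is equivalent to this checked addition.
fn add_or_throw(lhs: i64, rhs: i64) -> i64 {
    let (result, has_overflowed) = lhs.overflowing_add(rhs);
    if has_overflowed {
        panic!("integer addition overflowed!");
    }
    result
}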
@ -1952,10 +1954,10 @@ fn throw_because_overflow(env: &Env<'_, '_, '_>, message: &str) {
env.builder.position_at_end(block);
env.builder.set_current_debug_location(di_location);
let call = env.builder.build_call(function, &[], "overflow");
let call = env.builder.new_build_call(function, &[], "overflow");
call.set_call_convention(FAST_CALL_CONV);
env.builder.build_unreachable();
env.builder.new_build_unreachable();
}
fn dec_split_into_words<'ctx>(
@ -1967,50 +1969,73 @@ fn dec_split_into_words<'ctx>(
let left_bits_i128 = env
.builder
.build_right_shift(value, int_64, false, "left_bits_i128");
.new_build_right_shift(value, int_64, false, "left_bits_i128");
(
env.builder.build_int_cast(value, int_64_type, ""),
env.builder.build_int_cast(left_bits_i128, int_64_type, ""),
env.builder.new_build_int_cast(value, int_64_type, ""),
env.builder
.new_build_int_cast(left_bits_i128, int_64_type, ""),
)
}
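// Aside (illustration only, hypothetical helper name): the IR built by
// dec_split_into_words mirrors this plain-Rust split of a 128-bit Dec into
// the low and high 64-bit words that the Zig builtins take on x86 unix.
fn split_dec_into_words(value: i128) -> (u64, u64) {
    let low = value as u64; // truncating cast, like the first int cast above
    let high = ((value as u128) >> 64) as u64; // logical shift right by 64 bits
    (low, high)
}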
fn dec_alloca<'ctx>(env: &Env<'_, 'ctx, '_>, value: IntValue<'ctx>) -> PointerValue<'ctx> {
let dec_type = zig_dec_type(env);
fn dec_alloca<'ctx>(env: &Env<'_, 'ctx, '_>, value: IntValue<'ctx>) -> BasicValueEnum<'ctx> {
use roc_target::Architecture::*;
use roc_target::OperatingSystem::*;
match env.target_info.operating_system {
Windows => {
let dec_type = zig_dec_type(env);
let alloca = env.builder.build_alloca(dec_type, "dec_alloca");
let alloca = env.builder.new_build_alloca(dec_type, "dec_alloca");
let instruction = alloca.as_instruction_value().unwrap();
instruction.set_alignment(16).unwrap();
let instruction = alloca.as_instruction_value().unwrap();
instruction.set_alignment(16).unwrap();
let ptr = env.builder.build_pointer_cast(
alloca,
value.get_type().ptr_type(AddressSpace::default()),
"cast_to_i128_ptr",
);
let ptr = env.builder.new_build_pointer_cast(
alloca,
value.get_type().ptr_type(AddressSpace::default()),
"cast_to_i128_ptr",
);
env.builder.build_store(ptr, value);
env.builder.new_build_store(ptr, value);
alloca
alloca.into()
}
Unix => {
if matches!(env.target_info.architecture, X86_32 | X86_64) {
internal_error!("X86 unix does not pass a Dec via an alloca; instead it is split into high and low halves");
}
let i64_type = env.context.i64_type();
let alloca = env
.builder
.build_array_alloca(i64_type, i64_type.const_int(2, false), "dec_alloca")
.unwrap();
let instruction = alloca.as_instruction_value().unwrap();
instruction.set_alignment(16).unwrap();
let ptr = env.builder.new_build_pointer_cast(
alloca,
value.get_type().ptr_type(AddressSpace::default()),
"cast_to_i128_ptr",
);
env.builder.new_build_store(ptr, value);
env.builder
.new_build_load(i64_type.array_type(2), alloca, "load as array")
.into()
}
Wasi => unimplemented!(),
}
}
fn dec_to_str<'ctx>(env: &Env<'_, 'ctx, '_>, dec: BasicValueEnum<'ctx>) -> BasicValueEnum<'ctx> {
use roc_target::Architecture::*;
use roc_target::OperatingSystem::*;
let dec = dec.into_int_value();
match env.target_info.operating_system {
Windows => {
//
call_str_bitcode_fn(
env,
&[],
&[dec_alloca(env, dec).into()],
BitcodeReturns::Str,
bitcode::DEC_TO_STR,
)
}
Unix => {
match env.target_info {
TargetInfo {
architecture: X86_64 | X86_32,
operating_system: Unix,
} => {
let (low, high) = dec_split_into_words(env, dec);
call_str_bitcode_fn(
@ -2021,7 +2046,13 @@ fn dec_to_str<'ctx>(env: &Env<'_, 'ctx, '_>, dec: BasicValueEnum<'ctx>) -> Basic
bitcode::DEC_TO_STR,
)
}
Wasi => unimplemented!(),
_ => call_str_bitcode_fn(
env,
&[],
&[dec_alloca(env, dec).into()],
BitcodeReturns::Str,
bitcode::DEC_TO_STR,
),
}
}
@ -2030,16 +2061,19 @@ fn dec_unary_op<'ctx>(
fn_name: &str,
dec: BasicValueEnum<'ctx>,
) -> BasicValueEnum<'ctx> {
use roc_target::Architecture::*;
use roc_target::OperatingSystem::*;
let dec = dec.into_int_value();
match env.target_info.operating_system {
Windows => call_bitcode_fn(env, &[dec_alloca(env, dec).into()], fn_name),
Unix => {
match env.target_info {
TargetInfo {
architecture: X86_64 | X86_32,
operating_system: Unix,
} => {
let (low, high) = dec_split_into_words(env, dec);
call_bitcode_fn(env, &[low.into(), high.into()], fn_name)
}
Wasi => unimplemented!(),
_ => call_bitcode_fn(env, &[dec_alloca(env, dec).into()], fn_name),
}
}
@ -2049,30 +2083,22 @@ fn dec_binop_with_overflow<'ctx>(
lhs: BasicValueEnum<'ctx>,
rhs: BasicValueEnum<'ctx>,
) -> StructValue<'ctx> {
use roc_target::Architecture::*;
use roc_target::OperatingSystem::*;
let lhs = lhs.into_int_value();
let rhs = rhs.into_int_value();
let return_type = zig_with_overflow_roc_dec(env);
let return_alloca = env.builder.build_alloca(return_type, "return_alloca");
let return_alloca = env.builder.new_build_alloca(return_type, "return_alloca");
match env.target_info.operating_system {
Windows => {
call_void_bitcode_fn(
env,
&[
return_alloca.into(),
dec_alloca(env, lhs).into(),
dec_alloca(env, rhs).into(),
],
fn_name,
);
}
Unix => {
match env.target_info {
TargetInfo {
architecture: X86_64 | X86_32,
operating_system: Unix,
} => {
let (lhs_low, lhs_high) = dec_split_into_words(env, lhs);
let (rhs_low, rhs_high) = dec_split_into_words(env, rhs);
call_void_bitcode_fn(
env,
&[
@ -2085,8 +2111,18 @@ fn dec_binop_with_overflow<'ctx>(
fn_name,
);
}
Wasi => unimplemented!(),
}
_ => {
call_void_bitcode_fn(
env,
&[
return_alloca.into(),
dec_alloca(env, lhs).into(),
dec_alloca(env, rhs).into(),
],
fn_name,
);
}
};
env.builder
.new_build_load(return_type, return_alloca, "load_dec")
@ -2099,24 +2135,19 @@ pub(crate) fn dec_binop_with_unchecked<'ctx>(
lhs: BasicValueEnum<'ctx>,
rhs: BasicValueEnum<'ctx>,
) -> BasicValueEnum<'ctx> {
use roc_target::Architecture::*;
use roc_target::OperatingSystem::*;
let lhs = lhs.into_int_value();
let rhs = rhs.into_int_value();
match env.target_info.operating_system {
Windows => {
// windows is much nicer for us here
call_bitcode_fn(
env,
&[dec_alloca(env, lhs).into(), dec_alloca(env, rhs).into()],
fn_name,
)
}
Unix => {
match env.target_info {
TargetInfo {
architecture: X86_64 | X86_32,
operating_system: Unix,
} => {
let (lhs_low, lhs_high) = dec_split_into_words(env, lhs);
let (rhs_low, rhs_high) = dec_split_into_words(env, rhs);
call_bitcode_fn(
env,
&[
@ -2128,7 +2159,11 @@ pub(crate) fn dec_binop_with_unchecked<'ctx>(
fn_name,
)
}
Wasi => unimplemented!(),
_ => call_bitcode_fn(
env,
&[dec_alloca(env, lhs).into(), dec_alloca(env, rhs).into()],
fn_name,
),
}
}
@ -2312,7 +2347,7 @@ fn build_int_unary_op<'a, 'ctx, 'env>(
LayoutRepr::Builtin(Builtin::Float(float_width)) => {
let target_float_type = convert::float_type_from_float_width(env, float_width);
bd.build_cast(
bd.new_build_cast(
InstructionOpcode::SIToFP,
arg,
target_float_type,
@ -2372,7 +2407,7 @@ fn build_int_unary_op<'a, 'ctx, 'env>(
let target_int_type = convert::int_type_from_int_width(env, target_int_width);
let target_int_val: BasicValueEnum<'ctx> = env
.builder
.build_int_cast_sign_flag(
.new_build_int_cast_sign_flag(
arg,
target_int_type,
target_int_width.is_signed(),
@ -2441,7 +2476,7 @@ fn build_int_unary_op<'a, 'ctx, 'env>(
)
.ptr_type(AddressSpace::default());
let roc_return_alloca = env.builder.build_pointer_cast(
let roc_return_alloca = env.builder.new_build_pointer_cast(
zig_return_alloca,
roc_return_type,
"cast_to_roc",
@ -2506,7 +2541,7 @@ fn int_neg_raise_on_overflow<'ctx>(
let builder = env.builder;
let min_val = int_type_signed_min(int_type);
let condition = builder.build_int_compare(IntPredicate::EQ, arg, min_val, "is_min_val");
let condition = builder.new_build_int_compare(IntPredicate::EQ, arg, min_val, "is_min_val");
let block = env.builder.get_insert_block().expect("to be in a function");
let parent = block.get_parent().expect("to be in a function");
@ -2514,7 +2549,7 @@ fn int_neg_raise_on_overflow<'ctx>(
let else_block = env.context.append_basic_block(parent, "else");
env.builder
.build_conditional_branch(condition, then_block, else_block);
.new_build_conditional_branch(condition, then_block, else_block);
builder.position_at_end(then_block);
@ -2526,7 +2561,7 @@ fn int_neg_raise_on_overflow<'ctx>(
builder.position_at_end(else_block);
builder.build_int_neg(arg, "negate_int").into()
builder.new_build_int_neg(arg, "negate_int").into()
}
fn int_abs_raise_on_overflow<'ctx>(
@ -2537,7 +2572,7 @@ fn int_abs_raise_on_overflow<'ctx>(
let builder = env.builder;
let min_val = int_type_signed_min(int_type);
let condition = builder.build_int_compare(IntPredicate::EQ, arg, min_val, "is_min_val");
let condition = builder.new_build_int_compare(IntPredicate::EQ, arg, min_val, "is_min_val");
let block = env.builder.get_insert_block().expect("to be in a function");
let parent = block.get_parent().expect("to be in a function");
@ -2545,7 +2580,7 @@ fn int_abs_raise_on_overflow<'ctx>(
let else_block = env.context.append_basic_block(parent, "else");
env.builder
.build_conditional_branch(condition, then_block, else_block);
.new_build_conditional_branch(condition, then_block, else_block);
builder.position_at_end(then_block);
@ -2577,16 +2612,16 @@ fn int_abs_with_overflow<'ctx>(
let shifted_alloca = {
let bits_to_shift = int_type.get_bit_width() as u64 - 1;
let shift_val = int_type.const_int(bits_to_shift, false);
let shifted = bd.build_right_shift(arg, shift_val, true, shifted_name);
let alloca = bd.build_alloca(int_type, "#int_abs_help");
let shifted = bd.new_build_right_shift(arg, shift_val, true, shifted_name);
let alloca = bd.new_build_alloca(int_type, "#int_abs_help");
// shifted = arg >>> 63
bd.build_store(alloca, shifted);
bd.new_build_store(alloca, shifted);
alloca
};
let xored_arg = bd.build_xor(
let xored_arg = bd.new_build_xor(
arg,
bd.new_build_load(int_type, shifted_alloca, shifted_name)
.into_int_value(),
@ -2594,7 +2629,7 @@ fn int_abs_with_overflow<'ctx>(
);
BasicValueEnum::IntValue(
bd.build_int_sub(
bd.new_build_int_sub(
xored_arg,
bd.new_build_load(int_type, shifted_alloca, shifted_name)
.into_int_value(),
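// Aside (illustrative sketch): the shift/xor/sub sequence above is the classic
// branch-free absolute value; `mask` is 0 for non-negative inputs and -1 for
// negative ones, so xor-then-subtract negates exactly when needed.
fn int_abs(arg: i64) -> i64 {
    let mask = arg >> 63; // arithmetic shift: 0 or -1
    (arg ^ mask).wrapping_sub(mask) // i64::MIN wraps, matching the overflow case handled separately
}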
@ -2617,7 +2652,7 @@ fn build_float_unary_op<'a, 'ctx>(
// TODO: Handle different sized floats
match op {
NumNeg => bd.build_float_neg(arg, "negate_float").into(),
NumNeg => bd.new_build_float_neg(arg, "negate_float").into(),
NumAbs => call_bitcode_fn(env, &[arg.into()], &bitcode::NUM_FABS[float_width]),
NumSqrtUnchecked => call_bitcode_fn(env, &[arg.into()], &bitcode::NUM_SQRT[float_width]),
NumLogUnchecked => call_bitcode_fn(env, &[arg.into()], &bitcode::NUM_LOG[float_width]),
@ -2628,13 +2663,13 @@ fn build_float_unary_op<'a, 'ctx>(
};
match (float_width, return_width) {
(FloatWidth::F32, FloatWidth::F32) => arg.into(),
(FloatWidth::F32, FloatWidth::F64) => bd.build_cast(
(FloatWidth::F32, FloatWidth::F64) => bd.new_build_cast(
InstructionOpcode::FPExt,
arg,
env.context.f64_type(),
"f32_to_f64",
),
(FloatWidth::F64, FloatWidth::F32) => bd.build_cast(
(FloatWidth::F64, FloatWidth::F32) => bd.new_build_cast(
InstructionOpcode::FPTrunc,
arg,
env.context.f32_type(),


@ -14,9 +14,7 @@ use bumpalo::collections::Vec;
use inkwell::basic_block::BasicBlock;
use inkwell::module::Linkage;
use inkwell::types::{AnyTypeEnum, BasicMetadataTypeEnum, BasicType, BasicTypeEnum};
use inkwell::values::{
BasicValueEnum, CallableValue, FunctionValue, InstructionValue, IntValue, PointerValue,
};
use inkwell::values::{BasicValueEnum, FunctionValue, InstructionValue, IntValue, PointerValue};
use inkwell::{AddressSpace, IntPredicate};
use roc_module::symbol::Interns;
use roc_module::symbol::Symbol;
@ -43,7 +41,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
// must make sure it's a pointer to usize
let refcount_type = env.ptr_int();
let value = env.builder.build_pointer_cast(
let value = env.builder.new_build_pointer_cast(
ptr,
refcount_type.ptr_type(AddressSpace::default()),
"to_refcount_ptr",
@ -62,7 +60,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
let refcount_ptr_type = refcount_type.ptr_type(AddressSpace::default());
let ptr_as_usize_ptr =
builder.build_pointer_cast(data_ptr, refcount_ptr_type, "as_usize_ptr");
builder.new_build_pointer_cast(data_ptr, refcount_ptr_type, "as_usize_ptr");
// get a pointer to index -1
let index_intvalue = refcount_type.const_int(-1_i64 as u64, false);
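// Aside (sketch under the heap layout assumed by this code): the refcount is
// one usize-sized word stored directly before the data, so indexing the data
// pointer at -1 reaches it.
fn refcount_ptr(data_ptr: *const u8) -> *const isize {
    (data_ptr as *const isize).wrapping_offset(-1)
}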
@ -92,7 +90,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
};
env.builder
.build_int_compare(IntPredicate::EQ, current, one, "is_one")
.new_build_int_compare(IntPredicate::EQ, current, one, "is_one")
}
fn get_refcount<'a, 'env>(&self, env: &Env<'a, 'ctx, 'env>) -> IntValue<'ctx> {
@ -102,7 +100,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
}
pub fn set_refcount<'a, 'env>(&self, env: &Env<'a, 'ctx, 'env>, refcount: IntValue<'ctx>) {
env.builder.build_store(self.value, refcount);
env.builder.new_build_store(self.value, refcount);
}
fn modify<'a, 'env>(
@ -171,7 +169,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
let call = env
.builder
.build_call(function, &[refcount_ptr.into()], fn_name);
.new_build_call(function, &[refcount_ptr.into()], fn_name);
call.set_call_convention(FAST_CALL_CONV);
}
@ -195,7 +193,7 @@ impl<'ctx> PointerToRefcount<'ctx> {
alignment,
);
builder.build_return(None);
builder.new_build_return(None);
}
pub fn deallocate<'a, 'env>(
@ -216,7 +214,7 @@ fn incref_pointer<'ctx>(
env,
&[
env.builder
.build_pointer_cast(
.new_build_pointer_cast(
pointer,
env.ptr_int().ptr_type(AddressSpace::default()),
"to_isize_ptr",
@ -238,7 +236,7 @@ fn free_pointer<'ctx>(
env,
&[
env.builder
.build_pointer_cast(
.new_build_pointer_cast(
pointer,
env.ptr_int().ptr_type(AddressSpace::default()),
"to_isize_ptr",
@ -256,7 +254,7 @@ fn decref_pointer<'ctx>(env: &Env<'_, 'ctx, '_>, pointer: PointerValue<'ctx>, al
env,
&[
env.builder
.build_pointer_cast(
.new_build_pointer_cast(
pointer,
env.ptr_int().ptr_type(AddressSpace::default()),
"to_isize_ptr",
@ -279,7 +277,7 @@ pub fn decref_pointer_check_null<'ctx>(
env,
&[
env.builder
.build_pointer_cast(
.new_build_pointer_cast(
pointer,
env.context.i8_type().ptr_type(AddressSpace::default()),
"to_i8_ptr",
@ -389,7 +387,7 @@ fn modify_refcount_struct_help<'a, 'ctx>(
}
}
// this function returns void
builder.build_return(None);
builder.new_build_return(None);
}
fn modify_refcount_erased<'a, 'ctx>(
@ -450,33 +448,34 @@ fn modify_refcount_erased_help<'ctx>(
arg_val.set_name(arg_symbol.as_str(&env.interns));
let refcounter = erased::load_refcounter(env, arg_val, mode);
let refcounter_is_null = env.builder.build_is_null(refcounter, "refcounter_unset");
let refcounter_is_null = env
.builder
.new_build_is_null(refcounter, "refcounter_unset");
let call_refcounter_block = ctx.append_basic_block(fn_val, "call_refcounter");
let noop_block = ctx.append_basic_block(fn_val, "noop");
builder.build_conditional_branch(refcounter_is_null, noop_block, call_refcounter_block);
builder.new_build_conditional_branch(refcounter_is_null, noop_block, call_refcounter_block);
{
builder.position_at_end(call_refcounter_block);
let value = erased::load(
env,
arg_val,
ErasedField::Value,
erased::opaque_ptr_type(env),
);
let opaque_ptr_type = erased::opaque_ptr_type(env);
let value = erased::load(env, arg_val, ErasedField::Value, opaque_ptr_type);
builder.build_call(
CallableValue::try_from(refcounter).unwrap(),
builder.new_build_indirect_call(
env.context
.void_type()
.fn_type(&[opaque_ptr_type.into()], false),
refcounter,
&[value.into()],
"call_refcounter",
);
builder.build_return(None);
builder.new_build_return(None);
}
{
builder.position_at_end(noop_block);
builder.build_return(None);
builder.new_build_return(None);
}
}
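// Aside (rough Rust analog, illustrative types): the erased refcounter is a
// type-erased, possibly-null function pointer; when it is set, it is called
// through its known signature on the erased value, otherwise this is a no-op.
type Refcounter = unsafe extern "C" fn(*mut u8);

fn call_refcounter(refcounter: Option<Refcounter>, value: *mut u8) {
    if let Some(f) = refcounter {
        // mirrors the call_refcounter block above; `None` mirrors the noop block
        unsafe { f(value) }
    }
}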
@ -598,7 +597,7 @@ fn modify_refcount_layout_help<'a, 'ctx>(
let bt = basic_type_from_layout(env, layout_interner, layout_interner.get_repr(layout));
// cast the i64 pointer to a pointer to a block of memory
let field_cast = env.builder.build_pointer_cast(
let field_cast = env.builder.new_build_pointer_cast(
value.into_pointer_value(),
bt.into_pointer_type(),
"i64_to_opaque",
@ -627,11 +626,11 @@ fn call_help<'ctx>(
let call = match call_mode {
CallMode::Inc(inc_amount) => {
env.builder
.build_call(function, &[value.into(), inc_amount.into()], "increment")
.new_build_call(function, &[value.into(), inc_amount.into()], "increment")
}
CallMode::Dec => env
.builder
.build_call(function, &[value.into()], "decrement"),
.new_build_call(function, &[value.into()], "decrement"),
};
call.set_call_convention(FAST_CALL_CONV);
@ -816,7 +815,7 @@ fn modify_refcount_list_help<'a, 'ctx>(
// We use the raw capacity to ensure we always decrement the refcount of seamless slices.
let capacity = list_capacity_or_ref_ptr(builder, original_wrapper);
let is_non_empty = builder.build_int_compare(
let is_non_empty = builder.new_build_int_compare(
IntPredicate::UGT,
capacity,
env.ptr_int().const_zero(),
@ -827,7 +826,7 @@ fn modify_refcount_list_help<'a, 'ctx>(
let modification_list_block = ctx.append_basic_block(parent, "modification_list_block");
let cont_block = ctx.append_basic_block(parent, "modify_rc_list_cont");
builder.build_conditional_branch(is_non_empty, modification_list_block, cont_block);
builder.new_build_conditional_branch(is_non_empty, modification_list_block, cont_block);
builder.position_at_end(modification_list_block);
@ -869,12 +868,12 @@ fn modify_refcount_list_help<'a, 'ctx>(
let call_mode = mode_to_call_mode(fn_val, mode);
refcount_ptr.modify(call_mode, layout, env, layout_interner);
builder.build_unconditional_branch(cont_block);
builder.new_build_unconditional_branch(cont_block);
builder.position_at_end(cont_block);
// this function returns void
builder.build_return(None);
builder.new_build_return(None);
}
fn modify_refcount_str<'a, 'ctx>(
@ -958,7 +957,7 @@ fn modify_refcount_str_help<'a, 'ctx>(
// Small strings have 1 as the first bit of capacity, making them negative.
// Thus, checking for big and non-empty only needs a signed capacity > 0.
let is_big_and_non_empty = builder.build_int_compare(
let is_big_and_non_empty = builder.new_build_int_compare(
IntPredicate::SGT,
capacity,
env.ptr_int().const_zero(),
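// Aside (sketch assuming the RocStr flag described above): the top bit of the
// capacity marks a small string, so reinterpreting the capacity as signed
// makes small strings negative and one signed comparison covers both checks.
fn is_big_and_non_empty(capacity: usize) -> bool {
    (capacity as isize) > 0
}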
@ -969,7 +968,7 @@ fn modify_refcount_str_help<'a, 'ctx>(
let cont_block = ctx.append_basic_block(parent, "modify_rc_str_cont");
let modification_block = ctx.append_basic_block(parent, "modify_rc");
builder.build_conditional_branch(is_big_and_non_empty, modification_block, cont_block);
builder.new_build_conditional_branch(is_big_and_non_empty, modification_block, cont_block);
builder.position_at_end(modification_block);
let refcount_ptr = PointerToRefcount::from_ptr_to_data(env, str_refcount_ptr(env, arg_val));
@ -981,12 +980,12 @@ fn modify_refcount_str_help<'a, 'ctx>(
layout_interner,
);
builder.build_unconditional_branch(cont_block);
builder.new_build_unconditional_branch(cont_block);
builder.position_at_end(cont_block);
// this function returns void
builder.build_return(None);
builder.new_build_return(None);
}
/// Build an increment or decrement function for a specific layout
@ -1169,19 +1168,20 @@ fn build_rec_union_help<'a, 'ctx>(
let ctx = env.context;
if union_layout.is_nullable() {
let is_null = env.builder.build_is_null(value_ptr, "is_null");
let is_null = env.builder.new_build_is_null(value_ptr, "is_null");
let then_block = ctx.append_basic_block(parent, "then");
env.builder
.build_conditional_branch(is_null, then_block, should_recurse_block);
.new_build_conditional_branch(is_null, then_block, should_recurse_block);
{
env.builder.position_at_end(then_block);
env.builder.build_return(None);
env.builder.new_build_return(None);
}
} else {
env.builder.build_unconditional_branch(should_recurse_block);
env.builder
.new_build_unconditional_branch(should_recurse_block);
}
env.builder.position_at_end(should_recurse_block);
@ -1196,14 +1196,14 @@ fn build_rec_union_help<'a, 'ctx>(
Mode::Inc => {
// inc is cheap; we never recurse
refcount_ptr.modify(call_mode, layout, env, layout_interner);
env.builder.build_return(None);
env.builder.new_build_return(None);
}
Mode::Dec => {
let do_recurse_block = env.context.append_basic_block(parent, "do_recurse");
let no_recurse_block = env.context.append_basic_block(parent, "no_recurse");
builder.build_conditional_branch(
builder.new_build_conditional_branch(
refcount_ptr.is_1(env),
do_recurse_block,
no_recurse_block,
@ -1213,7 +1213,7 @@ fn build_rec_union_help<'a, 'ctx>(
env.builder.position_at_end(no_recurse_block);
refcount_ptr.modify(call_mode, layout, env, layout_interner);
env.builder.build_return(None);
env.builder.new_build_return(None);
}
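// Aside (conceptual sketch only): the branch built above implements the usual
// refcount decrement strategy; only the last owner recurses into children
// before the allocation is freed, otherwise the count is just lowered.
fn decrement_rc(refcount: &mut usize, free_children: impl FnOnce()) {
    if *refcount == 1 {
        free_children();
        // ...after which the allocation itself is freed
    } else {
        *refcount -= 1;
    }
}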
{
@ -1304,7 +1304,7 @@ fn build_rec_union_recursive_decrement<'a, 'ctx>(
let wrapper_type = basic_type_from_layout(env, layout_interner, fields_struct);
// cast the opaque pointer to a pointer of the correct shape
let struct_ptr = env.builder.build_pointer_cast(
let struct_ptr = env.builder.new_build_pointer_cast(
value_ptr,
wrapper_type.ptr_type(AddressSpace::default()),
"opaque_to_correct_recursive_decrement",
@ -1318,15 +1318,12 @@ fn build_rec_union_recursive_decrement<'a, 'ctx>(
for (i, field_layout) in field_layouts.iter().enumerate() {
if let LayoutRepr::RecursivePointer(_) = layout_interner.get_repr(*field_layout) {
// this field has type `*i64`, but is really a pointer to the data we want
let elem_pointer = env
.builder
.new_build_struct_gep(
wrapper_type.into_struct_type(),
struct_ptr,
i as u32,
"gep_recursive_pointer",
)
.unwrap();
let elem_pointer = env.builder.new_build_struct_gep(
wrapper_type.into_struct_type(),
struct_ptr,
i as u32,
"gep_recursive_pointer",
);
let ptr_as_i64_ptr = env.builder.new_build_load(
env.context.i64_type().ptr_type(AddressSpace::default()),
@ -1343,15 +1340,12 @@ fn build_rec_union_recursive_decrement<'a, 'ctx>(
deferred_rec.push(recursive_field_ptr);
} else if layout_interner.contains_refcounted(*field_layout) {
let elem_pointer = env
.builder
.new_build_struct_gep(
wrapper_type.into_struct_type(),
struct_ptr,
i as u32,
"gep_recursive_pointer",
)
.unwrap();
let elem_pointer = env.builder.new_build_struct_gep(
wrapper_type.into_struct_type(),
struct_ptr,
i as u32,
"gep_recursive_pointer",
);
let field = load_roc_value(
env,
@ -1398,7 +1392,7 @@ fn build_rec_union_recursive_decrement<'a, 'ctx>(
}
// this function returns void
builder.build_return(None);
builder.new_build_return(None);
cases.push((tag_id_int_type.const_int(tag_id as u64, false), block));
}
@ -1415,18 +1409,18 @@ fn build_rec_union_recursive_decrement<'a, 'ctx>(
if cases.is_empty() {
// The only other layout doesn't need refcounting. Pass through.
builder.build_return(None);
builder.new_build_return(None);
} else {
// in this case, don't switch, because the `else` branch below would try to read the (nonexistent) tag id
let (_, only_branch) = cases.pop().unwrap();
env.builder.build_unconditional_branch(only_branch);
env.builder.new_build_unconditional_branch(only_branch);
}
} else {
let default_block = env.context.append_basic_block(parent, "switch_default");
// switch on it
env.builder
.build_switch(current_tag_id, default_block, &cases);
.new_build_switch(current_tag_id, default_block, &cases);
{
env.builder.position_at_end(default_block);
@ -1439,7 +1433,7 @@ fn build_rec_union_recursive_decrement<'a, 'ctx>(
}
// this function returns void
builder.build_return(None);
builder.new_build_return(None);
}
}
@ -1572,19 +1566,20 @@ fn build_reuse_rec_union_help<'a, 'ctx>(
let ctx = env.context;
if union_layout.is_nullable() {
let is_null = env.builder.build_is_null(value_ptr, "is_null");
let is_null = env.builder.new_build_is_null(value_ptr, "is_null");
let then_block = ctx.append_basic_block(parent, "then");
env.builder
.build_conditional_branch(is_null, then_block, should_recurse_block);
.new_build_conditional_branch(is_null, then_block, should_recurse_block);
{
env.builder.position_at_end(then_block);
env.builder.build_return(None);
env.builder.new_build_return(None);
}
} else {
env.builder.build_unconditional_branch(should_recurse_block);
env.builder
.new_build_unconditional_branch(should_recurse_block);
}
env.builder.position_at_end(should_recurse_block);
@ -1594,13 +1589,17 @@ fn build_reuse_rec_union_help<'a, 'ctx>(
let do_recurse_block = env.context.append_basic_block(parent, "do_recurse");
let no_recurse_block = env.context.append_basic_block(parent, "no_recurse");
builder.build_conditional_branch(refcount_ptr.is_1(env), do_recurse_block, no_recurse_block);
builder.new_build_conditional_branch(
refcount_ptr.is_1(env),
do_recurse_block,
no_recurse_block,
);
{
env.builder.position_at_end(no_recurse_block);
refcount_ptr.modify(call_mode, layout, env, layout_interner);
env.builder.build_return(None);
env.builder.new_build_return(None);
}
{
@ -1725,15 +1724,12 @@ fn modify_refcount_nonrecursive_help<'a, 'ctx>(
let union_struct_type = basic_type_from_layout(env, layout_interner, layout).into_struct_type();
// read the tag_id
let tag_id_ptr = env
.builder
.new_build_struct_gep(
union_struct_type,
arg_ptr,
RocUnion::TAG_ID_INDEX,
"tag_id_ptr",
)
.unwrap();
let tag_id_ptr = env.builder.new_build_struct_gep(
union_struct_type,
arg_ptr,
RocUnion::TAG_ID_INDEX,
"tag_id_ptr",
);
let tag_id = env
.builder
@ -1750,7 +1746,7 @@ fn modify_refcount_nonrecursive_help<'a, 'ctx>(
let tag_id_u8 =
env.builder
.build_int_cast_sign_flag(tag_id, env.context.i8_type(), false, "tag_id_u8");
.new_build_int_cast_sign_flag(tag_id, env.context.i8_type(), false, "tag_id_u8");
// next, make a jump table for all possible values of the tag_id
let mut cases = Vec::with_capacity_in(tags.len(), env.arena);
@ -1776,17 +1772,14 @@ fn modify_refcount_nonrecursive_help<'a, 'ctx>(
debug_assert!(data_struct_type.is_struct_type());
let data_struct_type = data_struct_type.into_struct_type();
let opaque_tag_data_ptr = env
.builder
.new_build_struct_gep(
union_struct_type,
arg_ptr,
RocUnion::TAG_DATA_INDEX,
"field_ptr",
)
.unwrap();
let opaque_tag_data_ptr = env.builder.new_build_struct_gep(
union_struct_type,
arg_ptr,
RocUnion::TAG_DATA_INDEX,
"field_ptr",
);
let cast_tag_data_pointer = env.builder.build_pointer_cast(
let cast_tag_data_pointer = env.builder.new_build_pointer_cast(
opaque_tag_data_ptr,
data_struct_type.ptr_type(AddressSpace::default()),
"cast_to_concrete_tag",
@ -1797,15 +1790,12 @@ fn modify_refcount_nonrecursive_help<'a, 'ctx>(
layout_interner.get_repr(*field_layout)
{
// This field is a pointer to the recursive pointer.
let field_ptr = env
.builder
.new_build_struct_gep(
data_struct_type,
cast_tag_data_pointer,
i as u32,
"modify_tag_field",
)
.unwrap();
let field_ptr = env.builder.new_build_struct_gep(
data_struct_type,
cast_tag_data_pointer,
i as u32,
"modify_tag_field",
);
// This is the actual pointer to the recursive data.
let field_value = env.builder.new_build_load(
@ -1834,15 +1824,12 @@ fn modify_refcount_nonrecursive_help<'a, 'ctx>(
*field_layout,
)
} else if layout_interner.contains_refcounted(*field_layout) {
let field_ptr = env
.builder
.new_build_struct_gep(
data_struct_type,
cast_tag_data_pointer,
i as u32,
"modify_tag_field",
)
.unwrap();
let field_ptr = env.builder.new_build_struct_gep(
data_struct_type,
cast_tag_data_pointer,
i as u32,
"modify_tag_field",
);
let field_value = if layout_interner.is_passed_by_reference(*field_layout) {
field_ptr.into()
@ -1869,17 +1856,17 @@ fn modify_refcount_nonrecursive_help<'a, 'ctx>(
}
}
env.builder.build_unconditional_branch(merge_block);
env.builder.new_build_unconditional_branch(merge_block);
cases.push((env.context.i8_type().const_int(tag_id as u64, false), block));
}
env.builder.position_at_end(before_block);
env.builder.build_switch(tag_id_u8, merge_block, &cases);
env.builder.new_build_switch(tag_id_u8, merge_block, &cases);
env.builder.position_at_end(merge_block);
// this function returns void
builder.build_return(None);
builder.new_build_return(None);
}


@ -58,8 +58,8 @@ impl<'ctx> RocStruct<'ctx> {
let passed_by_ref = layout_repr.is_passed_by_reference(layout_interner);
if passed_by_ref {
let alloca = env.builder.build_alloca(struct_type, "struct_alloca");
env.builder.build_store(alloca, struct_val);
let alloca = env.builder.new_build_alloca(struct_type, "struct_alloca");
env.builder.new_build_store(alloca, struct_val);
RocStruct::ByReference(alloca)
} else {
RocStruct::ByValue(struct_val)
@ -156,8 +156,7 @@ fn index_struct_ptr<'a, 'ctx>(
let name = format!("struct_field_access_record_{index}");
let field_value = env
.builder
.new_build_struct_gep(struct_type, ptr, index as u32, &name)
.unwrap();
.new_build_struct_gep(struct_type, ptr, index as u32, &name);
load_roc_value(
env,