Merge pull request #3612 from rtfeldman/tag-alignment

Tag alignment
Richard Feldman 2022-07-23 22:45:54 -04:00 committed by GitHub
commit eb04d4b9f6
14 changed files with 539 additions and 451 deletions

Cargo.lock (generated)

@@ -4795,6 +4795,7 @@ dependencies = [
  "roc_collections",
  "roc_constrain",
  "roc_debug_flags",
+ "roc_error_macros",
  "roc_gen_dev",
  "roc_gen_llvm",
  "roc_gen_wasm",


@@ -9,8 +9,8 @@ use roc_builtins::bitcode::{
 use roc_collections::VecMap;
 use roc_module::symbol::{Interns, Symbol};
 use roc_mono::layout::{
-    cmp_fields, ext_var_is_empty_tag_union, round_up_to_alignment, Builtin, Layout, LayoutCache,
-    UnionLayout,
+    cmp_fields, ext_var_is_empty_tag_union, round_up_to_alignment, Builtin, Discriminant, Layout,
+    LayoutCache, UnionLayout,
 };
 use roc_target::TargetInfo;
 use roc_types::{
@@ -1024,7 +1024,10 @@ fn add_tag_union<'a>(
         // A non-recursive tag union
         // e.g. `Result ok err : [Ok ok, Err err]`
         NonRecursive(_) => {
-            let discriminant_size = UnionLayout::discriminant_size(tags.len()).stack_size();
+            // TODO deal with empty tag union
+            let discriminant_size = Discriminant::from_number_of_tags(tags.len())
+                .stack_size()
+                .max(1);
             let discriminant_offset = union_layout.tag_id_offset(env.target).unwrap();

             RocType::TagUnion(RocTagUnion::NonRecursive {
@@ -1037,7 +1040,8 @@ fn add_tag_union<'a>(
         // A recursive tag union (general case)
         // e.g. `Expr : [Sym Str, Add Expr Expr]`
         Recursive(_) => {
-            let discriminant_size = UnionLayout::discriminant_size(tags.len()).stack_size();
+            let discriminant_size =
+                Discriminant::from_number_of_tags(tags.len()).stack_size();
             let discriminant_offset = union_layout.tag_id_offset(env.target).unwrap();

             RocType::TagUnion(RocTagUnion::Recursive {
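
The bindgen glue above now derives the discriminant width from the number of tags via `Discriminant::from_number_of_tags` (defined later in this PR) and, for non-recursive unions, clamps it to at least one byte so the generated host-side struct still has an addressable tag field even when Roc itself would need zero bits. A standalone sketch of that mapping, mirroring the enum introduced below rather than calling the real `roc_mono` API:

```rust
/// Standalone mirror of Discriminant::from_number_of_tags(..).stack_size()
/// from this PR; purely illustrative.
fn discriminant_stack_size(num_tags: usize) -> u32 {
    match num_tags {
        0 | 1 => 0,          // no runtime discriminant is needed
        2 => 1,              // a single bit, stored as a byte
        3..=255 => 1,
        256..=65_535 => 2,
        _ => panic!("discriminant too large"),
    }
}

fn main() {
    // The glue takes `.max(1)` so even a 0/1-tag union gets a one-byte field.
    assert_eq!(discriminant_stack_size(1).max(1), 1);
    assert_eq!(discriminant_stack_size(2).max(1), 1);
    assert_eq!(discriminant_stack_size(300).max(1), 2);
}
```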


@@ -594,16 +594,16 @@ impl<
                 let (data_size, data_alignment) =
                     union_layout.data_size_and_alignment(self.target_info);
                 let id_offset = data_size - data_alignment;
-                let id_builtin = union_layout.tag_id_builtin();
-                let size = id_builtin.stack_size(self.target_info);
+                let discriminant = union_layout.discriminant();
+                let size = discriminant.stack_size();

                 self.allocation_map.insert(*sym, owned_data);
                 self.symbol_storage_map.insert(
                     *sym,
                     Stack(ReferencedPrimitive {
                         base_offset: union_offset + id_offset as i32,
                         size,
-                        sign_extend: matches!(id_builtin, sign_extended_int_builtins!()),
+                        sign_extend: false, // tag ids are always unsigned
                     }),
                 );
             }


@@ -2,9 +2,9 @@
 use crate::debug_info_init;
 use crate::llvm::build::{
     complex_bitcast_check_size, load_roc_value, struct_from_fields, to_cc_return, CCReturn, Env,
-    C_CALL_CONV, FAST_CALL_CONV, TAG_DATA_INDEX,
+    C_CALL_CONV, FAST_CALL_CONV,
 };
-use crate::llvm::convert::basic_type_from_layout;
+use crate::llvm::convert::{basic_type_from_layout, RocUnion};
 use crate::llvm::refcounting::{
     decrement_refcount_layout, increment_n_refcount_layout, increment_refcount_layout,
 };
@@ -181,7 +181,15 @@ pub fn call_bitcode_fn_fixing_for_convention<'a, 'ctx, 'env>(
         .try_into()
         .expect("Zig bitcode return type is not a basic type!");

+    // when we write an i128 into this (happens in NumToInt), zig expects this pointer to
+    // be 16-byte aligned. Not doing so is UB and will immediately fail on CI
     let cc_return_value_ptr = env.builder.build_alloca(cc_return_type, "return_value");
+    cc_return_value_ptr
+        .as_instruction()
+        .unwrap()
+        .set_alignment(16)
+        .unwrap();

     let fixed_args: Vec<BasicValueEnum<'ctx>> = [cc_return_value_ptr.into()]
         .iter()
         .chain(args)
@@ -314,7 +322,7 @@ fn build_has_tag_id_help<'a, 'ctx, 'env>(
     let tag_data_ptr = {
         let ptr = env
             .builder
-            .build_struct_gep(tag_value, TAG_DATA_INDEX, "get_data_ptr")
+            .build_struct_gep(tag_value, RocUnion::TAG_DATA_INDEX, "get_data_ptr")
             .unwrap();

         env.builder.build_bitcast(ptr, i8_ptr_type, "to_opaque")
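
The new `set_alignment(16)` call is the whole fix in the hunk above: the alloca that receives Zig's by-pointer return value must be 16-byte aligned once an `i128` is written through it, otherwise the write is UB. A minimal inkwell sketch of the same pattern, assuming the inkwell version used in this repo (where `build_alloca` returns the `PointerValue` directly):

```rust
use inkwell::context::Context;

fn main() {
    let context = Context::create();
    let module = context.create_module("align_demo");
    let builder = context.create_builder();

    // A dummy function so the builder has somewhere to emit the alloca.
    let fn_type = context.void_type().fn_type(&[], false);
    let function = module.add_function("demo", fn_type, None);
    let entry = context.append_basic_block(function, "entry");
    builder.position_at_end(entry);

    // Same shape as the diff: allocate the return slot, then force 16-byte
    // alignment so an i128 store through this pointer is well-defined.
    let return_slot = builder.build_alloca(context.i128_type(), "return_value");
    return_slot
        .as_instruction()
        .unwrap()
        .set_alignment(16)
        .unwrap();

    builder.build_return(None);
}
```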


@@ -11,8 +11,7 @@ use crate::llvm::build_list::{
 use crate::llvm::build_str::{dec_to_str, str_from_float, str_from_int};
 use crate::llvm::compare::{generic_eq, generic_neq};
 use crate::llvm::convert::{
-    self, argument_type_from_layout, basic_type_from_builtin, basic_type_from_layout,
-    block_of_memory_slices, zig_str_type,
+    self, argument_type_from_layout, basic_type_from_builtin, basic_type_from_layout, zig_str_type,
 };
 use crate::llvm::refcounting::{
     build_reset, decrement_refcount_layout, increment_refcount_layout, PointerToRefcount,
@@ -65,7 +64,7 @@ use std::convert::TryInto;
 use std::path::Path;
 use target_lexicon::{Architecture, OperatingSystem, Triple};

-use super::convert::zig_with_overflow_roc_dec;
+use super::convert::{zig_with_overflow_roc_dec, RocUnion};

 #[inline(always)]
 fn print_fn_verification_output() -> bool {
@@ -1162,9 +1161,6 @@ pub fn build_exp_call<'a, 'ctx, 'env>(
     }
 }

-pub const TAG_ID_INDEX: u32 = 1;
-pub const TAG_DATA_INDEX: u32 = 0;
-
 pub fn struct_from_fields<'a, 'ctx, 'env, I>(
     env: &Env<'a, 'ctx, 'env>,
     struct_type: StructType<'ctx>,
@@ -1240,40 +1236,7 @@ pub fn build_exp_expr<'a, 'ctx, 'env>(
             call,
         ),

-        Struct(sorted_fields) => {
-            let ctx = env.context;
-
-            // Determine types
-            let num_fields = sorted_fields.len();
-            let mut field_types = Vec::with_capacity_in(num_fields, env.arena);
-            let mut field_vals = Vec::with_capacity_in(num_fields, env.arena);
-
-            for symbol in sorted_fields.iter() {
-                // Zero-sized fields have no runtime representation.
-                // The layout of the struct expects them to be dropped!
-                let (field_expr, field_layout) = load_symbol_and_layout(scope, symbol);
-                if !field_layout.is_dropped_because_empty() {
-                    field_types.push(basic_type_from_layout(env, field_layout));
-
-                    if field_layout.is_passed_by_reference(env.target_info) {
-                        let field_value = env.builder.build_load(
-                            field_expr.into_pointer_value(),
-                            "load_tag_to_put_in_struct",
-                        );
-
-                        field_vals.push(field_value);
-                    } else {
-                        field_vals.push(field_expr);
-                    }
-                }
-            }
-
-            // Create the struct_type
-            let struct_type = ctx.struct_type(field_types.into_bump_slice(), false);
-
-            // Insert field exprs into struct_val
-            struct_from_fields(env, struct_type, field_vals.into_iter().enumerate()).into()
-        }
+        Struct(sorted_fields) => build_struct(env, scope, sorted_fields).into(),

         Reuse {
             arguments,
@ -1465,16 +1428,34 @@ pub fn build_exp_expr<'a, 'ctx, 'env>(
let field_layouts = tag_layouts[*tag_id as usize]; let field_layouts = tag_layouts[*tag_id as usize];
let tag_id_type = let struct_layout = Layout::struct_no_name_order(field_layouts);
basic_type_from_layout(env, &union_layout.tag_id_layout()).into_int_type(); let struct_type = basic_type_from_layout(env, &struct_layout);
lookup_at_index_ptr2( let opaque_data_ptr = env
.builder
.build_struct_gep(
argument.into_pointer_value(),
RocUnion::TAG_DATA_INDEX,
"get_opaque_data_ptr",
)
.unwrap();
let data_ptr = env.builder.build_pointer_cast(
opaque_data_ptr,
struct_type.ptr_type(AddressSpace::Generic),
"to_data_pointer",
);
let element_ptr = env
.builder
.build_struct_gep(data_ptr, *index as _, "get_opaque_data_ptr")
.unwrap();
load_roc_value(
env, env,
union_layout, field_layouts[*index as usize],
tag_id_type, element_ptr,
field_layouts, "load_element",
*index as usize,
argument.into_pointer_value(),
) )
} }
UnionLayout::Recursive(tag_layouts) => { UnionLayout::Recursive(tag_layouts) => {
@ -1482,19 +1463,9 @@ pub fn build_exp_expr<'a, 'ctx, 'env>(
let field_layouts = tag_layouts[*tag_id as usize]; let field_layouts = tag_layouts[*tag_id as usize];
let tag_id_type =
basic_type_from_layout(env, &union_layout.tag_id_layout()).into_int_type();
let ptr = tag_pointer_clear_tag_id(env, argument.into_pointer_value()); let ptr = tag_pointer_clear_tag_id(env, argument.into_pointer_value());
lookup_at_index_ptr2( lookup_at_index_ptr2(env, union_layout, field_layouts, *index as usize, ptr)
env,
union_layout,
tag_id_type,
field_layouts,
*index as usize,
ptr,
)
} }
UnionLayout::NonNullableUnwrapped(field_layouts) => { UnionLayout::NonNullableUnwrapped(field_layouts) => {
let struct_layout = Layout::struct_no_name_order(field_layouts); let struct_layout = Layout::struct_no_name_order(field_layouts);
@ -1525,18 +1496,8 @@ pub fn build_exp_expr<'a, 'ctx, 'env>(
let field_layouts = other_tags[tag_index as usize]; let field_layouts = other_tags[tag_index as usize];
let tag_id_type =
basic_type_from_layout(env, &union_layout.tag_id_layout()).into_int_type();
let ptr = tag_pointer_clear_tag_id(env, argument.into_pointer_value()); let ptr = tag_pointer_clear_tag_id(env, argument.into_pointer_value());
lookup_at_index_ptr2( lookup_at_index_ptr2(env, union_layout, field_layouts, *index as usize, ptr)
env,
union_layout,
tag_id_type,
field_layouts,
*index as usize,
ptr,
)
} }
UnionLayout::NullableUnwrapped { UnionLayout::NullableUnwrapped {
nullable_id, nullable_id,
@ -1599,7 +1560,7 @@ fn build_wrapped_tag<'a, 'ctx, 'env>(
if union_layout.stores_tag_id_as_data(env.target_info) { if union_layout.stores_tag_id_as_data(env.target_info) {
let tag_id_ptr = builder let tag_id_ptr = builder
.build_struct_gep(raw_data_ptr, TAG_ID_INDEX, "tag_id_index") .build_struct_gep(raw_data_ptr, RocUnion::TAG_ID_INDEX, "tag_id_index")
.unwrap(); .unwrap();
let tag_id_type = basic_type_from_layout(env, &tag_id_layout).into_int_type(); let tag_id_type = basic_type_from_layout(env, &tag_id_layout).into_int_type();
@ -1608,7 +1569,7 @@ fn build_wrapped_tag<'a, 'ctx, 'env>(
.build_store(tag_id_ptr, tag_id_type.const_int(tag_id as u64, false)); .build_store(tag_id_ptr, tag_id_type.const_int(tag_id as u64, false));
let opaque_struct_ptr = builder let opaque_struct_ptr = builder
.build_struct_gep(raw_data_ptr, TAG_DATA_INDEX, "tag_data_index") .build_struct_gep(raw_data_ptr, RocUnion::TAG_DATA_INDEX, "tag_data_index")
.unwrap(); .unwrap();
struct_pointer_from_fields( struct_pointer_from_fields(
@ -1702,6 +1663,44 @@ fn build_tag_fields<'a, 'ctx, 'env>(
(field_types, field_values) (field_types, field_values)
} }
fn build_struct<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
scope: &Scope<'a, 'ctx>,
sorted_fields: &[Symbol],
) -> StructValue<'ctx> {
let ctx = env.context;
// Determine types
let num_fields = sorted_fields.len();
let mut field_types = Vec::with_capacity_in(num_fields, env.arena);
let mut field_vals = Vec::with_capacity_in(num_fields, env.arena);
for symbol in sorted_fields.iter() {
// Zero-sized fields have no runtime representation.
// The layout of the struct expects them to be dropped!
let (field_expr, field_layout) = load_symbol_and_layout(scope, symbol);
if !field_layout.is_dropped_because_empty() {
field_types.push(basic_type_from_layout(env, field_layout));
if field_layout.is_passed_by_reference(env.target_info) {
let field_value = env
.builder
.build_load(field_expr.into_pointer_value(), "load_tag_to_put_in_struct");
field_vals.push(field_value);
} else {
field_vals.push(field_expr);
}
}
}
// Create the struct_type
let struct_type = ctx.struct_type(field_types.into_bump_slice(), false);
// Insert field exprs into struct_val
struct_from_fields(env, struct_type, field_vals.into_iter().enumerate())
}
fn build_tag<'a, 'ctx, 'env>( fn build_tag<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>, env: &Env<'a, 'ctx, 'env>,
scope: &Scope<'a, 'ctx>, scope: &Scope<'a, 'ctx>,
@ -1711,93 +1710,27 @@ fn build_tag<'a, 'ctx, 'env>(
reuse_allocation: Option<PointerValue<'ctx>>, reuse_allocation: Option<PointerValue<'ctx>>,
parent: FunctionValue<'ctx>, parent: FunctionValue<'ctx>,
) -> BasicValueEnum<'ctx> { ) -> BasicValueEnum<'ctx> {
let tag_id_layout = union_layout.tag_id_layout();
let union_size = union_layout.number_of_tags(); let union_size = union_layout.number_of_tags();
match union_layout { match union_layout {
UnionLayout::NonRecursive(tags) => { UnionLayout::NonRecursive(tags) => {
debug_assert!(union_size > 1); debug_assert!(union_size > 1);
let internal_type = block_of_memory_slices(env.context, tags, env.target_info); let data = build_struct(env, scope, arguments);
let tag_id_type = basic_type_from_layout(env, &tag_id_layout).into_int_type(); let roc_union = RocUnion::tagged_from_slices(env.context, tags, env.target_info);
let wrapper_type = env let value = roc_union.as_struct_value(env, data, Some(tag_id as _));
.context
.struct_type(&[internal_type, tag_id_type.into()], false);
let result_alloca = entry_block_alloca_zerofill(env, wrapper_type.into(), "opaque_tag");
// Determine types let alloca = create_entry_block_alloca(
let num_fields = arguments.len() + 1; env,
let mut field_types = Vec::with_capacity_in(num_fields, env.arena); parent,
let mut field_vals = Vec::with_capacity_in(num_fields, env.arena); value.get_type().into(),
"non_recursive_tag_alloca",
let tag_field_layouts = &tags[tag_id as usize];
for (field_symbol, tag_field_layout) in arguments.iter().zip(tag_field_layouts.iter()) {
let (val, _val_layout) = load_symbol_and_layout(scope, field_symbol);
// Zero-sized fields have no runtime representation.
// The layout of the struct expects them to be dropped!
if !tag_field_layout.is_dropped_because_empty() {
let field_type = basic_type_from_layout(env, tag_field_layout);
field_types.push(field_type);
if let Layout::RecursivePointer = tag_field_layout {
panic!(
r"non-recursive tag unions cannot directly contain a recursive pointer"
);
} else {
// this check fails for recursive tag unions, but can be helpful while debugging
// debug_assert_eq!(tag_field_layout, val_layout);
field_vals.push(val);
}
}
}
// store the tag id
let tag_id_ptr = env
.builder
.build_struct_gep(result_alloca, TAG_ID_INDEX, "tag_id_ptr")
.unwrap();
let tag_id_intval = tag_id_type.const_int(tag_id as u64, false);
env.builder.build_store(tag_id_ptr, tag_id_intval);
// Create the struct_type
let struct_type = env
.context
.struct_type(field_types.into_bump_slice(), false);
let struct_opaque_ptr = env
.builder
.build_struct_gep(result_alloca, TAG_DATA_INDEX, "opaque_data_ptr")
.unwrap();
let struct_ptr = env.builder.build_pointer_cast(
struct_opaque_ptr,
struct_type.ptr_type(AddressSpace::Generic),
"to_specific",
); );
// Insert field exprs into struct_val env.builder.build_store(alloca, value);
//let struct_val =
//struct_from_fields(env, struct_type, field_vals.into_iter().enumerate());
// Insert field exprs into struct_val alloca.into()
for (index, field_val) in field_vals.iter().copied().enumerate() {
let index: u32 = index as u32;
let ptr = env
.builder
.build_struct_gep(struct_ptr, index, "get_tag_field_ptr")
.unwrap();
let field_layout = tag_field_layouts[index as usize];
store_roc_value(env, field_layout, ptr, field_val);
}
// env.builder.build_load(result_alloca, "load_result")
result_alloca.into()
} }
UnionLayout::Recursive(tags) => { UnionLayout::Recursive(tags) => {
debug_assert!(union_size > 1); debug_assert!(union_size > 1);
@ -1876,11 +1809,11 @@ fn build_tag<'a, 'ctx, 'env>(
nullable_id, nullable_id,
other_fields, other_fields,
} => { } => {
let tag_struct_type = let roc_union =
block_of_memory_slices(env.context, &[other_fields], env.target_info); RocUnion::untagged_from_slices(env.context, &[other_fields], env.target_info);
if tag_id == *nullable_id as _ { if tag_id == *nullable_id as _ {
let output_type = tag_struct_type.ptr_type(AddressSpace::Generic); let output_type = roc_union.struct_type().ptr_type(AddressSpace::Generic);
return output_type.const_null().into(); return output_type.const_null().into();
} }
@ -1891,23 +1824,15 @@ fn build_tag<'a, 'ctx, 'env>(
debug_assert!(union_size == 2); debug_assert!(union_size == 2);
// Determine types
let (field_types, field_values) = build_tag_fields(env, scope, other_fields, arguments);
// Create the struct_type // Create the struct_type
let data_ptr = let data_ptr =
allocate_tag(env, parent, reuse_allocation, union_layout, &[other_fields]); allocate_tag(env, parent, reuse_allocation, union_layout, &[other_fields]);
let struct_type = env let data = build_struct(env, scope, arguments);
.context
.struct_type(field_types.into_bump_slice(), false);
struct_pointer_from_fields( let value = roc_union.as_struct_value(env, data, None);
env,
struct_type, env.builder.build_store(data_ptr, value);
data_ptr,
field_values.into_iter().enumerate(),
);
data_ptr.into() data_ptr.into()
} }
@ -2154,7 +2079,6 @@ fn lookup_at_index_ptr<'a, 'ctx, 'env>(
fn lookup_at_index_ptr2<'a, 'ctx, 'env>( fn lookup_at_index_ptr2<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>, env: &Env<'a, 'ctx, 'env>,
union_layout: &UnionLayout<'a>, union_layout: &UnionLayout<'a>,
tag_id_type: IntType<'ctx>,
field_layouts: &[Layout<'_>], field_layouts: &[Layout<'_>],
index: usize, index: usize,
value: PointerValue<'ctx>, value: PointerValue<'ctx>,
@ -2164,23 +2088,15 @@ fn lookup_at_index_ptr2<'a, 'ctx, 'env>(
let struct_layout = Layout::struct_no_name_order(field_layouts); let struct_layout = Layout::struct_no_name_order(field_layouts);
let struct_type = basic_type_from_layout(env, &struct_layout); let struct_type = basic_type_from_layout(env, &struct_layout);
let wrapper_type = env let data_ptr = env
.context
.struct_type(&[struct_type, tag_id_type.into()], false);
let ptr = env
.builder .builder
.build_bitcast( .build_bitcast(
value, value,
wrapper_type.ptr_type(AddressSpace::Generic), struct_type.ptr_type(AddressSpace::Generic),
"cast_lookup_at_index_ptr", "cast_lookup_at_index_ptr",
) )
.into_pointer_value(); .into_pointer_value();
let data_ptr = builder
.build_struct_gep(ptr, TAG_DATA_INDEX, "at_index_struct_gep_tag")
.unwrap();
let elem_ptr = builder let elem_ptr = builder
.build_struct_gep(data_ptr, index as u32, "at_index_struct_gep_data") .build_struct_gep(data_ptr, index as u32, "at_index_struct_gep_data")
.unwrap(); .unwrap();
@ -2224,35 +2140,18 @@ fn reserve_with_refcount_union_as_block_of_memory<'a, 'ctx, 'env>(
) -> PointerValue<'ctx> { ) -> PointerValue<'ctx> {
let ptr_bytes = env.target_info; let ptr_bytes = env.target_info;
let block_type = block_of_memory_slices(env.context, fields, env.target_info); let roc_union = if union_layout.stores_tag_id_as_data(ptr_bytes) {
RocUnion::tagged_from_slices(env.context, fields, env.target_info)
let basic_type = if union_layout.stores_tag_id_as_data(ptr_bytes) {
let tag_id_type = basic_type_from_layout(env, &union_layout.tag_id_layout());
env.context
.struct_type(&[block_type, tag_id_type], false)
.into()
} else { } else {
block_type RocUnion::untagged_from_slices(env.context, fields, env.target_info)
}; };
let mut stack_size = fields reserve_with_refcount_help(
.iter() env,
.map(|tag| tag.iter().map(|l| l.stack_size(env.target_info)).sum()) roc_union.struct_type(),
.max() roc_union.tag_width(),
.unwrap_or_default(); roc_union.tag_alignment(),
)
if union_layout.stores_tag_id_as_data(ptr_bytes) {
stack_size += union_layout.tag_id_layout().stack_size(env.target_info);
}
let alignment_bytes = fields
.iter()
.flat_map(|tag| tag.iter().map(|l| l.alignment_bytes(env.target_info)))
.max()
.unwrap_or(0);
reserve_with_refcount_help(env, basic_type, stack_size, alignment_bytes)
} }
fn reserve_with_refcount_help<'a, 'ctx, 'env>( fn reserve_with_refcount_help<'a, 'ctx, 'env>(
@@ -2655,10 +2554,9 @@ pub fn build_exp_stmt<'a, 'ctx, 'env>(
                     //
                     // Hence, we explicitly memcpy source to destination, and rely on
                     // LLVM optimizing away any inefficiencies.
-                    let size = env.ptr_int().const_int(
-                        layout.stack_size_without_alignment(env.target_info) as u64,
-                        false,
-                    );
+                    let target_info = env.target_info;
+                    let width = layout.stack_size(target_info);
+                    let size = env.ptr_int().const_int(width as _, false);

                     env.builder
                         .build_memcpy(
@@ -3263,7 +3161,7 @@ fn get_tag_id_wrapped<'a, 'ctx, 'env>(
 ) -> IntValue<'ctx> {
     let tag_id_ptr = env
         .builder
-        .build_struct_gep(from_value, TAG_ID_INDEX, "tag_id_ptr")
+        .build_struct_gep(from_value, RocUnion::TAG_ID_INDEX, "tag_id_ptr")
         .unwrap();

     env.builder
@@ -3276,7 +3174,7 @@ pub fn get_tag_id_non_recursive<'a, 'ctx, 'env>(
     tag: StructValue<'ctx>,
 ) -> IntValue<'ctx> {
     env.builder
-        .build_extract_value(tag, TAG_ID_INDEX, "get_tag_id")
+        .build_extract_value(tag, RocUnion::TAG_ID_INDEX, "get_tag_id")
         .unwrap()
         .into_int_value()
 }
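
All of the `build_struct_gep`/`build_extract_value` call sites in this file switch from the old module-level `TAG_DATA_INDEX`/`TAG_ID_INDEX` constants (0 and 1) to `RocUnion::TAG_DATA_INDEX`/`RocUnion::TAG_ID_INDEX` (1 and 2), because the struct type introduced in this PR gains a leading zero-length array whose only job is to force the union's alignment. A small sketch of the new field order, with the indices spelled out (illustrative, not the real type):

```rust
// Field order of the tagged RocUnion struct introduced by this PR:
//   0: [0 x <alignment type>]  -- zero-sized, only forces the overall alignment
//   1: [data_width x i8]       -- the payload bytes
//   2: i8 or i16               -- the tag id
//   3: [padding x i8]          -- pads the struct out to its alignment
const TAG_DATA_INDEX: u32 = 1;
const TAG_ID_INDEX: u32 = 2;

fn main() {
    // The removed module-level constants were 0 and 1; every GEP that used them
    // now has to skip the new alignment field at index 0.
    assert_eq!(TAG_DATA_INDEX, 1);
    assert_eq!(TAG_ID_INDEX, 2);
}
```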


@@ -2,9 +2,10 @@ use crate::llvm::build::Env;
 use bumpalo::collections::Vec;
 use inkwell::context::Context;
 use inkwell::types::{BasicType, BasicTypeEnum, FloatType, IntType, StructType};
+use inkwell::values::StructValue;
 use inkwell::AddressSpace;
 use roc_builtins::bitcode::{FloatWidth, IntWidth};
-use roc_mono::layout::{Builtin, Layout, UnionLayout};
+use roc_mono::layout::{round_up_to_alignment, Builtin, Layout, UnionLayout};
 use roc_target::TargetInfo;

 fn basic_type_from_record<'a, 'ctx, 'env>(
@ -56,36 +57,40 @@ pub fn basic_type_from_union_layout<'a, 'ctx, 'env>(
) -> BasicTypeEnum<'ctx> { ) -> BasicTypeEnum<'ctx> {
use UnionLayout::*; use UnionLayout::*;
let tag_id_type = basic_type_from_layout(env, &union_layout.tag_id_layout());
match union_layout { match union_layout {
NonRecursive(tags) => { NonRecursive(tags) => {
let data = block_of_memory_slices(env.context, tags, env.target_info); //
RocUnion::tagged_from_slices(env.context, tags, env.target_info)
env.context.struct_type(&[data, tag_id_type], false).into() .struct_type()
.into()
} }
Recursive(tags) Recursive(tags)
| NullableWrapped { | NullableWrapped {
other_tags: tags, .. other_tags: tags, ..
} => { } => {
let data = block_of_memory_slices(env.context, tags, env.target_info);
if union_layout.stores_tag_id_as_data(env.target_info) { if union_layout.stores_tag_id_as_data(env.target_info) {
env.context RocUnion::tagged_from_slices(env.context, tags, env.target_info)
.struct_type(&[data, tag_id_type], false) .struct_type()
.ptr_type(AddressSpace::Generic) .ptr_type(AddressSpace::Generic)
.into() .into()
} else { } else {
data.ptr_type(AddressSpace::Generic).into() RocUnion::untagged_from_slices(env.context, tags, env.target_info)
.struct_type()
.ptr_type(AddressSpace::Generic)
.into()
} }
} }
NullableUnwrapped { other_fields, .. } => { NullableUnwrapped { other_fields, .. } => {
let block = block_of_memory_slices(env.context, &[other_fields], env.target_info); RocUnion::untagged_from_slices(env.context, &[other_fields], env.target_info)
block.ptr_type(AddressSpace::Generic).into() .struct_type()
.ptr_type(AddressSpace::Generic)
.into()
} }
NonNullableUnwrapped(fields) => { NonNullableUnwrapped(fields) => {
let block = block_of_memory_slices(env.context, &[fields], env.target_info); RocUnion::untagged_from_slices(env.context, &[fields], env.target_info)
block.ptr_type(AddressSpace::Generic).into() .struct_type()
.ptr_type(AddressSpace::Generic)
.into()
} }
} }
} }
@ -185,68 +190,196 @@ pub fn float_type_from_float_width<'a, 'ctx, 'env>(
} }
} }
pub fn block_of_memory_slices<'ctx>( fn alignment_type(context: &Context, alignment: u32) -> BasicTypeEnum {
context: &'ctx Context, match alignment {
layouts: &[&[Layout<'_>]], 0 => context.struct_type(&[], false).into(),
target_info: TargetInfo, 1 => context.i8_type().into(),
) -> BasicTypeEnum<'ctx> { 2 => context.i16_type().into(),
let mut union_size = 0; 4 => context.i32_type().into(),
for tag in layouts { 8 => context.i64_type().into(),
let mut total = 0; 16 => context.i128_type().into(),
for layout in tag.iter() { _ => unimplemented!("weird alignment: {alignment}"),
total += layout.stack_size(target_info); }
}
#[derive(Debug, Clone, Copy)]
enum TagType {
I8,
I16,
}
#[derive(Debug, Clone, Copy)]
pub(crate) struct RocUnion<'ctx> {
struct_type: StructType<'ctx>,
data_align: u32,
data_width: u32,
tag_type: Option<TagType>,
}
impl<'ctx> RocUnion<'ctx> {
pub const TAG_ID_INDEX: u32 = 2;
pub const TAG_DATA_INDEX: u32 = 1;
fn new(
context: &'ctx Context,
_target_info: TargetInfo,
data_align: u32,
data_width: u32,
tag_type: Option<TagType>,
) -> Self {
let bytes = round_up_to_alignment(data_width, data_align);
let byte_array_type = context.i8_type().array_type(bytes).as_basic_type_enum();
let alignment_array_type = alignment_type(context, data_align)
.array_type(0)
.as_basic_type_enum();
let struct_type = if let Some(tag_type) = tag_type {
let tag_width = match tag_type {
TagType::I8 => 1,
TagType::I16 => 2,
};
let tag_padding = round_up_to_alignment(tag_width, data_align) - tag_width;
let tag_padding_type = context
.i8_type()
.array_type(tag_padding)
.as_basic_type_enum();
context.struct_type(
&[
alignment_array_type,
byte_array_type,
match tag_type {
TagType::I8 => context.i8_type().into(),
TagType::I16 => context.i16_type().into(),
},
tag_padding_type,
],
false,
)
} else {
context.struct_type(&[alignment_array_type, byte_array_type], false)
};
Self {
struct_type,
data_align,
data_width,
tag_type,
}
}
pub fn struct_type(&self) -> StructType<'ctx> {
self.struct_type
}
pub fn tagged_from_slices(
context: &'ctx Context,
layouts: &[&[Layout<'_>]],
target_info: TargetInfo,
) -> Self {
let tag_type = match layouts.len() {
0..=255 => TagType::I8,
_ => TagType::I16,
};
let (data_width, data_align) =
Layout::stack_size_and_alignment_slices(layouts, target_info);
Self::new(context, target_info, data_align, data_width, Some(tag_type))
}
pub fn untagged_from_slices(
context: &'ctx Context,
layouts: &[&[Layout<'_>]],
target_info: TargetInfo,
) -> Self {
let (data_width, data_align) =
Layout::stack_size_and_alignment_slices(layouts, target_info);
Self::new(context, target_info, data_align, data_width, None)
}
pub fn tag_alignment(&self) -> u32 {
let tag_id_alignment = match self.tag_type {
None => 0,
Some(TagType::I8) => 1,
Some(TagType::I16) => 2,
};
self.data_align.max(tag_id_alignment)
}
pub fn tag_width(&self) -> u32 {
let tag_id_width = match self.tag_type {
None => 0,
Some(TagType::I8) => 1,
Some(TagType::I16) => 2,
};
let mut width = self.data_width;
// add padding between data and the tag id
width = round_up_to_alignment(width, tag_id_width);
// add tag id
width += tag_id_width;
// add padding after the tag id
width = round_up_to_alignment(width, self.tag_alignment());
width
}
pub fn as_struct_value<'a, 'env>(
&self,
env: &Env<'a, 'ctx, 'env>,
data: StructValue<'ctx>,
tag_id: Option<usize>,
) -> StructValue<'ctx> {
debug_assert_eq!(tag_id.is_some(), self.tag_type.is_some());
let mut struct_value = self.struct_type().const_zero();
let tag_alloca = env
.builder
.build_alloca(struct_value.get_type(), "tag_alloca");
env.builder.build_store(tag_alloca, struct_value);
let cast_pointer = env.builder.build_pointer_cast(
tag_alloca,
data.get_type().ptr_type(AddressSpace::Generic),
"to_data_ptr",
);
env.builder.build_store(cast_pointer, data);
struct_value = env
.builder
.build_load(tag_alloca, "load_tag")
.into_struct_value();
// set the tag id
//
// NOTE: setting the tag id initially happened before writing the data into it.
// That turned out to expose UB. More info at https://github.com/rtfeldman/roc/issues/3554
if let Some(tag_id) = tag_id {
let tag_id_type = match self.tag_type.unwrap() {
TagType::I8 => env.context.i8_type(),
TagType::I16 => env.context.i16_type(),
};
let tag_id = tag_id_type.const_int(tag_id as u64, false);
struct_value = env
.builder
.build_insert_value(struct_value, tag_id, Self::TAG_ID_INDEX, "insert_tag_id")
.unwrap()
.into_struct_value();
} }
union_size = union_size.max(total); struct_value
}
block_of_memory_help(context, union_size)
}
pub fn block_of_memory<'ctx>(
context: &'ctx Context,
layout: &Layout<'_>,
target_info: TargetInfo,
) -> BasicTypeEnum<'ctx> {
// TODO make this dynamic
let mut union_size = layout.stack_size(target_info);
if let Layout::Union(UnionLayout::NonRecursive { .. }) = layout {
union_size -= target_info.ptr_width() as u32;
}
block_of_memory_help(context, union_size)
}
fn block_of_memory_help(context: &Context, union_size: u32) -> BasicTypeEnum<'_> {
// The memory layout of Union is a bit tricky.
// We have tags with different memory layouts, that are part of the same type.
// For llvm, all tags must have the same memory layout.
//
// So, we convert all tags to a layout of bytes of some size.
// It turns out that encoding to i64 for as many elements as possible is
// a nice optimization, the remainder is encoded as bytes.
let num_i64 = union_size / 8;
let num_i8 = union_size % 8;
let i8_array_type = context.i8_type().array_type(num_i8).as_basic_type_enum();
let i64_array_type = context.i64_type().array_type(num_i64).as_basic_type_enum();
if num_i64 == 0 {
// The object fits perfectly in some number of i8s
context.struct_type(&[i8_array_type], false).into()
} else if num_i8 == 0 {
// The object fits perfectly in some number of i64s
// (i.e. the size is a multiple of 8 bytes)
context.struct_type(&[i64_array_type], false).into()
} else {
// There are some trailing bytes at the end
let i8_array_type = context.i8_type().array_type(num_i8).as_basic_type_enum();
context
.struct_type(&[i64_array_type, i8_array_type], false)
.into()
} }
} }
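
`RocUnion::tag_width`/`tag_alignment` above replace the old hand-rolled block-of-memory size math: the payload is padded so the tag id is aligned, the tag id is appended, and the whole thing is rounded up to the union's alignment. A standalone sketch of that arithmetic, with two worked examples that line up with the new gen tests (a mirror for illustration, not the actual API):

```rust
const fn round_up_to_alignment(width: u32, alignment: u32) -> u32 {
    if alignment <= 1 || width % alignment == 0 {
        width
    } else {
        width + alignment - (width % alignment)
    }
}

/// data_width/data_align describe the widest variant's payload (already padded
/// to data_align); tag_id_width is 1 byte (up to 255 tags) or 2 bytes.
fn tagged_union_width(data_width: u32, data_align: u32, tag_id_width: u32) -> u32 {
    let tag_align = data_align.max(tag_id_width);
    let mut width = round_up_to_alignment(data_width, tag_id_width); // pad before the tag id
    width += tag_id_width;                                           // the tag id itself
    round_up_to_alignment(width, tag_align)                          // pad to the union alignment
}

fn main() {
    // [One I64 Bool, Empty]: payload (I64, Bool) is 16 bytes, 8-aligned, u8 tag -> 24 bytes.
    assert_eq!(tagged_union_width(16, 8, 1), 24);
    // [One I128 Bool, Empty]: payload is 32 bytes, 16-aligned, u8 tag -> 48 bytes.
    assert_eq!(tagged_union_width(32, 16, 1), 48);
}
```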


@@ -2,10 +2,10 @@ use crate::debug_info_init;
 use crate::llvm::bitcode::call_void_bitcode_fn;
 use crate::llvm::build::{
     add_func, cast_basic_basic, get_tag_id, tag_pointer_clear_tag_id, use_roc_value, Env,
-    FAST_CALL_CONV, TAG_DATA_INDEX, TAG_ID_INDEX,
+    FAST_CALL_CONV,
 };
 use crate::llvm::build_list::{incrementing_elem_loop, list_len, load_list};
-use crate::llvm::convert::basic_type_from_layout;
+use crate::llvm::convert::{basic_type_from_layout, RocUnion};
 use bumpalo::collections::Vec;
 use inkwell::basic_block::BasicBlock;
 use inkwell::module::Linkage;
@@ -1597,7 +1597,7 @@ fn modify_refcount_union_help<'a, 'ctx, 'env>(
     // read the tag_id
     let tag_id_ptr = env
         .builder
-        .build_struct_gep(arg_ptr, TAG_ID_INDEX, "tag_id_ptr")
+        .build_struct_gep(arg_ptr, RocUnion::TAG_ID_INDEX, "tag_id_ptr")
         .unwrap();

     let tag_id = env
@@ -1634,7 +1634,7 @@ fn modify_refcount_union_help<'a, 'ctx, 'env>(
     debug_assert!(wrapper_type.is_struct_type());
     let opaque_tag_data_ptr = env
         .builder
-        .build_struct_gep(arg_ptr, TAG_DATA_INDEX, "field_ptr")
+        .build_struct_gep(arg_ptr, RocUnion::TAG_DATA_INDEX, "field_ptr")
         .unwrap();

     let cast_tag_data_pointer = env.builder.build_pointer_cast(


@@ -253,8 +253,6 @@ impl<'a> WasmBackend<'a> {
             .to_symbol_string(symbol, self.interns);
         let name = String::from_str_in(&name, self.env.arena).into_bump_str();

-        // dbg!(name);
-
         self.proc_lookup.push(ProcLookupData {
             name: symbol,
             layout,
@@ -1595,10 +1593,9 @@ impl<'a> WasmBackend<'a> {
         // Store the tag ID (if any)
         if stores_tag_id_as_data {
-            let id_offset =
-                data_offset + union_layout.data_size_without_tag_id(TARGET_INFO).unwrap();
-            let id_align = union_layout.tag_id_builtin().alignment_bytes(TARGET_INFO);
+            let id_offset = data_offset + union_layout.tag_id_offset(TARGET_INFO).unwrap();
+            let id_align = union_layout.discriminant().alignment_bytes();
             let id_align = Align::from(id_align);

             self.code_builder.get_local(local_id);
@@ -1679,22 +1676,18 @@ impl<'a> WasmBackend<'a> {
         };

         if union_layout.stores_tag_id_as_data(TARGET_INFO) {
-            let id_offset = union_layout.data_size_without_tag_id(TARGET_INFO).unwrap();
-            let id_align = union_layout.tag_id_builtin().alignment_bytes(TARGET_INFO);
+            let id_offset = union_layout.tag_id_offset(TARGET_INFO).unwrap();
+            let id_align = union_layout.discriminant().alignment_bytes();
             let id_align = Align::from(id_align);

             self.storage
                 .load_symbols(&mut self.code_builder, &[structure]);

-            match union_layout.tag_id_builtin() {
-                Builtin::Bool | Builtin::Int(IntWidth::U8) => {
-                    self.code_builder.i32_load8_u(id_align, id_offset)
-                }
-                Builtin::Int(IntWidth::U16) => self.code_builder.i32_load16_u(id_align, id_offset),
-                Builtin::Int(IntWidth::U32) => self.code_builder.i32_load(id_align, id_offset),
-                Builtin::Int(IntWidth::U64) => self.code_builder.i64_load(id_align, id_offset),
-                x => internal_error!("Unexpected layout for tag union id {:?}", x),
+            use roc_mono::layout::Discriminant::*;
+            match union_layout.discriminant() {
+                U0 | U1 | U8 => self.code_builder.i32_load8_u(id_align, id_offset),
+                U16 => self.code_builder.i32_load16_u(id_align, id_offset),
             }
         } else if union_layout.stores_tag_id_in_pointer(TARGET_INFO) {
             self.storage


@@ -444,38 +444,28 @@ impl<'a> UnionLayout<'a> {
         }
     }

-    pub fn discriminant_size(num_tags: usize) -> IntWidth {
-        if num_tags <= u8::MAX as usize {
-            IntWidth::U8
-        } else if num_tags <= u16::MAX as usize {
-            IntWidth::U16
-        } else {
-            panic!("tag union is too big")
-        }
-    }
-
-    pub fn tag_id_builtin(&self) -> Builtin<'a> {
+    pub fn discriminant(&self) -> Discriminant {
         match self {
-            UnionLayout::NonRecursive(tags) => {
-                let union_size = tags.len();
-                Builtin::Int(Self::discriminant_size(union_size))
-            }
-            UnionLayout::Recursive(tags) => {
-                let union_size = tags.len();
-                Builtin::Int(Self::discriminant_size(union_size))
-            }
+            UnionLayout::NonRecursive(tags) => Discriminant::from_number_of_tags(tags.len()),
+            UnionLayout::Recursive(tags) => Discriminant::from_number_of_tags(tags.len()),
             UnionLayout::NullableWrapped { other_tags, .. } => {
-                Builtin::Int(Self::discriminant_size(other_tags.len() + 1))
+                Discriminant::from_number_of_tags(other_tags.len() + 1)
             }
-            UnionLayout::NonNullableUnwrapped(_) => Builtin::Bool,
-            UnionLayout::NullableUnwrapped { .. } => Builtin::Bool,
+            UnionLayout::NonNullableUnwrapped(_) => Discriminant::from_number_of_tags(2),
+            UnionLayout::NullableUnwrapped { .. } => Discriminant::from_number_of_tags(1),
         }
     }

     pub fn tag_id_layout(&self) -> Layout<'a> {
-        Layout::Builtin(self.tag_id_builtin())
+        // TODO is it beneficial to return a more specific layout?
+        // e.g. Layout::bool() and Layout::VOID
+        match self.discriminant() {
+            Discriminant::U0 => Layout::u8(),
+            Discriminant::U1 => Layout::u8(),
+            Discriminant::U8 => Layout::u8(),
+            Discriminant::U16 => Layout::u16(),
+        }
     }

     fn stores_tag_id_in_pointer_bits(tags: &[&[Layout<'a>]], target_info: TargetInfo) -> bool {
@ -561,13 +551,30 @@ impl<'a> UnionLayout<'a> {
/// Size of the data in memory, whether it's stack or heap (for non-null tag ids) /// Size of the data in memory, whether it's stack or heap (for non-null tag ids)
pub fn data_size_and_alignment(&self, target_info: TargetInfo) -> (u32, u32) { pub fn data_size_and_alignment(&self, target_info: TargetInfo) -> (u32, u32) {
let id_data_layout = if self.stores_tag_id_as_data(target_info) { let (data_width, data_align) = self.data_size_and_alignment_help_match(target_info);
Some(self.tag_id_layout())
} else {
None
};
self.data_size_and_alignment_help_match(id_data_layout, target_info) if self.stores_tag_id_as_data(target_info) {
use Discriminant::*;
match self.discriminant() {
U0 => (round_up_to_alignment(data_width, data_align), data_align),
U1 | U8 => (
round_up_to_alignment(data_width + 1, data_align),
data_align,
),
U16 => {
// first, round up the data so the tag id is well-aligned;
// then add the tag id width, and make sure the whole extends
// to the next alignment multiple
let tag_align = data_align.max(2);
let tag_width =
round_up_to_alignment(round_up_to_alignment(data_width, 2) + 2, tag_align);
(tag_width, tag_align)
}
}
} else {
(data_width, data_align)
}
} }
/// Size of the data before the tag_id, if it exists. /// Size of the data before the tag_id, if it exists.
@ -577,92 +584,49 @@ impl<'a> UnionLayout<'a> {
return None; return None;
}; };
Some(self.data_size_and_alignment_help_match(None, target_info).0) Some(self.data_size_and_alignment_help_match(target_info).0)
} }
fn data_size_and_alignment_help_match( fn data_size_and_alignment_help_match(&self, target_info: TargetInfo) -> (u32, u32) {
&self,
id_data_layout: Option<Layout>,
target_info: TargetInfo,
) -> (u32, u32) {
match self { match self {
Self::NonRecursive(tags) => { Self::NonRecursive(tags) => Layout::stack_size_and_alignment_slices(tags, target_info),
Self::data_size_and_alignment_help(tags, id_data_layout, target_info) Self::Recursive(tags) => Layout::stack_size_and_alignment_slices(tags, target_info),
}
Self::Recursive(tags) => {
Self::data_size_and_alignment_help(tags, id_data_layout, target_info)
}
Self::NonNullableUnwrapped(fields) => { Self::NonNullableUnwrapped(fields) => {
Self::data_size_and_alignment_help(&[fields], id_data_layout, target_info) Layout::stack_size_and_alignment_slices(&[fields], target_info)
} }
Self::NullableWrapped { other_tags, .. } => { Self::NullableWrapped { other_tags, .. } => {
Self::data_size_and_alignment_help(other_tags, id_data_layout, target_info) Layout::stack_size_and_alignment_slices(other_tags, target_info)
} }
Self::NullableUnwrapped { other_fields, .. } => { Self::NullableUnwrapped { other_fields, .. } => {
Self::data_size_and_alignment_help(&[other_fields], id_data_layout, target_info) Layout::stack_size_and_alignment_slices(&[other_fields], target_info)
} }
} }
} }
fn data_size_and_alignment_help(
variant_field_layouts: &[&[Layout]],
id_data_layout: Option<Layout>,
target_info: TargetInfo,
) -> (u32, u32) {
let mut size = 0;
let mut alignment_bytes = 0;
for field_layouts in variant_field_layouts {
let mut data = Layout::struct_no_name_order(field_layouts);
let fields_and_id;
if let Some(id_layout) = id_data_layout {
fields_and_id = [data, id_layout];
data = Layout::struct_no_name_order(&fields_and_id);
}
let (variant_size, variant_alignment) = data.stack_size_and_alignment(target_info);
alignment_bytes = alignment_bytes.max(variant_alignment);
size = size.max(variant_size);
}
(size, alignment_bytes)
}
pub fn tag_id_offset(&self, target_info: TargetInfo) -> Option<u32> { pub fn tag_id_offset(&self, target_info: TargetInfo) -> Option<u32> {
use UnionLayout::*; match self {
UnionLayout::NonRecursive(tags)
if let NonNullableUnwrapped(_) | NullableUnwrapped { .. } = self { | UnionLayout::Recursive(tags)
return None; | UnionLayout::NullableWrapped {
other_tags: tags, ..
} => Some(Self::tag_id_offset_help(tags, target_info)),
UnionLayout::NonNullableUnwrapped(_) | UnionLayout::NullableUnwrapped { .. } => None,
} }
}
let data_width = self.data_size_and_alignment_help_match(None, target_info).0; fn tag_id_offset_help(layouts: &[&[Layout]], target_info: TargetInfo) -> u32 {
let (data_width, data_align) =
Layout::stack_size_and_alignment_slices(layouts, target_info);
// current, broken logic round_up_to_alignment(data_width, data_align)
if data_width > 8 {
Some(round_up_to_alignment(data_width, 8))
} else {
Some(data_width)
}
} }
/// Very important to use this when doing a memcpy! /// Very important to use this when doing a memcpy!
fn stack_size_without_alignment(&self, target_info: TargetInfo) -> u32 { fn stack_size_without_alignment(&self, target_info: TargetInfo) -> u32 {
match self { match self {
UnionLayout::NonRecursive(tags) => { UnionLayout::NonRecursive(_) => {
let id_layout = self.tag_id_layout(); let (width, align) = self.data_size_and_alignment(target_info);
round_up_to_alignment(width, align)
let mut size = 0;
for field_layouts in tags.iter() {
let fields = Layout::struct_no_name_order(field_layouts);
let fields_and_id = [fields, id_layout];
let data = Layout::struct_no_name_order(&fields_and_id);
size = size.max(data.stack_size_without_alignment(target_info));
}
size
} }
UnionLayout::Recursive(_) UnionLayout::Recursive(_)
| UnionLayout::NonNullableUnwrapped(_) | UnionLayout::NonNullableUnwrapped(_)
@ -672,6 +636,39 @@ impl<'a> UnionLayout<'a> {
} }
} }
pub enum Discriminant {
U0,
U1,
U8,
U16,
}
impl Discriminant {
pub const fn from_number_of_tags(tags: usize) -> Self {
match tags {
0 => Discriminant::U0,
1 => Discriminant::U0,
2 => Discriminant::U1,
3..=255 => Discriminant::U8,
256..=65_535 => Discriminant::U16,
_ => panic!("discriminant too large"),
}
}
pub const fn stack_size(&self) -> u32 {
match self {
Discriminant::U0 => 0,
Discriminant::U1 => 1,
Discriminant::U8 => 1,
Discriminant::U16 => 2,
}
}
pub const fn alignment_bytes(&self) -> u32 {
self.stack_size()
}
}
/// Custom type so we can get the numeric representation of a symbol in tests (so `#UserApp.3` /// Custom type so we can get the numeric representation of a symbol in tests (so `#UserApp.3`
/// instead of `UserApp.foo`). The pretty name is not reliable when running many tests /// instead of `UserApp.foo`). The pretty name is not reliable when running many tests
/// concurrently. The number does not change and will give a reliable output. /// concurrently. The number does not change and will give a reliable output.
@@ -1208,10 +1205,16 @@ impl<'a, 'b> Env<'a, 'b> {
 }

 pub const fn round_up_to_alignment(width: u32, alignment: u32) -> u32 {
-    if alignment != 0 && width % alignment > 0 {
-        width + alignment - (width % alignment)
-    } else {
-        width
+    match alignment {
+        0 => width,
+        1 => width,
+        _ => {
+            if width % alignment > 0 {
+                width + alignment - (width % alignment)
+            } else {
+                width
+            }
+        }
     }
 }
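
The helper now matches on the alignment first, peeling off the 0 and 1 cases before doing any modulo arithmetic; the rounding itself is unchanged. A few spot checks of the values the layout code in this PR relies on:

```rust
const fn round_up_to_alignment(width: u32, alignment: u32) -> u32 {
    match alignment {
        0 | 1 => width,
        _ => {
            if width % alignment > 0 {
                width + alignment - (width % alignment)
            } else {
                width
            }
        }
    }
}

fn main() {
    assert_eq!(round_up_to_alignment(9, 8), 16);   // (I64, Bool) payload rounds to 16
    assert_eq!(round_up_to_alignment(16, 8), 16);  // already a multiple: unchanged
    assert_eq!(round_up_to_alignment(17, 16), 32); // (I128, Bool) payload rounds to 32
    assert_eq!(round_up_to_alignment(5, 0), 5);    // 0 and 1 alignments pass through
}
```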
@@ -1514,15 +1517,15 @@ impl<'a> Layout<'a> {
                     })
                     .max();

-                let tag_id_builtin = variant.tag_id_builtin();
+                let discriminant = variant.discriminant();
                 match max_alignment {
                     Some(align) => round_up_to_alignment(
-                        align.max(tag_id_builtin.alignment_bytes(target_info)),
-                        tag_id_builtin.alignment_bytes(target_info),
+                        align.max(discriminant.alignment_bytes()),
+                        discriminant.alignment_bytes(),
                     ),
                     None => {
                         // none of the tags had any payload, but the tag id still contains information
-                        tag_id_builtin.alignment_bytes(target_info)
+                        discriminant.alignment_bytes()
                     }
                 }
             }
@ -1556,6 +1559,29 @@ impl<'a> Layout<'a> {
} }
} }
pub fn stack_size_and_alignment_slices(
slices: &[&[Self]],
target_info: TargetInfo,
) -> (u32, u32) {
let mut data_align = 1;
let mut data_width = 0;
for tag in slices {
let mut total = 0;
for layout in tag.iter() {
let (stack_size, alignment) = layout.stack_size_and_alignment(target_info);
total += stack_size;
data_align = data_align.max(alignment);
}
data_width = data_width.max(total);
}
data_width = round_up_to_alignment(data_width, data_align);
(data_width, data_align)
}
pub fn is_refcounted(&self) -> bool { pub fn is_refcounted(&self) -> bool {
use self::Builtin::*; use self::Builtin::*;
use Layout::*; use Layout::*;
@ -3277,6 +3303,23 @@ impl<'a> LayoutIds<'a> {
} }
} }
/// Compare two fields when sorting them for code gen.
/// This is called by both code gen and bindgen, so that
/// their field orderings agree.
#[inline(always)]
pub fn cmp_fields<L: Ord>(
label1: &L,
layout1: &Layout<'_>,
label2: &L,
layout2: &Layout<'_>,
target_info: TargetInfo,
) -> Ordering {
let size1 = layout1.alignment_bytes(target_info);
let size2 = layout2.alignment_bytes(target_info);
size2.cmp(&size1).then(label1.cmp(label2))
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
@@ -3308,23 +3351,12 @@ mod test {
         let layout = Layout::Union(union_layout);
         let target_info = TargetInfo::default_x86_64();

-        assert_eq!(layout.stack_size_without_alignment(target_info), 5);
-    }
-
-    #[test]
-    fn void_stack_size() {
-        let target_info = TargetInfo::default_x86_64();
-        assert_eq!(Layout::VOID.stack_size(target_info), 0);
+        assert_eq!(layout.stack_size_without_alignment(target_info), 8);
     }
 }
-
-/// Compare two fields when sorting them for code gen.
-/// This is called by both code gen and bindgen, so that
-/// their field orderings agree.
-#[inline(always)]
-pub fn cmp_fields<L: Ord>(
-    label1: &L,
-    layout1: &Layout<'_>,
-    label2: &L,
-    layout2: &Layout<'_>,
-    target_info: TargetInfo,
-) -> Ordering {
-    let size1 = layout1.alignment_bytes(target_info);
-    let size2 = layout2.alignment_bytes(target_info);
-
-    size2.cmp(&size1).then(label1.cmp(label2))
-}


@@ -33,6 +33,7 @@ roc_can = { path = "../can" }
 roc_parse = { path = "../parse" }
 roc_build = { path = "../build", features = ["target-aarch64", "target-x86_64", "target-wasm32"] }
 roc_target = { path = "../roc_target" }
+roc_error_macros = { path = "../../error_macros" }
 roc_std = { path = "../../roc_std" }
 roc_debug_flags = {path="../debug_flags"}
 bumpalo = { version = "3.8.0", features = ["collections"] }


@@ -308,15 +308,17 @@ fn roc_result_after_on_err() {
 #[test]
 #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
 fn roc_result_after_err() {
-    assert_evals_to!(indoc!(
-        r#"
+    assert_evals_to!(
+        indoc!(
+            r#"
             result : Result Str I64
             result =
                 Result.onErr (Ok "already a string") \num ->
                     if num < 0 then Ok "negative!" else Err -num

             result
-        "#),
+            "#
+        ),
         RocResult::ok(RocStr::from("already a string")),
         RocResult<RocStr, i64>
     );


@@ -11,7 +11,7 @@ use crate::helpers::wasm::assert_evals_to;
 use indoc::indoc;

 #[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
-use roc_std::{RocList, RocStr};
+use roc_std::{RocList, RocStr, U128};

 #[test]
 fn width_and_alignment_u8_u8() {
@@ -982,8 +982,8 @@ fn alignment_in_multi_tag_construction_two() {
            #"
         ),
-        ((32i64, true), 1),
-        ((i64, bool), u8)
+        ((32i64, true), 1, [0; 7]),
+        ((i64, bool), u8, [u8; 7])
     );
 }
@@ -999,8 +999,8 @@ fn alignment_in_multi_tag_construction_three() {
            x
            #"
         ),
-        ((32i64, true, 2u8), 1),
-        ((i64, bool, u8), u8)
+        ((32i64, true, 2u8), 1, [0; 7]),
+        ((i64, bool, u8), u8, [u8; 7])
     );
 }
@@ -1862,6 +1862,23 @@ fn issue_3560_newtype_tag_constructor_has_nested_constructor_with_no_payload() {
     )
 }

+#[test]
+#[cfg(any(feature = "gen-llvm"))]
+fn alignment_i128() {
+    assert_evals_to!(
+        indoc!(
+            r"#
+            x : [One I128 Bool, Empty]
+            x = One 42 (1 == 1)
+
+            x
+            #"
+        ),
+        // NOTE: roc_std::U128 is always aligned to 16, unlike rust's u128
+        ((U128::from(42), true), 1, [0; 15]),
+        ((U128, bool), u8, [u8; 15])
+    );
+}
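
The extra tuple element in these expectations is the explicit tail padding of the new layout. On x86-64, `[One I64 Bool, Empty]` is a 16-byte payload, a one-byte tag id, and 7 bytes of padding; the `I128` version is 32 + 1 + 15. A hedged host-side sketch of the 24-byte case (illustrative only; the real tests compare against the nested tuples shown above):

```rust
#[repr(C)]
struct OneI64Bool {
    payload: (i64, bool), // 16 bytes: 8 for the I64, 1 for the Bool, 7 of struct padding
    tag_id: u8,           // 1 byte, directly after the padded payload
    _tail: [u8; 7],       // padding back up to the 8-byte union alignment
}

fn main() {
    assert_eq!(std::mem::size_of::<OneI64Bool>(), 24);
    assert_eq!(std::mem::align_of::<OneI64Bool>(), 8);
}
```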
 #[test]
 #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
 #[should_panic(expected = r#"Roc failed with message: "Erroneous: Expr::Closure""#)]


@@ -1,4 +1,5 @@
-use roc_gen_wasm::wasm32_sized::Wasm32Sized;
+use roc_error_macros::internal_error;
+use roc_gen_wasm::{round_up_to_alignment, wasm32_sized::Wasm32Sized};
 use roc_std::{RocDec, RocList, RocOrder, RocResult, RocStr};
 use std::convert::TryInto;
@@ -99,7 +100,9 @@ where
     E: FromWasm32Memory + Wasm32Sized,
 {
     fn decode(memory: &[u8], offset: u32) -> Self {
-        let tag_offset = Ord::max(T::ACTUAL_WIDTH, E::ACTUAL_WIDTH);
+        let data_align = Ord::max(T::ALIGN_OF_WASM, E::ALIGN_OF_WASM);
+        let data_width = Ord::max(T::ACTUAL_WIDTH, E::ACTUAL_WIDTH);
+        let tag_offset = round_up_to_alignment!(data_width, data_align);

         let tag = <u8 as FromWasm32Memory>::decode(memory, offset + tag_offset as u32);

         if tag == 1 {
             let value = <T as FromWasm32Memory>::decode(memory, offset);
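
With padding after the payload, the tag of a decoded `RocResult` no longer sits at `max(ok, err)` bytes; it sits at that width rounded up to the payload alignment. A standalone sketch of the corrected offset computation (plain functions rather than the `Wasm32Sized` trait; the wasm32 `Str` figures are an assumption: 12 bytes, 4-aligned):

```rust
fn round_up_to_alignment(width: u32, alignment: u32) -> u32 {
    if alignment <= 1 || width % alignment == 0 {
        width
    } else {
        width + alignment - (width % alignment)
    }
}

/// Offset of the result tag, given (width, alignment) of the ok and err payloads.
fn result_tag_offset(ok: (u32, u32), err: (u32, u32)) -> u32 {
    let data_width = ok.0.max(err.0);
    let data_align = ok.1.max(err.1);
    round_up_to_alignment(data_width, data_align)
}

fn main() {
    // e.g. Result Str I64 on wasm32: 12-byte / 4-aligned Str vs. 8-byte / 8-aligned I64.
    // The old code would have put the tag at offset 12; the rounded offset is 16.
    assert_eq!(result_tag_offset((12, 4), (8, 8)), 16);
}
```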


@@ -238,16 +238,12 @@ fn tag_id_from_data<'a, M: ReplAppMemory>(
         .unwrap();

     let tag_id_addr = data_addr + offset as usize;

-    match union_layout.tag_id_builtin() {
-        Builtin::Bool => mem.deref_bool(tag_id_addr) as i64,
-        Builtin::Int(IntWidth::U8) => mem.deref_u8(tag_id_addr) as i64,
-        Builtin::Int(IntWidth::U16) => mem.deref_u16(tag_id_addr) as i64,
-        Builtin::Int(IntWidth::U64) => {
-            // used by non-recursive unions at the
-            // moment, remove if that is no longer the case
-            mem.deref_i64(tag_id_addr)
-        }
-        _ => unreachable!("invalid tag id layout"),
+    use roc_mono::layout::Discriminant::*;
+    match union_layout.discriminant() {
+        U0 => 0,
+        U1 => mem.deref_bool(tag_id_addr) as i64,
+        U8 => mem.deref_u8(tag_id_addr) as i64,
+        U16 => mem.deref_u16(tag_id_addr) as i64,
     }
 }