Merge branch 'Frame-Limited' into joinpoint-reuse

J.Teeuwissen 2023-03-30 20:38:30 +02:00
commit 36f4969a5c
No known key found for this signature in database
GPG key ID: DB5F7A1ED8D478AD
591 changed files with 42667 additions and 28227 deletions


@ -100,8 +100,14 @@ pub fn infer_borrow<'a>(
// host-exposed functions must always own their arguments.
let is_host_exposed = host_exposed_procs.contains(&key.0);
let param_offset = param_map.get_param_offset(key.0, key.1);
env.collect_proc(&mut param_map, proc, param_offset, is_host_exposed);
let param_offset = param_map.get_param_offset(interner, key.0, key.1);
env.collect_proc(
interner,
&mut param_map,
proc,
param_offset,
is_host_exposed,
);
}
if !env.modified {
@ -167,6 +173,7 @@ impl<'a> DeclarationToIndex<'a> {
fn get_param_offset(
&self,
interner: &STLayoutInterner<'a>,
needle_symbol: Symbol,
needle_layout: ProcLayout<'a>,
) -> ParamOffset {
@ -181,12 +188,14 @@ impl<'a> DeclarationToIndex<'a> {
.elements
.iter()
.filter_map(|(Declaration { symbol, layout }, _)| {
(*symbol == needle_symbol).then_some(layout)
(*symbol == needle_symbol)
.then_some(layout)
.map(|l| l.dbg_deep(interner))
})
.collect::<std::vec::Vec<_>>();
unreachable!(
"symbol/layout {:?} {:#?} combo must be in DeclarationToIndex\nHowever {} similar layouts were found:\n{:#?}",
needle_symbol, needle_layout, similar.len(), similar
needle_symbol, needle_layout.dbg_deep(interner), similar.len(), similar,
)
}
}
@ -206,13 +215,24 @@ pub struct ParamMap<'a> {
}
impl<'a> ParamMap<'a> {
pub fn get_param_offset(&self, symbol: Symbol, layout: ProcLayout<'a>) -> ParamOffset {
self.declaration_to_index.get_param_offset(symbol, layout)
pub fn get_param_offset(
&self,
interner: &STLayoutInterner<'a>,
symbol: Symbol,
layout: ProcLayout<'a>,
) -> ParamOffset {
self.declaration_to_index
.get_param_offset(interner, symbol, layout)
}
pub fn get_symbol(&self, symbol: Symbol, layout: ProcLayout<'a>) -> Option<&[Param<'a>]> {
pub fn get_symbol(
&self,
interner: &STLayoutInterner<'a>,
symbol: Symbol,
layout: ProcLayout<'a>,
) -> Option<&[Param<'a>]> {
// let index: usize = self.declaration_to_index[&(symbol, layout)].into();
let index: usize = self.get_param_offset(symbol, layout).into();
let index: usize = self.get_param_offset(interner, symbol, layout).into();
self.declarations.get(index..index + layout.arguments.len())
}
@ -292,7 +312,7 @@ impl<'a> ParamMap<'a> {
// return;
// }
let index: usize = self.get_param_offset(key.0, key.1).into();
let index: usize = self.get_param_offset(interner, key.0, key.1).into();
for (i, param) in Self::init_borrow_args(arena, interner, proc.args)
.iter()
@ -312,7 +332,7 @@ impl<'a> ParamMap<'a> {
proc: &Proc<'a>,
key: (Symbol, ProcLayout<'a>),
) {
let index: usize = self.get_param_offset(key.0, key.1).into();
let index: usize = self.get_param_offset(interner, key.0, key.1).into();
for (i, param) in Self::init_borrow_args_always_owned(arena, proc.args)
.iter()
@ -534,7 +554,13 @@ impl<'a> BorrowInfState<'a> {
///
/// and determines whether z and which of the symbols used in e
/// must be taken as owned parameters
fn collect_call(&mut self, param_map: &mut ParamMap<'a>, z: Symbol, e: &crate::ir::Call<'a>) {
fn collect_call(
&mut self,
interner: &STLayoutInterner<'a>,
param_map: &mut ParamMap<'a>,
z: Symbol,
e: &crate::ir::Call<'a>,
) {
use crate::ir::CallType::*;
let crate::ir::Call {
@ -553,7 +579,7 @@ impl<'a> BorrowInfState<'a> {
// get the borrow signature of the applied function
let ps = param_map
.get_symbol(name.name(), top_level)
.get_symbol(interner, name.name(), top_level)
.expect("function is defined");
// the return value will be owned
@ -595,11 +621,14 @@ impl<'a> BorrowInfState<'a> {
niche: passed_function.name.niche(),
};
let function_ps =
match param_map.get_symbol(passed_function.name.name(), closure_layout) {
Some(function_ps) => function_ps,
None => unreachable!(),
};
let function_ps = match param_map.get_symbol(
interner,
passed_function.name.name(),
closure_layout,
) {
Some(function_ps) => function_ps,
None => unreachable!(),
};
match op {
ListMap { xs } => {
@ -671,7 +700,13 @@ impl<'a> BorrowInfState<'a> {
}
}
fn collect_expr(&mut self, param_map: &mut ParamMap<'a>, z: Symbol, e: &Expr<'a>) {
fn collect_expr(
&mut self,
interner: &STLayoutInterner<'a>,
param_map: &mut ParamMap<'a>,
z: Symbol,
e: &Expr<'a>,
) {
use Expr::*;
match e {
@ -724,7 +759,7 @@ impl<'a> BorrowInfState<'a> {
self.own_var(z);
}
Call(call) => self.collect_call(param_map, z, call),
Call(call) => self.collect_call(interner, param_map, z, call),
Literal(_) | RuntimeErrorFunction(_) => {}
@ -757,6 +792,7 @@ impl<'a> BorrowInfState<'a> {
#[allow(clippy::many_single_char_names)]
fn preserve_tail_call(
&mut self,
interner: &STLayoutInterner<'a>,
param_map: &mut ParamMap<'a>,
x: Symbol,
v: &Expr<'a>,
@ -782,7 +818,7 @@ impl<'a> BorrowInfState<'a> {
if self.current_proc == g.name() && x == *z {
// anonymous functions (for which the ps may not be known)
// can never be tail-recursive, so this is fine
if let Some(ps) = param_map.get_symbol(g.name(), top_level) {
if let Some(ps) = param_map.get_symbol(interner, g.name(), top_level) {
self.own_params_using_args(ys, ps)
}
}
@ -801,7 +837,12 @@ impl<'a> BorrowInfState<'a> {
}
}
fn collect_stmt(&mut self, param_map: &mut ParamMap<'a>, stmt: &Stmt<'a>) {
fn collect_stmt(
&mut self,
interner: &STLayoutInterner<'a>,
param_map: &mut ParamMap<'a>,
stmt: &Stmt<'a>,
) {
use Stmt::*;
match stmt {
@ -813,11 +854,11 @@ impl<'a> BorrowInfState<'a> {
} => {
let old = self.param_set.clone();
self.update_param_set(ys);
self.collect_stmt(param_map, v);
self.collect_stmt(interner, param_map, v);
self.param_set = old;
self.update_param_map_join_point(param_map, *j);
self.collect_stmt(param_map, b);
self.collect_stmt(interner, param_map, b);
}
Let(x, v, _, mut b) => {
@ -830,17 +871,17 @@ impl<'a> BorrowInfState<'a> {
stack.push((*symbol, expr));
}
self.collect_stmt(param_map, b);
self.collect_stmt(interner, param_map, b);
let mut it = stack.into_iter().rev();
// collect the final expr, and see if we need to preserve a tail call
let (x, v) = it.next().unwrap();
self.collect_expr(param_map, x, v);
self.preserve_tail_call(param_map, x, v, b);
self.collect_expr(interner, param_map, x, v);
self.preserve_tail_call(interner, param_map, x, v, b);
for (x, v) in it {
self.collect_expr(param_map, x, v);
self.collect_expr(interner, param_map, x, v);
}
}
@ -859,21 +900,21 @@ impl<'a> BorrowInfState<'a> {
..
} => {
for (_, _, b) in branches.iter() {
self.collect_stmt(param_map, b);
self.collect_stmt(interner, param_map, b);
}
self.collect_stmt(param_map, default_branch.1);
self.collect_stmt(interner, param_map, default_branch.1);
}
Dbg { remainder, .. } => {
self.collect_stmt(param_map, remainder);
self.collect_stmt(interner, param_map, remainder);
}
Expect { remainder, .. } => {
self.collect_stmt(param_map, remainder);
self.collect_stmt(interner, param_map, remainder);
}
ExpectFx { remainder, .. } => {
self.collect_stmt(param_map, remainder);
self.collect_stmt(interner, param_map, remainder);
}
Refcounting(_, _) => unreachable!("these have not been introduced yet"),
@ -891,6 +932,7 @@ impl<'a> BorrowInfState<'a> {
fn collect_proc(
&mut self,
interner: &STLayoutInterner<'a>,
param_map: &mut ParamMap<'a>,
proc: &Proc<'a>,
param_offset: ParamOffset,
@ -912,7 +954,7 @@ impl<'a> BorrowInfState<'a> {
owned_entry.extend(params.iter().map(|p| p.symbol));
}
self.collect_stmt(param_map, &proc.body);
self.collect_stmt(interner, param_map, &proc.body);
self.update_param_map_declaration(param_map, param_offset, proc.args.len());
self.param_set = old;
@ -974,6 +1016,8 @@ pub fn lowlevel_borrow_signature(arena: &Bump, op: LowLevel) -> &[bool] {
ListSublist => arena.alloc_slice_copy(&[owned, irrelevant, irrelevant]),
ListDropAt => arena.alloc_slice_copy(&[owned, irrelevant]),
ListSwap => arena.alloc_slice_copy(&[owned, irrelevant, irrelevant]),
ListReleaseExcessCapacity => arena.alloc_slice_copy(&[owned]),
StrReleaseExcessCapacity => arena.alloc_slice_copy(&[owned]),
Eq | NotEq => arena.alloc_slice_copy(&[borrowed, borrowed]),
@ -984,13 +1028,33 @@ pub fn lowlevel_borrow_signature(arena: &Bump, op: LowLevel) -> &[bool] {
| NumPow | NumPowInt | NumBitwiseAnd | NumBitwiseXor | NumBitwiseOr | NumShiftLeftBy
| NumShiftRightBy | NumShiftRightZfBy => arena.alloc_slice_copy(&[irrelevant, irrelevant]),
NumToStr | NumAbs | NumNeg | NumSin | NumCos | NumSqrtUnchecked | NumLogUnchecked
| NumRound | NumCeiling | NumFloor | NumToFrac | Not | NumIsFinite | NumAtan | NumAcos
| NumAsin | NumIntCast | NumToIntChecked | NumToFloatCast | NumToFloatChecked => {
arena.alloc_slice_copy(&[irrelevant])
}
NumToStr
| NumAbs
| NumNeg
| NumSin
| NumCos
| NumSqrtUnchecked
| NumLogUnchecked
| NumRound
| NumCeiling
| NumFloor
| NumToFrac
| Not
| NumIsFinite
| NumAtan
| NumAcos
| NumAsin
| NumIntCast
| NumToIntChecked
| NumToFloatCast
| NumToFloatChecked
| NumCountLeadingZeroBits
| NumCountTrailingZeroBits
| NumCountOneBits => arena.alloc_slice_copy(&[irrelevant]),
NumBytesToU16 => arena.alloc_slice_copy(&[borrowed, irrelevant]),
NumBytesToU32 => arena.alloc_slice_copy(&[borrowed, irrelevant]),
NumBytesToU64 => arena.alloc_slice_copy(&[borrowed, irrelevant]),
NumBytesToU128 => arena.alloc_slice_copy(&[borrowed, irrelevant]),
StrStartsWith | StrEndsWith => arena.alloc_slice_copy(&[borrowed, borrowed]),
StrStartsWithScalar => arena.alloc_slice_copy(&[borrowed, irrelevant]),
StrFromUtf8Range => arena.alloc_slice_copy(&[owned, irrelevant, irrelevant]),
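
Each signature above has one flag per argument; the borrow pass consults it for a call's arguments to decide which of them must be owned (this is what helpers like `own_var` are ultimately used for). Below is a minimal standalone sketch of that consumption pattern, with a stand-in `Symbol` alias and an `Ownership` enum in place of the module's plain `bool` flags:

```rust
// Standalone sketch, not compiler code: `Symbol` and `Ownership` are stand-ins.
type Symbol = u32;

#[derive(Clone, Copy, PartialEq)]
enum Ownership {
    Owned,
    Borrowed,
}

/// Zip a call's arguments against the op's per-argument flags and report
/// each argument that must be taken as owned.
fn own_args_by_signature(
    signature: &[Ownership],
    arguments: &[Symbol],
    own_var: &mut impl FnMut(Symbol),
) {
    debug_assert_eq!(signature.len(), arguments.len());
    for (arg, ownership) in arguments.iter().zip(signature) {
        if *ownership == Ownership::Owned {
            own_var(*arg);
        }
    }
}

fn main() {
    // Shaped like `ListReleaseExcessCapacity => &[owned]` above.
    let signature = [Ownership::Owned];
    let args: [Symbol; 1] = [42];
    let mut owned = Vec::new();
    own_args_by_signature(&signature, &args, &mut |s| owned.push(s));
    assert_eq!(owned, vec![42]);
    let _ = Ownership::Borrowed; // borrowed positions are simply skipped
}
```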


@ -125,16 +125,6 @@ impl<'a> CodeGenHelp<'a> {
modify: &ModifyRc,
following: &'a Stmt<'a>,
) -> (&'a Stmt<'a>, Vec<'a, (Symbol, ProcLayout<'a>)>) {
if !refcount::is_rc_implemented_yet(layout_interner, layout) {
// Just a warning, so we can decouple backend development from refcounting development.
// When we are closer to completion, we can change it to a panic.
println!(
"WARNING! MEMORY LEAK! Refcounting not yet implemented for Layout {:?}",
layout
);
return (following, Vec::new_in(self.arena));
}
let op = match modify {
ModifyRc::Inc(..) => HelperOp::Inc,
ModifyRc::Dec(_) => HelperOp::Dec,


@ -74,7 +74,7 @@ pub fn refcount_stmt<'a>(
ModifyRc::DecRef(structure) => {
match layout_interner.get(layout) {
// Str has no children, so we might as well do what we normally do and call the helper.
// Str has no children, so Dec is the same as DecRef.
Layout::Builtin(Builtin::Str) => {
ctx.op = HelperOp::Dec;
refcount_stmt(
@ -127,8 +127,6 @@ pub fn refcount_generic<'a>(
layout: InLayout<'a>,
structure: Symbol,
) -> Stmt<'a> {
debug_assert!(is_rc_implemented_yet(layout_interner, layout));
match layout_interner.get(layout) {
Layout::Builtin(Builtin::Int(_) | Builtin::Float(_) | Builtin::Bool | Builtin::Decimal) => {
// Generate a dummy function that immediately returns Unit
@ -141,7 +139,6 @@ pub fn refcount_generic<'a>(
ident_ids,
ctx,
layout_interner,
layout,
elem_layout,
structure,
),
@ -416,44 +413,6 @@ pub fn refcount_reset_proc_body<'a>(
rc_ptr_stmt
}
// Check if refcounting is implemented yet. In the long term, this will be deleted.
// In the short term, it helps us to skip refcounting and let it leak, so we can make
// progress incrementally. Kept in sync with generate_procs using assertions.
pub fn is_rc_implemented_yet<'a, I>(interner: &I, layout: InLayout<'a>) -> bool
where
I: LayoutInterner<'a>,
{
use UnionLayout::*;
match interner.get(layout) {
Layout::Builtin(Builtin::List(elem_layout)) => is_rc_implemented_yet(interner, elem_layout),
Layout::Builtin(_) => true,
Layout::Struct { field_layouts, .. } => field_layouts
.iter()
.all(|l| is_rc_implemented_yet(interner, *l)),
Layout::Union(union_layout) => match union_layout {
NonRecursive(tags) => tags
.iter()
.all(|fields| fields.iter().all(|l| is_rc_implemented_yet(interner, *l))),
Recursive(tags) => tags
.iter()
.all(|fields| fields.iter().all(|l| is_rc_implemented_yet(interner, *l))),
NonNullableUnwrapped(fields) => {
fields.iter().all(|l| is_rc_implemented_yet(interner, *l))
}
NullableWrapped { other_tags, .. } => other_tags
.iter()
.all(|fields| fields.iter().all(|l| is_rc_implemented_yet(interner, *l))),
NullableUnwrapped { other_fields, .. } => other_fields
.iter()
.all(|l| is_rc_implemented_yet(interner, *l)),
},
Layout::LambdaSet(lambda_set) => is_rc_implemented_yet(interner, lambda_set.representation),
Layout::RecursivePointer(_) => true,
Layout::Boxed(_) => true,
}
}
fn rc_return_stmt<'a>(
root: &CodeGenHelp<'a>,
ident_ids: &mut IdentIds,
@ -765,7 +724,6 @@ fn refcount_list<'a>(
ident_ids: &mut IdentIds,
ctx: &mut Context<'a>,
layout_interner: &mut STLayoutInterner<'a>,
layout: InLayout,
elem_layout: InLayout<'a>,
structure: Symbol,
) -> Stmt<'a> {
@ -773,8 +731,7 @@ fn refcount_list<'a>(
let arena = root.arena;
// A "Box" layout (heap pointer to a single list element)
let box_union_layout = UnionLayout::NonNullableUnwrapped(arena.alloc([elem_layout]));
let box_layout = layout_interner.insert(Layout::Union(box_union_layout));
let box_layout = layout_interner.insert(Layout::Boxed(elem_layout));
//
// Check if the list is empty
@ -814,7 +771,7 @@ fn refcount_list<'a>(
//
let rc_ptr = root.create_symbol(ident_ids, "rc_ptr");
let alignment = layout_interner.alignment_bytes(layout);
let elem_alignment = layout_interner.alignment_bytes(elem_layout);
let ret_stmt = rc_return_stmt(root, ident_ids, ctx);
let modify_list = modify_refcount(
@ -822,7 +779,7 @@ fn refcount_list<'a>(
ident_ids,
ctx,
rc_ptr,
alignment,
elem_alignment,
arena.alloc(ret_stmt),
);
@ -845,7 +802,7 @@ fn refcount_list<'a>(
layout_interner,
elem_layout,
LAYOUT_UNIT,
box_union_layout,
box_layout,
len,
elements,
get_rc_and_modify_list,
@ -895,7 +852,7 @@ fn refcount_list_elems<'a>(
layout_interner: &mut STLayoutInterner<'a>,
elem_layout: InLayout<'a>,
ret_layout: InLayout<'a>,
box_union_layout: UnionLayout<'a>,
box_layout: InLayout<'a>,
length: Symbol,
elements: Symbol,
following: Stmt<'a>,
@ -955,17 +912,11 @@ fn refcount_list_elems<'a>(
// Cast integer to box pointer
let box_ptr = root.create_symbol(ident_ids, "box");
let box_layout = layout_interner.insert(Layout::Union(box_union_layout));
let box_stmt = |next| let_lowlevel(arena, box_layout, box_ptr, PtrCast, &[addr], next);
// Dereference the box pointer to get the current element
let elem = root.create_symbol(ident_ids, "elem");
let elem_expr = Expr::UnionAtIndex {
structure: box_ptr,
union_layout: box_union_layout,
tag_id: 0,
index: 0,
};
let elem_expr = Expr::ExprUnbox { symbol: box_ptr };
let elem_stmt = |next| Stmt::Let(elem, elem_expr, elem_layout, next);
//


@ -10,8 +10,7 @@ use crate::{
ModifyRc, Param, Proc, ProcLayout, Stmt,
},
layout::{
Builtin, InLayout, LambdaSet, Layout, LayoutInterner, STLayoutInterner, TagIdIntType,
UnionLayout,
Builtin, InLayout, Layout, LayoutInterner, STLayoutInterner, TagIdIntType, UnionLayout,
},
};
@ -87,7 +86,7 @@ pub enum ProblemKind<'a> {
structure: Symbol,
def_line: usize,
tag_id: u16,
union_layout: UnionLayout<'a>,
union_layout: InLayout<'a>,
},
TagUnionStructIndexOOB {
structure: Symbol,
@ -100,7 +99,7 @@ pub enum ProblemKind<'a> {
structure: Symbol,
def_line: usize,
tag_id: u16,
union_layout: UnionLayout<'a>,
union_layout: InLayout<'a>,
},
UnboxNotABox {
symbol: Symbol,
@ -108,7 +107,7 @@ pub enum ProblemKind<'a> {
},
CreatingTagIdNotInUnion {
tag_id: u16,
union_layout: UnionLayout<'a>,
union_layout: InLayout<'a>,
},
CreateTagPayloadMismatch {
num_needed: usize,
@ -193,11 +192,12 @@ impl<'a, 'r> Ctx<'a, 'r> {
r
}
fn resolve(&mut self, mut layout: InLayout<'a>) -> InLayout<'a> {
fn resolve(&self, mut layout: InLayout<'a>) -> InLayout<'a> {
// Note that we are more aggressive than the usual `runtime_representation`
// here because we need strict equality, and so cannot unwrap lambda sets
// lazily.
loop {
layout = self.interner.chase_recursive_in(layout);
match self.interner.get(layout) {
Layout::LambdaSet(ls) => layout = ls.representation,
_ => return layout,
@ -205,6 +205,12 @@ impl<'a, 'r> Ctx<'a, 'r> {
}
}
fn not_equiv(&mut self, layout1: InLayout<'a>, layout2: InLayout<'a>) -> bool {
!self
.interner
.equiv(self.resolve(layout1), self.resolve(layout2))
}
fn insert(&mut self, symbol: Symbol, layout: InLayout<'a>) {
if let Some((old_line, _)) = self.venv.insert(symbol, (self.line, layout)) {
self.problem(ProblemKind::RedefinedSymbol { symbol, old_line })
@ -237,7 +243,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
use_kind: UseKind,
) {
if let Some(&(def_line, layout)) = self.venv.get(&symbol) {
if self.resolve(layout) != self.resolve(expected_layout) {
if self.not_equiv(layout, expected_layout) {
self.problem(ProblemKind::SymbolUseMismatch {
symbol,
def_layout: layout,
@ -265,7 +271,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
match body {
Stmt::Let(x, e, x_layout, rest) => {
if let Some(e_layout) = self.check_expr(e) {
if self.resolve(e_layout) != self.resolve(*x_layout) {
if self.not_equiv(e_layout, *x_layout) {
self.problem(ProblemKind::SymbolDefMismatch {
symbol: *x,
def_layout: *x_layout,
@ -390,8 +396,9 @@ impl<'a, 'r> Ctx<'a, 'r> {
tag_id,
arguments,
} => {
self.check_tag_expr(tag_layout, tag_id, arguments);
Some(self.interner.insert(Layout::Union(tag_layout)))
let interned_layout = self.interner.insert(Layout::Union(tag_layout));
self.check_tag_expr(interned_layout, tag_layout, tag_id, arguments);
Some(interned_layout)
}
Expr::Struct(syms) => {
for sym in syms.iter() {
@ -415,7 +422,9 @@ impl<'a, 'r> Ctx<'a, 'r> {
tag_id,
union_layout,
index,
} => self.check_union_at_index(structure, union_layout, tag_id, index),
} => self.with_sym_layout(structure, |ctx, _def_line, layout| {
ctx.check_union_at_index(structure, layout, union_layout, tag_id, index)
}),
Expr::Array { elem_layout, elems } => {
for elem in elems.iter() {
match elem {
@ -503,6 +512,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
fn check_union_at_index(
&mut self,
structure: Symbol,
interned_union_layout: InLayout<'a>,
union_layout: UnionLayout<'a>,
tag_id: u16,
index: u64,
@ -517,7 +527,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
structure,
def_line,
tag_id,
union_layout,
union_layout: interned_union_layout,
});
None
}
@ -532,12 +542,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
});
return None;
}
let layout = resolve_recursive_layout(
ctx.arena,
ctx.interner,
payloads[index as usize],
union_layout,
);
let layout = payloads[index as usize];
Some(layout)
}
}
@ -605,12 +610,18 @@ impl<'a, 'r> Ctx<'a, 'r> {
}
}
fn check_tag_expr(&mut self, union_layout: UnionLayout<'a>, tag_id: u16, arguments: &[Symbol]) {
fn check_tag_expr(
&mut self,
interned_union_layout: InLayout<'a>,
union_layout: UnionLayout<'a>,
tag_id: u16,
arguments: &[Symbol],
) {
match get_tag_id_payloads(union_layout, tag_id) {
TagPayloads::IdNotInUnion => {
self.problem(ProblemKind::CreatingTagIdNotInUnion {
tag_id,
union_layout,
union_layout: interned_union_layout,
});
}
TagPayloads::Payloads(payloads) => {
@ -621,13 +632,7 @@ impl<'a, 'r> Ctx<'a, 'r> {
});
}
for (arg, wanted_layout) in arguments.iter().zip(payloads.iter()) {
let wanted_layout = resolve_recursive_layout(
self.arena,
self.interner,
*wanted_layout,
union_layout,
);
self.check_sym_layout(*arg, wanted_layout, UseKind::TagPayloadArg);
self.check_sym_layout(*arg, *wanted_layout, UseKind::TagPayloadArg);
}
}
}
@ -643,94 +648,6 @@ impl<'a, 'r> Ctx<'a, 'r> {
}
}
fn resolve_recursive_layout<'a>(
arena: &'a Bump,
interner: &mut STLayoutInterner<'a>,
layout: InLayout<'a>,
when_recursive: UnionLayout<'a>,
) -> InLayout<'a> {
macro_rules! go {
($lay:expr) => {
resolve_recursive_layout(arena, interner, $lay, when_recursive)
};
}
// TODO check if recursive pointer not in recursive union
let layout = match interner.get(layout) {
Layout::RecursivePointer(_) => Layout::Union(when_recursive),
Layout::Union(union_layout) => match union_layout {
UnionLayout::NonRecursive(payloads) => {
let payloads = payloads.iter().map(|args| {
let args = args.iter().map(|lay| go!(*lay));
&*arena.alloc_slice_fill_iter(args)
});
let payloads = arena.alloc_slice_fill_iter(payloads);
Layout::Union(UnionLayout::NonRecursive(payloads))
}
UnionLayout::Recursive(_)
| UnionLayout::NonNullableUnwrapped(_)
| UnionLayout::NullableWrapped { .. }
| UnionLayout::NullableUnwrapped { .. } => {
// This is the recursive layout.
// TODO will need fixing to be modified once we support multiple
// recursive pointers in one structure.
return layout;
}
},
Layout::Boxed(inner) => {
let inner = go!(inner);
Layout::Boxed(inner)
}
Layout::Struct {
field_order_hash,
field_layouts,
} => {
let field_layouts = field_layouts
.iter()
.map(|lay| resolve_recursive_layout(arena, interner, *lay, when_recursive));
let field_layouts = arena.alloc_slice_fill_iter(field_layouts);
Layout::Struct {
field_order_hash,
field_layouts,
}
}
Layout::Builtin(builtin) => match builtin {
Builtin::List(inner) => {
let inner = resolve_recursive_layout(arena, interner, inner, when_recursive);
Layout::Builtin(Builtin::List(inner))
}
Builtin::Int(_)
| Builtin::Float(_)
| Builtin::Bool
| Builtin::Decimal
| Builtin::Str => return layout,
},
Layout::LambdaSet(LambdaSet {
args,
ret,
set,
representation,
full_layout,
}) => {
let set = set.iter().map(|(symbol, captures)| {
let captures = captures.iter().map(|lay_in| go!(*lay_in));
let captures = &*arena.alloc_slice_fill_iter(captures);
(*symbol, captures)
});
let set = arena.alloc_slice_fill_iter(set);
Layout::LambdaSet(LambdaSet {
args,
ret,
set: arena.alloc(&*set),
representation,
full_layout,
})
}
};
interner.insert(layout)
}
enum TagPayloads<'a> {
IdNotInUnion,
Payloads(&'a [InLayout<'a>]),


@ -5,7 +5,7 @@ use ven_pretty::{Arena, DocAllocator, DocBuilder};
use crate::{
ir::{Parens, ProcLayout},
layout::{Layout, LayoutInterner},
layout::LayoutInterner,
};
use super::{
@ -157,7 +157,7 @@ where
f.concat([
format_symbol(f, interns, symbol),
f.reflow(" defined here with layout "),
interner.to_doc(def_layout, f, Parens::NotNeeded),
interner.to_doc_top(def_layout, f),
]),
)];
f.concat([
@ -165,7 +165,7 @@ where
f.reflow(" used as a "),
f.reflow(format_use_kind(use_kind)),
f.reflow(" here with layout "),
interner.to_doc(use_layout, f, Parens::NotNeeded),
interner.to_doc_top(use_layout, f),
])
}
ProblemKind::SymbolDefMismatch {
@ -178,9 +178,9 @@ where
f.concat([
format_symbol(f, interns, symbol),
f.reflow(" is defined as "),
interner.to_doc(def_layout, f, Parens::NotNeeded),
interner.to_doc_top(def_layout, f),
f.reflow(" but its initializer is "),
interner.to_doc(expr_layout, f, Parens::NotNeeded),
interner.to_doc_top(expr_layout, f),
])
}
ProblemKind::BadSwitchConditionLayout { found_layout } => {
@ -188,7 +188,7 @@ where
docs_before = vec![];
f.concat([
f.reflow("This switch condition is a "),
interner.to_doc(found_layout, f, Parens::NotNeeded),
interner.to_doc_top(found_layout, f),
])
}
ProblemKind::DuplicateSwitchBranch {} => {
@ -324,7 +324,7 @@ where
f.reflow("The union "),
format_symbol(f, interns, structure),
f.reflow(" defined here has layout "),
Layout::Union(union_layout).to_doc(f, interner, Parens::NotNeeded),
interner.to_doc_top(union_layout, f),
]),
)];
f.concat([f.reflow("which has no tag of id "), f.as_string(tag_id)])
@ -367,7 +367,7 @@ where
f.reflow("The union "),
format_symbol(f, interns, structure),
f.reflow(" defined here has layout "),
Layout::Union(union_layout).to_doc(f, interner, Parens::NotNeeded),
interner.to_doc_top(union_layout, f),
]),
)];
f.concat([
@ -394,7 +394,7 @@ where
f.reflow("The variant "),
f.as_string(tag_id),
f.reflow(" is outside the target union layout "),
Layout::Union(union_layout).to_doc(f, interner, Parens::NotNeeded),
interner.to_doc_top(union_layout, f),
])
}
ProblemKind::CreateTagPayloadMismatch {
@ -469,16 +469,16 @@ where
let args = f.intersperse(
arguments
.iter()
.map(|a| interner.to_doc(*a, f, Parens::InFunction)),
.map(|a| interner.to_doc(*a, f, &mut Default::default(), Parens::InFunction)),
f.reflow(", "),
);
let fun = f.concat([
f.concat([f.reflow("("), args, f.reflow(")")]),
f.reflow(" -> "),
interner.to_doc(result, f, Parens::NotNeeded),
interner.to_doc_top(result, f),
]);
let niche = (f.text("("))
.append(captures_niche.to_doc(f, interner))
.append(captures_niche.to_doc(f, interner, &mut Default::default()))
.append(f.text(")"));
f.concat([fun, f.space(), niche])
}


@ -605,7 +605,7 @@ impl<'a, 'i> Context<'a, 'i> {
// get the borrow signature
let ps = self
.param_map
.get_symbol(name.name(), top_level)
.get_symbol(self.layout_interner, name.name(), top_level)
.expect("function is defined");
let v = Expr::Call(crate::ir::Call {
@ -653,10 +653,11 @@ impl<'a, 'i> Context<'a, 'i> {
niche: passed_function.name.niche(),
};
let function_ps = match self
.param_map
.get_symbol(passed_function.name.name(), function_layout)
{
let function_ps = match self.param_map.get_symbol(
self.layout_interner,
passed_function.name.name(),
function_layout,
) {
Some(function_ps) => function_ps,
None => unreachable!(),
};
@ -671,14 +672,14 @@ impl<'a, 'i> Context<'a, 'i> {
match ownership {
DataOwnedFunctionOwns | DataBorrowedFunctionOwns => {
// elements have been consumed, must still consume the list itself
let rest = self.arena.alloc($stmt);
let rest = self.arena.alloc(stmt);
let rc = Stmt::Refcounting(ModifyRc::DecRef(argument), rest);
stmt = self.arena.alloc(rc);
}
DataOwnedFunctionBorrows => {
// must consume list and elements
let rest = self.arena.alloc($stmt);
let rest = self.arena.alloc(stmt);
let rc = Stmt::Refcounting(ModifyRc::Dec(argument), rest);
stmt = self.arena.alloc(rc);
@ -1510,19 +1511,28 @@ pub fn visit_procs<'a, 'i>(
};
for (key, proc) in procs.iter_mut() {
visit_proc(arena, &mut codegen, param_map, &ctx, proc, key.1);
visit_proc(
arena,
layout_interner,
&mut codegen,
param_map,
&ctx,
proc,
key.1,
);
}
}
fn visit_proc<'a, 'i>(
arena: &'a Bump,
interner: &STLayoutInterner<'a>,
codegen: &mut CodegenTools<'i>,
param_map: &'a ParamMap<'a>,
ctx: &Context<'a, 'i>,
proc: &mut Proc<'a>,
layout: ProcLayout<'a>,
) {
let params = match param_map.get_symbol(proc.name.name(), layout) {
let params = match param_map.get_symbol(interner, proc.name.name(), layout) {
Some(slice) => slice,
None => Vec::from_iter_in(
proc.args.iter().cloned().map(|(layout, symbol)| Param {

File diff suppressed because it is too large.


@ -1,7 +1,8 @@
use super::pattern::{build_list_index_probe, store_pattern, DestructType, ListIndex, Pattern};
use crate::borrow::Ownership;
use crate::ir::{
build_list_index_probe, BranchInfo, Call, CallType, DestructType, Env, Expr, JoinPointId,
ListIndex, Literal, Param, Pattern, Procs, Stmt,
substitute_in_exprs_many, BranchInfo, Call, CallType, CompiledGuardStmt, Env, Expr,
GuardStmtSpec, JoinPointId, Literal, Param, Procs, Stmt,
};
use crate::layout::{
Builtin, InLayout, Layout, LayoutCache, LayoutInterner, TLLayoutInterner, TagIdIntType,
@ -9,6 +10,7 @@ use crate::layout::{
};
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_collections::all::{MutMap, MutSet};
use roc_collections::BumpMap;
use roc_error_macros::internal_error;
use roc_exhaustive::{Ctor, CtorName, ListArity, RenderAs, TagId, Union};
use roc_module::ident::TagName;
@ -19,6 +21,7 @@ use roc_module::symbol::Symbol;
type Label = u64;
const RECORD_TAG_NAME: &str = "#Record";
const TUPLE_TAG_NAME: &str = "#Tuple";
/// Users of this module will mainly interact with this function. It takes
/// some normal branches and gives out a decision tree that has "labels" at all
@ -41,14 +44,13 @@ fn compile<'a>(
}
#[derive(Clone, Debug, PartialEq)]
pub enum Guard<'a> {
pub(crate) enum Guard<'a> {
NoGuard,
Guard {
/// pattern
pattern: Pattern<'a>,
/// after assigning to symbol, the stmt jumps to this label
id: JoinPointId,
stmt: Stmt<'a>,
/// How to compile the guard statement.
stmt_spec: GuardStmtSpec,
},
}
@ -56,6 +58,10 @@ impl<'a> Guard<'a> {
fn is_none(&self) -> bool {
self == &Guard::NoGuard
}
fn is_some(&self) -> bool {
!self.is_none()
}
}
type Edge<'a> = (GuardedTest<'a>, DecisionTree<'a>);
@ -72,19 +78,19 @@ enum DecisionTree<'a> {
#[derive(Clone, Debug, PartialEq)]
enum GuardedTest<'a> {
// e.g. `_ if True -> ...`
// e.g. `x if True -> ...`
GuardedNoTest {
/// pattern
pattern: Pattern<'a>,
/// after assigning to symbol, the stmt jumps to this label
id: JoinPointId,
/// body
stmt: Stmt<'a>,
/// How to compile the guard body.
stmt_spec: GuardStmtSpec,
},
// e.g. `<pattern> -> ...`
TestNotGuarded {
test: Test<'a>,
},
Placeholder,
// e.g. `_ -> ...` or `x -> ...`
PlaceholderWithGuard,
}
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
@ -187,15 +193,15 @@ impl<'a> Hash for Test<'a> {
impl<'a> Hash for GuardedTest<'a> {
fn hash<H: Hasher>(&self, state: &mut H) {
match self {
GuardedTest::GuardedNoTest { id, .. } => {
GuardedTest::GuardedNoTest { stmt_spec, .. } => {
state.write_u8(1);
id.hash(state);
stmt_spec.hash(state);
}
GuardedTest::TestNotGuarded { test } => {
state.write_u8(0);
test.hash(state);
}
GuardedTest::Placeholder => {
GuardedTest::PlaceholderWithGuard => {
state.write_u8(2);
}
}
@ -231,8 +237,8 @@ fn to_decision_tree<'a>(
match first.guard {
Guard::NoGuard => unreachable!(),
Guard::Guard { id, stmt, pattern } => {
let guarded_test = GuardedTest::GuardedNoTest { id, stmt, pattern };
Guard::Guard { pattern, stmt_spec } => {
let guarded_test = GuardedTest::GuardedNoTest { pattern, stmt_spec };
// the guard test does not have a path
let path = vec![];
@ -263,6 +269,7 @@ fn to_decision_tree<'a>(
let path = pick_path(&branches).clone();
let bs = branches.clone();
let (edges, fallback) = gather_edges(interner, branches, &path);
let mut decision_edges: Vec<_> = edges
@ -307,7 +314,7 @@ fn break_out_guard<'a>(
) -> DecisionTree<'a> {
match edges
.iter()
.position(|(t, _)| matches!(t, GuardedTest::Placeholder))
.position(|(t, _)| matches!(t, GuardedTest::PlaceholderWithGuard))
{
None => DecisionTree::Decision {
path,
@ -346,7 +353,7 @@ fn guarded_tests_are_complete(tests: &[GuardedTest]) -> bool {
.all(|t| matches!(t, GuardedTest::TestNotGuarded { .. }));
match tests.last().unwrap() {
GuardedTest::Placeholder => false,
GuardedTest::PlaceholderWithGuard => false,
GuardedTest::GuardedNoTest { .. } => false,
GuardedTest::TestNotGuarded { test } => no_guard && tests_are_complete_help(test, length),
}
@ -572,6 +579,31 @@ fn test_for_pattern<'a>(pattern: &Pattern<'a>) -> Option<Test<'a>> {
}
}
TupleDestructure(destructs, _) => {
// not rendered, so pick the easiest
let union = Union {
render_as: RenderAs::Tag,
alternatives: vec![Ctor {
tag_id: TagId(0),
name: CtorName::Tag(TagName(TUPLE_TAG_NAME.into())),
arity: destructs.len(),
}],
};
let mut arguments = std::vec::Vec::new();
for destruct in destructs {
arguments.push((destruct.pat.clone(), destruct.layout));
}
IsCtor {
tag_id: 0,
ctor_name: CtorName::Tag(TagName(TUPLE_TAG_NAME.into())),
union,
arguments,
}
}
NewtypeDestructure {
tag_name,
arguments,
@ -661,7 +693,7 @@ fn test_at_path<'a>(
if let Guard::Guard { .. } = &branch.guard {
// no tests for this pattern remain, but we cannot discard it yet
// because it has a guard!
Some(GuardedTest::Placeholder)
Some(GuardedTest::PlaceholderWithGuard)
} else {
None
}
@ -683,10 +715,33 @@ fn edges_for<'a>(
// if we test for a guard, skip all branches until one that has a guard
let it = match test {
GuardedTest::GuardedNoTest { .. } | GuardedTest::Placeholder => {
GuardedTest::GuardedNoTest { .. } => {
let index = branches
.iter()
.position(|b| !b.guard.is_none())
.position(|b| b.guard.is_some())
.expect("if testing for a guard, one branch must have a guard");
branches[index..].iter()
}
GuardedTest::PlaceholderWithGuard => {
// Skip all branches until we hit the one with a placeholder and a guard.
let index = branches
.iter()
.position(|b| {
if b.guard.is_none() {
return false;
}
let (_, pattern) = b
.patterns
.iter()
.find(|(branch_path, _)| branch_path == path)
.expect(
"if testing for a placeholder with guard, must find a branch matching the path",
);
test_for_pattern(pattern).is_none()
})
.expect("if testing for a guard, one branch must have a guard");
branches[index..].iter()
@ -715,7 +770,7 @@ fn to_relevant_branch<'a>(
found_pattern: pattern,
end,
} => match guarded_test {
GuardedTest::Placeholder | GuardedTest::GuardedNoTest { .. } => {
GuardedTest::PlaceholderWithGuard | GuardedTest::GuardedNoTest { .. } => {
// if there is no test, the pattern should not require any
debug_assert!(
matches!(pattern, Pattern::Identifier(_) | Pattern::Underscore,),
@ -790,6 +845,42 @@ fn to_relevant_branch_help<'a>(
_ => None,
},
TupleDestructure(destructs, _) => match test {
IsCtor {
ctor_name: test_name,
tag_id,
..
} => {
debug_assert!(test_name == &CtorName::Tag(TagName(TUPLE_TAG_NAME.into())));
let destructs_len = destructs.len();
let sub_positions = destructs.into_iter().enumerate().map(|(index, destruct)| {
let pattern = destruct.pat.clone();
let mut new_path = path.to_vec();
let next_instr = if destructs_len == 1 {
PathInstruction::NewType
} else {
PathInstruction::TagIndex {
index: index as u64,
tag_id: *tag_id,
}
};
new_path.push(next_instr);
(new_path, pattern)
});
start.extend(sub_positions);
start.extend(end);
Some(Branch {
goal: branch.goal,
guard: branch.guard.clone(),
patterns: start,
})
}
_ => None,
},
OpaqueUnwrap { opaque, argument } => match test {
IsCtor {
ctor_name: test_opaque_tag_name,
@ -1126,6 +1217,7 @@ fn needs_tests(pattern: &Pattern) -> bool {
NewtypeDestructure { .. }
| RecordDestructure(..)
| TupleDestructure(..)
| AppliedTag { .. }
| OpaqueUnwrap { .. }
| BitLiteral { .. }
@ -1269,15 +1361,15 @@ fn small_branching_factor(branches: &[Branch], path: &[PathInstruction]) -> usiz
relevant_tests.len() + (if !fallbacks { 0 } else { 1 })
}
#[derive(Clone, Debug, PartialEq)]
#[derive(Debug, PartialEq)]
enum Decider<'a, T> {
Leaf(T),
Guarded {
/// after assigning to symbol, the stmt jumps to this label
id: JoinPointId,
stmt: Stmt<'a>,
pattern: Pattern<'a>,
/// The guard expression and how to compile it.
stmt_spec: GuardStmtSpec,
success: Box<Decider<'a, T>>,
failure: Box<Decider<'a, T>>,
},
@ -1301,7 +1393,18 @@ enum Choice<'a> {
type StoresVec<'a> = bumpalo::collections::Vec<'a, (Symbol, InLayout<'a>, Expr<'a>)>;
pub fn optimize_when<'a>(
struct JumpSpec<'a> {
target_index: u64,
id: JoinPointId,
/// Symbols, from the unpacked pattern, to add on when jumping to the target.
jump_pattern_param_symbols: &'a [Symbol],
// Used to construct the joinpoint
join_params: &'a [Param<'a>],
join_body: Stmt<'a>,
}
pub(crate) fn optimize_when<'a>(
env: &mut Env<'a, '_>,
procs: &mut Procs<'a>,
layout_cache: &mut LayoutCache<'a>,
@ -1310,11 +1413,11 @@ pub fn optimize_when<'a>(
ret_layout: InLayout<'a>,
opt_branches: bumpalo::collections::Vec<'a, (Pattern<'a>, Guard<'a>, Stmt<'a>)>,
) -> Stmt<'a> {
let (patterns, _indexed_branches) = opt_branches
let (patterns, indexed_branches): (_, Vec<_>) = opt_branches
.into_iter()
.enumerate()
.map(|(index, (pattern, guard, branch))| {
let has_guard = !guard.is_none();
let has_guard = guard.is_some();
(
(guard, pattern.clone(), index as u64),
(index as u64, branch, pattern, has_guard),
@ -1322,8 +1425,6 @@ pub fn optimize_when<'a>(
})
.unzip();
let indexed_branches: Vec<_> = _indexed_branches;
let decision_tree = compile(&layout_cache.interner, patterns);
let decider = tree_to_decider(decision_tree);
@ -1334,19 +1435,84 @@ pub fn optimize_when<'a>(
let mut choices = MutMap::default();
let mut jumps = Vec::new();
for (index, mut branch, pattern, has_guard) in indexed_branches.into_iter() {
// bind the fields referenced in the pattern. For guards this happens separately, so
// the pattern variables are defined when evaluating the guard.
if !has_guard {
branch =
crate::ir::store_pattern(env, procs, layout_cache, &pattern, cond_symbol, branch);
for (target, mut branch, pattern, has_guard) in indexed_branches.into_iter() {
let should_inline = {
let target_counts = &target_counts;
match target_counts.get(target as usize) {
None => unreachable!(
"this should never happen: {:?} not in {:?}",
target, target_counts
),
Some(count) => *count == 1,
}
};
let join_params: &'a [Param<'a>];
let jump_pattern_param_symbols: &'a [Symbol];
match (has_guard, should_inline) {
(false, _) => {
// Bind the fields referenced in the pattern.
branch = store_pattern(env, procs, layout_cache, &pattern, cond_symbol, branch);
join_params = &[];
jump_pattern_param_symbols = &[];
}
(true, true) => {
// Nothing more to do - the patterns will be bound when the guard is evaluated in
// `decide_to_branching`.
join_params = &[];
jump_pattern_param_symbols = &[];
}
(true, false) => {
// The patterns will be bound when the guard is evaluated, and then we need to get
// them back into the joinpoint here.
//
// So, figure out what symbols the pattern binds, and update the joinpoint
// parameter to take each symbol. Then, when the joinpoint is called, the unpacked
// symbols will be filled in.
//
// Since the joinpoint's parameters will be fresh symbols, the join body also needs
// updating.
let pattern_bindings = pattern.collect_symbols(cond_layout);
let mut parameters_buf = bumpalo::collections::Vec::with_capacity_in(1, env.arena);
let mut pattern_symbols_buf =
bumpalo::collections::Vec::with_capacity_in(1, env.arena);
let mut substitutions = BumpMap::default();
for (pattern_symbol, layout) in pattern_bindings {
let param_symbol = env.unique_symbol();
parameters_buf.push(Param {
symbol: param_symbol,
layout,
ownership: Ownership::Owned,
});
pattern_symbols_buf.push(pattern_symbol);
substitutions.insert(pattern_symbol, param_symbol);
}
join_params = parameters_buf.into_bump_slice();
jump_pattern_param_symbols = pattern_symbols_buf.into_bump_slice();
substitute_in_exprs_many(env.arena, &mut branch, substitutions);
}
}
let ((branch_index, choice), opt_jump) = create_choices(&target_counts, index, branch);
let ((branch_index, choice), opt_jump) = if should_inline {
((target, Choice::Inline(branch)), None)
} else {
((target, Choice::Jump(target)), Some((target, branch)))
};
if let Some((index, body)) = opt_jump {
if let Some((target_index, body)) = opt_jump {
let id = JoinPointId(env.unique_symbol());
jumps.push((index, id, body));
jumps.push(JumpSpec {
target_index,
id,
jump_pattern_param_symbols,
join_params,
join_body: body,
});
}
choices.insert(branch_index, choice);
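
The comment in the `(true, false)` arm above describes the non-inlined guard case: the shared joinpoint gets fresh parameter symbols, so the join body must be rewritten from the pattern-bound symbols to those fresh names, and the jump later passes the pattern symbols as arguments. A minimal standalone sketch of that substitution step, with a toy `Expr` standing in for the real `Stmt`/`Expr` and `substitute_in_exprs_many`:

```rust
use std::collections::HashMap;

// Standalone sketch: `Symbol` and `Expr` are stand-ins, not the compiler's types.
type Symbol = u32;

#[derive(Debug, PartialEq)]
enum Expr {
    Var(Symbol),
    Add(Box<Expr>, Box<Expr>),
}

/// Rewrite every occurrence of an old symbol to its fresh replacement.
fn substitute(expr: &mut Expr, subs: &HashMap<Symbol, Symbol>) {
    match expr {
        Expr::Var(s) => {
            if let Some(fresh) = subs.get(s) {
                *s = *fresh;
            }
        }
        Expr::Add(a, b) => {
            substitute(a, subs);
            substitute(b, subs);
        }
    }
}

fn main() {
    // The branch body refers to pattern symbols 1 and 2...
    let mut join_body = Expr::Add(Box::new(Expr::Var(1)), Box::new(Expr::Var(2)));
    // ...the joinpoint introduces fresh parameters 10 and 20, so the body is
    // rewritten; the jump then supplies 1 and 2 as the joinpoint arguments.
    let substitutions = HashMap::from([(1, 10), (2, 20)]);
    substitute(&mut join_body, &substitutions);
    assert_eq!(
        join_body,
        Expr::Add(Box::new(Expr::Var(10)), Box::new(Expr::Var(20)))
    );
}
```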
@ -1365,11 +1531,18 @@ pub fn optimize_when<'a>(
&jumps,
);
for (_, id, body) in jumps.into_iter() {
for JumpSpec {
target_index: _,
id,
jump_pattern_param_symbols: _,
join_params,
join_body,
} in jumps.into_iter()
{
stmt = Stmt::Join {
id,
parameters: &[],
body: env.arena.alloc(body),
parameters: join_params,
body: env.arena.alloc(join_body),
remainder: env.arena.alloc(stmt),
};
}
@ -1406,7 +1579,7 @@ fn path_to_expr_help<'a>(
PathInstruction::TagIndex { index, tag_id } => {
let index = *index;
match layout_interner.get(layout) {
match layout_interner.chase_recursive(layout) {
Layout::Union(union_layout) => {
let inner_expr = Expr::UnionAtIndex {
tag_id: *tag_id,
@ -1506,7 +1679,7 @@ fn test_to_comparison<'a>(
// (e.g. record pattern guard matches)
debug_assert!(union.alternatives.len() > 1);
match layout_interner.get(test_layout) {
match layout_interner.chase_recursive(test_layout) {
Layout::Union(union_layout) => {
let lhs = Expr::Literal(Literal::Int((tag_id as i128).to_ne_bytes()));
@ -1866,7 +2039,7 @@ fn decide_to_branching<'a>(
cond_layout: InLayout<'a>,
ret_layout: InLayout<'a>,
decider: Decider<'a, Choice<'a>>,
jumps: &[(u64, JoinPointId, Stmt<'a>)],
jumps: &[JumpSpec<'a>],
) -> Stmt<'a> {
use Choice::*;
use Decider::*;
@ -1876,16 +2049,15 @@ fn decide_to_branching<'a>(
match decider {
Leaf(Jump(label)) => {
let index = jumps
.binary_search_by_key(&label, |r| r.0)
.binary_search_by_key(&label, |r| r.target_index)
.expect("jump not in list of jumps");
Stmt::Jump(jumps[index].1, &[])
Stmt::Jump(jumps[index].id, jumps[index].jump_pattern_param_symbols)
}
Leaf(Inline(expr)) => expr,
Guarded {
id,
stmt,
pattern,
stmt_spec,
success,
failure,
} => {
@ -1931,14 +2103,19 @@ fn decide_to_branching<'a>(
ownership: Ownership::Owned,
};
let CompiledGuardStmt {
join_point_id,
stmt,
} = stmt_spec.generate_guard_and_join(env, procs, layout_cache);
let join = Stmt::Join {
id,
id: join_point_id,
parameters: arena.alloc([param]),
remainder: arena.alloc(stmt),
body: arena.alloc(decide),
remainder: arena.alloc(stmt),
};
crate::ir::store_pattern(env, procs, layout_cache, &pattern, cond_symbol, join)
store_pattern(env, procs, layout_cache, &pattern, cond_symbol, join)
}
Chain {
test_chain,
@ -2108,7 +2285,7 @@ fn decide_to_branching<'a>(
// We have learned more about the exact layout of the cond (based on the path)
// but tests are still relative to the original cond symbol
let inner_cond_layout_raw = layout_cache.get_in(inner_cond_layout);
let inner_cond_layout_raw = layout_cache.interner.chase_recursive(inner_cond_layout);
let mut switch = if let Layout::Union(union_layout) = inner_cond_layout_raw {
let tag_id_symbol = env.unique_symbol();
@ -2219,15 +2396,17 @@ fn sort_edge_tests_by_priority(edges: &mut [Edge<'_>]) {
edges.sort_by(|(t1, _), (t2, _)| match (t1, t2) {
// Guarded takes priority
(GuardedNoTest { .. }, GuardedNoTest { .. }) => Equal,
(GuardedNoTest { .. }, TestNotGuarded { .. }) | (GuardedNoTest { .. }, Placeholder) => Less,
(GuardedNoTest { .. }, TestNotGuarded { .. })
| (GuardedNoTest { .. }, PlaceholderWithGuard) => Less,
// Interesting case: what test do we pick?
(TestNotGuarded { test: t1 }, TestNotGuarded { test: t2 }) => order_tests(t1, t2),
// Otherwise we are between guarded and fall-backs
(TestNotGuarded { .. }, GuardedNoTest { .. }) => Greater,
(TestNotGuarded { .. }, Placeholder) => Less,
(TestNotGuarded { .. }, PlaceholderWithGuard) => Less,
// Placeholder is always last
(Placeholder, Placeholder) => Equal,
(Placeholder, GuardedNoTest { .. }) | (Placeholder, TestNotGuarded { .. }) => Greater,
(PlaceholderWithGuard, PlaceholderWithGuard) => Equal,
(PlaceholderWithGuard, GuardedNoTest { .. })
| (PlaceholderWithGuard, TestNotGuarded { .. }) => Greater,
});
fn order_tests(t1: &Test, t2: &Test) -> Ordering {
@ -2389,7 +2568,7 @@ fn fanout_decider_help<'a>(
guarded_test: GuardedTest<'a>,
) -> (Test<'a>, Decider<'a, u64>) {
match guarded_test {
GuardedTest::Placeholder | GuardedTest::GuardedNoTest { .. } => {
GuardedTest::PlaceholderWithGuard | GuardedTest::GuardedNoTest { .. } => {
unreachable!("this would not end up in a switch")
}
GuardedTest::TestNotGuarded { test } => {
@ -2406,16 +2585,15 @@ fn chain_decider<'a>(
success_tree: DecisionTree<'a>,
) -> Decider<'a, u64> {
match guarded_test {
GuardedTest::GuardedNoTest { id, stmt, pattern } => {
GuardedTest::GuardedNoTest { pattern, stmt_spec } => {
let failure = Box::new(tree_to_decider(failure_tree));
let success = Box::new(tree_to_decider(success_tree));
Decider::Guarded {
id,
stmt,
pattern,
stmt_spec,
success,
failure: failure.clone(),
failure,
}
}
GuardedTest::TestNotGuarded { test } => {
@ -2426,7 +2604,7 @@ fn chain_decider<'a>(
}
}
GuardedTest::Placeholder => {
GuardedTest::PlaceholderWithGuard => {
// ?
tree_to_decider(success_tree)
}
@ -2509,22 +2687,6 @@ fn count_targets(targets: &mut bumpalo::collections::Vec<u64>, initial: &Decider
}
}
#[allow(clippy::type_complexity)]
fn create_choices<'a>(
target_counts: &bumpalo::collections::Vec<'a, u64>,
target: u64,
branch: Stmt<'a>,
) -> ((u64, Choice<'a>), Option<(u64, Stmt<'a>)>) {
match target_counts.get(target as usize) {
None => unreachable!(
"this should never happen: {:?} not in {:?}",
target, target_counts
),
Some(1) => ((target, Choice::Inline(branch)), None),
Some(_) => ((target, Choice::Jump(target)), Some((target, branch))),
}
}
fn insert_choices<'a>(
choice_dict: &MutMap<u64, Choice<'a>>,
decider: Decider<'a, u64>,
@ -2538,15 +2700,13 @@ fn insert_choices<'a>(
}
Guarded {
id,
stmt,
pattern,
stmt_spec,
success,
failure,
} => Guarded {
id,
stmt,
pattern,
stmt_spec,
success: Box::new(insert_choices(choice_dict, *success)),
failure: Box::new(insert_choices(choice_dict, *failure)),
},


@ -0,0 +1,116 @@
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_can::expr::IntValue;
use roc_error_macros::internal_error;
use roc_module::symbol::Symbol;
use roc_std::RocDec;
use crate::layout::{Builtin, InLayout, Layout, LayoutInterner, TLLayoutInterner};
use super::pattern::Pattern;
#[derive(Debug, Clone, Copy)]
pub enum IntOrFloatValue {
Int(IntValue),
Float(f64),
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Literal<'a> {
// Literals
/// stored as raw bytes rather than a number to avoid an alignment bump
Int([u8; 16]),
/// stored as raw bytes rather than a number to avoid an alignment bump
U128([u8; 16]),
Float(f64),
/// stored as raw bytes rather than a number to avoid an alignment bump
Decimal([u8; 16]),
Str(&'a str),
/// Closed tag unions containing exactly two (0-arity) tags compile to Expr::Bool,
/// so they can (at least potentially) be emitted as 1-bit machine bools.
///
/// So [True, False] compiles to this, and so do [A, B] and [Foo, Bar].
/// However, a union like [True, False, Other Int] would not.
Bool(bool),
/// Closed tag unions containing between 3 and 256 tags (all of 0 arity)
/// compile to bytes, e.g. [Blue, Black, Red, Green, White]
Byte(u8),
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ListLiteralElement<'a> {
Literal(Literal<'a>),
Symbol(Symbol),
}
impl<'a> ListLiteralElement<'a> {
pub fn to_symbol(&self) -> Option<Symbol> {
match self {
Self::Symbol(s) => Some(*s),
_ => None,
}
}
}
pub enum NumLiteral {
Int([u8; 16], IntWidth),
U128([u8; 16]),
Float(f64, FloatWidth),
Decimal([u8; 16]),
}
impl NumLiteral {
pub fn to_expr_literal(&self) -> Literal<'static> {
match *self {
NumLiteral::Int(n, _) => Literal::Int(n),
NumLiteral::U128(n) => Literal::U128(n),
NumLiteral::Float(n, _) => Literal::Float(n),
NumLiteral::Decimal(n) => Literal::Decimal(n),
}
}
pub fn to_pattern(&self) -> Pattern<'static> {
match *self {
NumLiteral::Int(n, w) => Pattern::IntLiteral(n, w),
NumLiteral::U128(n) => Pattern::IntLiteral(n, IntWidth::U128),
NumLiteral::Float(n, w) => Pattern::FloatLiteral(f64::to_bits(n), w),
NumLiteral::Decimal(n) => Pattern::DecimalLiteral(n),
}
}
}
pub fn make_num_literal<'a>(
interner: &TLLayoutInterner<'a>,
layout: InLayout<'a>,
num_str: &str,
num_value: IntOrFloatValue,
) -> NumLiteral {
match interner.get(layout) {
Layout::Builtin(Builtin::Int(width)) => match num_value {
IntOrFloatValue::Int(IntValue::I128(n)) => NumLiteral::Int(n, width),
IntOrFloatValue::Int(IntValue::U128(n)) => NumLiteral::U128(n),
IntOrFloatValue::Float(..) => {
internal_error!("Float value where int was expected, should have been a type error")
}
},
Layout::Builtin(Builtin::Float(width)) => match num_value {
IntOrFloatValue::Float(n) => NumLiteral::Float(n, width),
IntOrFloatValue::Int(int_value) => match int_value {
IntValue::I128(n) => NumLiteral::Float(i128::from_ne_bytes(n) as f64, width),
IntValue::U128(n) => NumLiteral::Float(u128::from_ne_bytes(n) as f64, width),
},
},
Layout::Builtin(Builtin::Decimal) => {
let dec = match RocDec::from_str(num_str) {
Some(d) => d,
None => internal_error!(
"Invalid decimal for float literal = {}. This should be a type error!",
num_str
),
};
NumLiteral::Decimal(dec.to_ne_bytes())
}
layout => internal_error!(
"Found a non-num layout where a number was expected: {:?}",
layout
),
}
}
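
A standalone illustration of the "stored as raw bytes rather than a number to avoid an alignment bump" notes on `Literal::Int`/`U128`/`Decimal` above: the value round-trips through `to_ne_bytes`/`from_ne_bytes`, while a `[u8; 16]` field only requires 1-byte alignment, so carrying the bytes does not raise the alignment of the enum that holds them.

```rust
use std::mem::align_of;

fn main() {
    // An i128 literal travels as its native-endian bytes...
    let n: i128 = 42;
    let raw: [u8; 16] = n.to_ne_bytes();

    // ...and round-trips losslessly...
    assert_eq!(i128::from_ne_bytes(raw), 42);

    // ...while the byte array only needs 1-byte alignment.
    assert_eq!(align_of::<[u8; 16]>(), 1);
    println!("i128 alignment on this target: {}", align_of::<i128>());
}
```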

File diff suppressed because it is too large.


@ -1,9 +1,11 @@
use crate::ir::Parens;
use crate::layout::intern::NeedsRecursionPointerFixup;
use bitvec::vec::BitVec;
use bumpalo::collections::Vec;
use bumpalo::Bump;
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_collections::all::{default_hasher, FnvMap, MutMap};
use roc_collections::VecSet;
use roc_error_macros::{internal_error, todo_abilities};
use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::{Interns, Symbol};
@ -12,9 +14,12 @@ use roc_target::{PtrWidth, TargetInfo};
use roc_types::num::NumericRange;
use roc_types::subs::{
self, Content, FlatType, GetSubsSlice, Label, OptVariable, RecordFields, Subs, TagExt,
UnsortedUnionLabels, Variable, VariableSubsSlice,
TupleElems, UnsortedUnionLabels, Variable, VariableSubsSlice,
};
use roc_types::types::{
gather_fields_unsorted_iter, gather_tuple_elems_unsorted_iter, RecordField, RecordFieldsError,
TupleElemsError,
};
use roc_types::types::{gather_fields_unsorted_iter, RecordField, RecordFieldsError};
use std::cmp::Ordering;
use std::collections::hash_map::{DefaultHasher, Entry};
use std::collections::HashMap;
@ -120,9 +125,9 @@ pub struct LayoutCache<'a> {
impl<'a> LayoutCache<'a> {
pub fn new(interner: TLLayoutInterner<'a>, target_info: TargetInfo) -> Self {
let mut cache = std::vec::Vec::with_capacity(4);
cache.push(CacheLayer::default());
cache.push(Default::default());
let mut raw_cache = std::vec::Vec::with_capacity(4);
raw_cache.push(CacheLayer::default());
raw_cache.push(Default::default());
Self {
target_info,
cache,
@ -299,14 +304,20 @@ impl<'a> LayoutCache<'a> {
/// Invalidates the list of given root variables.
/// Usually called after unification, when merged variables with changed contents need to be
/// invalidated.
pub fn invalidate(&mut self, vars: impl IntoIterator<Item = Variable>) {
pub fn invalidate(&mut self, subs: &Subs, vars: impl IntoIterator<Item = Variable>) {
// TODO(layout-cache): optimize me somehow
for var in vars.into_iter() {
let var = subs.get_root_key_without_compacting(var);
for layer in self.cache.iter_mut().rev() {
layer.0.remove(&var);
layer
.0
.retain(|k, _| !subs.equivalent_without_compacting(var, *k));
roc_tracing::debug!(?var, "invalidating cached layout");
}
for layer in self.raw_function_cache.iter_mut().rev() {
layer.0.remove(&var);
layer
.0
.retain(|k, _| !subs.equivalent_without_compacting(var, *k));
roc_tracing::debug!(?var, "invalidating cached layout");
}
}
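
The reworked `invalidate` above no longer removes only the exact key: it normalizes the variable to its root and drops every cached entry that has become equivalent to it after unification. A minimal standalone sketch of that strategy, with integer variables and a toy `Subs` standing in for the real types:

```rust
use std::collections::HashMap;

// Standalone sketch: `root` models get_root_key_without_compacting and
// `equivalent` models equivalent_without_compacting.
type Variable = u32;

struct Subs {
    roots: HashMap<Variable, Variable>,
}

impl Subs {
    fn root(&self, var: Variable) -> Variable {
        *self.roots.get(&var).unwrap_or(&var)
    }

    fn equivalent(&self, a: Variable, b: Variable) -> bool {
        self.root(a) == self.root(b)
    }
}

struct CacheLayer(HashMap<Variable, &'static str>);

fn invalidate(subs: &Subs, layers: &mut [CacheLayer], vars: impl IntoIterator<Item = Variable>) {
    for var in vars {
        let var = subs.root(var);
        for layer in layers.iter_mut().rev() {
            // Drop every cached entry whose key has unified with `var`,
            // not just the entry stored under the exact key.
            layer.0.retain(|k, _| !subs.equivalent(var, *k));
        }
    }
}

fn main() {
    // Variables 1 and 2 were unified (2's root is now 1); 3 is unrelated.
    let subs = Subs {
        roots: HashMap::from([(2, 1)]),
    };
    let mut layers = [CacheLayer(HashMap::from([(2, "Str"), (3, "U64")]))];
    invalidate(&subs, &mut layers, [1]);
    assert!(!layers[0].0.contains_key(&2));
    assert!(layers[0].0.contains_key(&3));
}
```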
@ -481,7 +492,9 @@ impl<'a> RawFunctionLayout<'a> {
let structure_content = env.subs.get_content_without_compacting(structure);
Self::new_help(env, structure, *structure_content)
}
LambdaSet(lset) => Self::layout_from_lambda_set(env, lset),
LambdaSet(_) => {
internal_error!("lambda set should only appear under a function, where it's handled independently.");
}
Structure(flat_type) => Self::layout_from_flat_type(env, flat_type),
RangedNumber(..) => Layout::new_help(env, var, content).then(Self::ZeroArgumentThunk),
@ -554,15 +567,6 @@ impl<'a> RawFunctionLayout<'a> {
}
}
fn layout_from_lambda_set(
_env: &mut Env<'a, '_>,
_lset: subs::LambdaSet,
) -> Cacheable<RawFunctionLayoutResult<'a>> {
unreachable!()
// Lambda set is just a tag union from the layout's perspective.
// Self::layout_from_flat_type(env, lset.as_tag_union())
}
fn layout_from_flat_type(
env: &mut Env<'a, '_>,
flat_type: FlatType,
@ -653,6 +657,17 @@ impl FieldOrderHash {
fields.iter().for_each(|field| field.hash(&mut hasher));
Self(hasher.finish())
}
pub fn from_ordered_tuple_elems(elems: &[usize]) -> Self {
if elems.is_empty() {
// HACK: we must make sure this is always equivalent to a `ZERO_FIELD_HASH`.
return Self::ZERO_FIELD_HASH;
}
let mut hasher = DefaultHasher::new();
elems.iter().for_each(|elem| elem.hash(&mut hasher));
Self(hasher.finish())
}
}
/// Types for code gen must be monomorphic. No type variables allowed!
@ -727,6 +742,7 @@ impl<'a> UnionLayout<'a> {
self,
alloc: &'b D,
interner: &I,
seen_rec: &mut SeenRecPtrs<'a>,
_parens: Parens,
) -> DocBuilder<'b, D, A>
where
@ -740,14 +756,14 @@ impl<'a> UnionLayout<'a> {
match self {
NonRecursive(tags) => {
let tags_doc = tags.iter().map(|fields| {
alloc.text("C ").append(alloc.intersperse(
fields.iter().map(|x| {
interner
.get(*x)
.to_doc(alloc, interner, Parens::InTypeParam)
}),
" ",
))
alloc.text("C ").append(
alloc.intersperse(
fields
.iter()
.map(|x| interner.to_doc(*x, alloc, seen_rec, Parens::InTypeParam)),
" ",
),
)
});
alloc
@ -757,14 +773,14 @@ impl<'a> UnionLayout<'a> {
}
Recursive(tags) => {
let tags_doc = tags.iter().map(|fields| {
alloc.text("C ").append(alloc.intersperse(
fields.iter().map(|x| {
interner
.get(*x)
.to_doc(alloc, interner, Parens::InTypeParam)
}),
" ",
))
alloc.text("C ").append(
alloc.intersperse(
fields
.iter()
.map(|x| interner.to_doc(*x, alloc, seen_rec, Parens::InTypeParam)),
" ",
),
)
});
alloc
.text("[<r>")
@ -772,14 +788,14 @@ impl<'a> UnionLayout<'a> {
.append(alloc.text("]"))
}
NonNullableUnwrapped(fields) => {
let fields_doc = alloc.text("C ").append(alloc.intersperse(
fields.iter().map(|x| {
interner
.get(*x)
.to_doc(alloc, interner, Parens::InTypeParam)
}),
" ",
));
let fields_doc = alloc.text("C ").append(
alloc.intersperse(
fields
.iter()
.map(|x| interner.to_doc(*x, alloc, seen_rec, Parens::InTypeParam)),
" ",
),
);
alloc
.text("[<rnnu>")
.append(fields_doc)
@ -789,14 +805,14 @@ impl<'a> UnionLayout<'a> {
nullable_id,
other_fields,
} => {
let fields_doc = alloc.text("C ").append(alloc.intersperse(
other_fields.iter().map(|x| {
interner
.get(*x)
.to_doc(alloc, interner, Parens::InTypeParam)
}),
" ",
));
let fields_doc = alloc.text("C ").append(
alloc.intersperse(
other_fields
.iter()
.map(|x| interner.to_doc(*x, alloc, seen_rec, Parens::InTypeParam)),
" ",
),
);
let tags_doc = if nullable_id {
alloc.concat(vec![alloc.text("<null>, "), fields_doc])
} else {
@ -812,21 +828,20 @@ impl<'a> UnionLayout<'a> {
other_tags,
} => {
let nullable_id = nullable_id as usize;
let tags_docs = (0..(other_tags.len() + 1)).map(|i| {
if i == nullable_id {
alloc.text("<null>")
} else {
let idx = if i > nullable_id { i - 1 } else { i };
alloc.text("C ").append(alloc.intersperse(
other_tags[idx].iter().map(|x| {
interner
.get(*x)
.to_doc(alloc, interner, Parens::InTypeParam)
}),
" ",
))
}
});
let tags_docs =
(0..(other_tags.len() + 1)).map(|i| {
if i == nullable_id {
alloc.text("<null>")
} else {
let idx = if i > nullable_id { i - 1 } else { i };
alloc.text("C ").append(alloc.intersperse(
other_tags[idx].iter().map(|x| {
interner.to_doc(*x, alloc, seen_rec, Parens::InTypeParam)
}),
" ",
))
}
});
let tags_docs = alloc.intersperse(tags_docs, alloc.text(", "));
alloc
.text("[<rnw>")
@ -1273,7 +1288,12 @@ pub struct Niche<'a>(NichePriv<'a>);
impl<'a> Niche<'a> {
pub const NONE: Niche<'a> = Niche(NichePriv::Captures(&[]));
pub fn to_doc<'b, D, A, I>(self, alloc: &'b D, interner: &I) -> DocBuilder<'b, D, A>
pub fn to_doc<'b, D, A, I>(
self,
alloc: &'b D,
interner: &I,
seen_rec: &mut SeenRecPtrs<'a>,
) -> DocBuilder<'b, D, A>
where
D: DocAllocator<'b, A>,
D::Doc: Clone,
@ -1286,13 +1306,21 @@ impl<'a> Niche<'a> {
alloc.intersperse(
captures
.iter()
.map(|c| interner.get(*c).to_doc(alloc, interner, Parens::NotNeeded)),
.map(|c| interner.to_doc(*c, alloc, seen_rec, Parens::NotNeeded)),
alloc.reflow(", "),
),
alloc.reflow("})"),
]),
}
}
pub fn dbg_deep<'r, I: LayoutInterner<'a>>(
&'r self,
interner: &'r I,
) -> crate::layout::intern::dbg::DbgFields<'a, 'r, I> {
let NichePriv::Captures(caps) = &self.0;
interner.dbg_deep_iter(caps)
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@ -1517,35 +1545,7 @@ impl<'a> LambdaSet<'a> {
where
I: LayoutInterner<'a>,
{
if left == right {
return true;
}
let left = interner.get(*left);
let right = interner.get(*right);
let left = if matches!(left, Layout::RecursivePointer(_)) {
let runtime_repr = self.runtime_representation();
debug_assert!(matches!(
interner.get(runtime_repr),
Layout::Union(UnionLayout::Recursive(_) | UnionLayout::NullableUnwrapped { .. })
));
Layout::LambdaSet(*self)
} else {
left
};
let right = if matches!(right, Layout::RecursivePointer(_)) {
let runtime_repr = self.runtime_representation();
debug_assert!(matches!(
interner.get(runtime_repr),
Layout::Union(UnionLayout::Recursive(_) | UnionLayout::NullableUnwrapped { .. })
));
Layout::LambdaSet(*self)
} else {
right
};
left == right
interner.equiv(*left, *right)
}
fn layout_for_member<I, F>(&self, interner: &I, comparator: F) -> ClosureRepresentation<'a>
@ -1558,7 +1558,7 @@ impl<'a> LambdaSet<'a> {
return ClosureRepresentation::UnwrappedCapture(self.representation);
}
let repr = interner.get(self.representation);
let repr = interner.chase_recursive(self.representation);
match repr {
Layout::Union(union) => {
@ -1621,11 +1621,27 @@ impl<'a> LambdaSet<'a> {
union_layout: union,
}
}
UnionLayout::NonNullableUnwrapped(_) => todo!("recursive closures"),
UnionLayout::NullableWrapped {
nullable_id: _,
other_tags: _,
} => todo!("recursive closures"),
} => {
let (index, (name, fields)) = self
.set
.iter()
.enumerate()
.find(|(_, (s, layouts))| comparator(*s, layouts))
.unwrap();
let closure_name = *name;
ClosureRepresentation::Union {
tag_id: index as TagIdIntType,
alphabetic_order_fields: fields,
closure_name,
union_layout: union,
}
}
UnionLayout::NonNullableUnwrapped(_) => internal_error!("I thought a non-nullable-unwrapped variant for a lambda set was impossible: how could such a lambda set be created without a base case?"),
}
}
Layout::Struct { .. } => {
@ -1642,7 +1658,7 @@ impl<'a> LambdaSet<'a> {
ClosureRepresentation::AlphabeticOrderStruct(fields)
}
layout => {
debug_assert!(self.has_enum_dispatch_repr(),);
debug_assert!(self.has_enum_dispatch_repr());
let enum_repr = match layout {
Layout::Builtin(Builtin::Bool) => EnumDispatch::Bool,
Layout::Builtin(Builtin::Int(IntWidth::U8)) => EnumDispatch::U8,
@ -1669,7 +1685,7 @@ impl<'a> LambdaSet<'a> {
return ClosureCallOptions::UnwrappedCapture(self.representation);
}
let repr = interner.get(self.representation);
let repr = interner.chase_recursive(self.representation);
match repr {
Layout::Union(union_layout) => {
@ -1764,7 +1780,7 @@ impl<'a> LambdaSet<'a> {
Cacheable(result, criteria)
});
match result.map(|l| env.cache.get_in(l)) {
match result.map(|l| env.cache.interner.chase_recursive(l)) {
Ok(Layout::LambdaSet(lambda_set)) => Cacheable(Ok(lambda_set), criteria),
Err(err) => Cacheable(Err(err), criteria),
Ok(layout) => internal_error!("other layout found for lambda set: {:?}", layout),
@ -1804,6 +1820,7 @@ impl<'a> LambdaSet<'a> {
Vec::with_capacity_in(lambdas.len(), env.arena);
let mut set_with_variables: std::vec::Vec<(&Symbol, &[Variable])> =
std::vec::Vec::with_capacity(lambdas.len());
let mut set_captures_have_naked_rec_ptr = false;
let mut last_function_symbol = None;
let mut lambdas_it = lambdas.iter().peekable();
@ -1822,6 +1839,8 @@ impl<'a> LambdaSet<'a> {
let mut criteria = CACHEABLE;
let arg = cached!(Layout::from_var(env, *var), criteria);
arguments.push(arg);
set_captures_have_naked_rec_ptr =
set_captures_have_naked_rec_ptr || criteria.has_naked_recursion_pointer;
}
let arguments = arguments.into_bump_slice();
@ -1882,10 +1901,16 @@ impl<'a> LambdaSet<'a> {
);
cache_criteria.and(criteria);
let needs_recursive_fixup = NeedsRecursionPointerFixup(
opt_recursion_var.is_some() && set_captures_have_naked_rec_ptr,
);
let lambda_set = env.cache.interner.insert_lambda_set(
env.arena,
fn_args,
ret,
env.arena.alloc(set.into_bump_slice()),
needs_recursive_fixup,
representation,
);
@ -1895,9 +1920,11 @@ impl<'a> LambdaSet<'a> {
// The lambda set is unbound which means it must be unused. Just give it the empty lambda set.
// See also https://github.com/roc-lang/roc/issues/3163.
let lambda_set = env.cache.interner.insert_lambda_set(
env.arena,
fn_args,
ret,
&(&[] as &[(Symbol, &[InLayout])]),
NeedsRecursionPointerFixup(false),
Layout::UNIT,
);
Cacheable(Ok(lambda_set), cache_criteria)
@ -2128,6 +2155,16 @@ pub enum Builtin<'a> {
List(InLayout<'a>),
}
#[macro_export]
macro_rules! list_element_layout {
($interner:expr, $list_layout:expr) => {
match $interner.get($list_layout) {
Layout::Builtin(Builtin::List(list_layout)) => list_layout,
_ => internal_error!("invalid list layout"),
}
};
}
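// A minimal usage sketch (the `layout_interner` and `list_layout` names are assumptions,
// not part of this diff):
//
//     let element = list_element_layout!(layout_interner, list_layout);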
pub struct Env<'a, 'b> {
target_info: TargetInfo,
arena: &'a Bump,
@ -2353,7 +2390,9 @@ impl<'a> Layout<'a> {
let structure_content = env.subs.get_content_without_compacting(structure);
Self::new_help(env, structure, *structure_content)
}
LambdaSet(lset) => layout_from_lambda_set(env, lset),
LambdaSet(_) => {
internal_error!("lambda set should only appear under a function, where it's handled independently.");
}
Structure(flat_type) => layout_from_flat_type(env, flat_type),
Alias(symbol, _args, actual_var, _) => {
@ -2702,42 +2741,58 @@ impl<'a> Layout<'a> {
}
}
pub fn to_doc<'b, D, A, I>(
self,
alloc: &'b D,
interner: &I,
parens: Parens,
) -> DocBuilder<'b, D, A>
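/// Whether this layout's stack size can vary between specializations: true exactly when a
/// lambda set (whose size depends on its captures) is reachable without crossing a pointer
/// indirection, since `List` and `Boxed` contents live behind a pointer.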
pub fn has_varying_stack_size<I>(self, interner: &I, arena: &bumpalo::Bump) -> bool
where
D: DocAllocator<'b, A>,
D::Doc: Clone,
A: Clone,
I: LayoutInterner<'a>,
{
use Layout::*;
let mut stack: Vec<Layout> = bumpalo::collections::Vec::new_in(arena);
match self {
Builtin(builtin) => builtin.to_doc(alloc, interner, parens),
Struct { field_layouts, .. } => {
let fields_doc = field_layouts
.iter()
.map(|x| interner.get(*x).to_doc(alloc, interner, parens));
stack.push(self);
alloc
.text("{")
.append(alloc.intersperse(fields_doc, ", "))
.append(alloc.text("}"))
while let Some(layout) = stack.pop() {
match layout {
Layout::Builtin(builtin) => match builtin {
Builtin::Int(_)
| Builtin::Float(_)
| Builtin::Bool
| Builtin::Decimal
| Builtin::Str
// If there's any layer of indirection (behind a pointer), then it doesn't vary!
| Builtin::List(_) => { /* do nothing */ }
},
// If there's any layer of indirection (behind a pointer), then it doesn't vary!
Layout::Struct { field_layouts, .. } => {
stack.extend(field_layouts.iter().map(|interned| interner.get(*interned)))
}
Layout::Union(tag_union) => match tag_union {
UnionLayout::NonRecursive(tags) | UnionLayout::Recursive(tags) => {
for tag in tags {
stack.extend(tag.iter().map(|interned| interner.get(*interned)));
}
}
UnionLayout::NonNullableUnwrapped(fields) => {
stack.extend(fields.iter().map(|interned| interner.get(*interned)));
}
UnionLayout::NullableWrapped { other_tags, .. } => {
for tag in other_tags {
stack.extend(tag.iter().map(|interned| interner.get(*interned)));
}
}
UnionLayout::NullableUnwrapped { other_fields, .. } => {
stack.extend(other_fields.iter().map(|interned| interner.get(*interned)));
}
},
Layout::LambdaSet(_) => return true,
Layout::Boxed(_) => {
// If there's any layer of indirection (behind a pointer), then it doesn't vary!
}
Layout::RecursivePointer(_) => {
/* do nothing; the recursive structure was already visited via the Union(_) arm */
}
}
Union(union_layout) => union_layout.to_doc(alloc, interner, parens),
LambdaSet(lambda_set) => interner
.get(lambda_set.runtime_representation())
.to_doc(alloc, interner, parens),
RecursivePointer(_) => alloc.text("*self"),
Boxed(inner) => alloc
.text("Boxed(")
.append(interner.get(inner).to_doc(alloc, interner, parens))
.append(")"),
}
false
}
/// Used to build a `Layout::Struct` where the field name order is irrelevant.
@ -2773,6 +2828,8 @@ impl<'a> Layout<'a> {
}
}
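/// Recursive tag-union layouts currently being pretty-printed; a `RecursivePointer` back into
/// one of these renders as `*self` instead of being expanded again.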
pub type SeenRecPtrs<'a> = VecSet<InLayout<'a>>;
impl<'a> Layout<'a> {
pub fn usize(target_info: TargetInfo) -> InLayout<'a> {
match target_info.ptr_width() {
@ -2821,6 +2878,18 @@ impl<'a> Layout<'a> {
Dec => Layout::DEC,
}
}
pub fn is_recursive_tag_union(self) -> bool {
matches!(
self,
Layout::Union(
UnionLayout::NullableUnwrapped { .. }
| UnionLayout::Recursive(_)
| UnionLayout::NullableWrapped { .. }
| UnionLayout::NonNullableUnwrapped { .. },
)
)
}
}
impl<'a> Builtin<'a> {
@ -2900,6 +2969,7 @@ impl<'a> Builtin<'a> {
self,
alloc: &'b D,
interner: &I,
seen_rec: &mut SeenRecPtrs<'a>,
_parens: Parens,
) -> DocBuilder<'b, D, A>
where
@ -2941,12 +3011,12 @@ impl<'a> Builtin<'a> {
Decimal => alloc.text("Decimal"),
Str => alloc.text("Str"),
List(layout) => {
let layout = interner.get(layout);
alloc
.text("List ")
.append(layout.to_doc(alloc, interner, Parens::InTypeParam))
}
List(layout) => alloc.text("List ").append(interner.to_doc(
layout,
alloc,
seen_rec,
Parens::InTypeParam,
)),
}
}
@ -2973,37 +3043,6 @@ impl<'a> Builtin<'a> {
}
}
fn layout_from_lambda_set<'a>(
env: &mut Env<'a, '_>,
lset: subs::LambdaSet,
) -> Cacheable<LayoutResult<'a>> {
// Lambda set is just a tag union from the layout's perspective.
let subs::LambdaSet {
solved,
recursion_var,
unspecialized,
ambient_function: _,
} = lset;
if !unspecialized.is_empty() {
internal_error!(
"unspecialized lambda sets remain during layout generation for {:?}",
roc_types::subs::SubsFmtContent(&Content::LambdaSet(lset), env.subs)
);
}
match recursion_var.into_variable() {
None => {
let labels = solved.unsorted_lambdas(env.subs);
layout_from_non_recursive_union(env, &labels).map(Ok)
}
Some(rec_var) => {
let labels = solved.unsorted_lambdas(env.subs);
layout_from_recursive_union(env, rec_var, &labels)
}
}
}
fn layout_from_flat_type<'a>(
env: &mut Env<'a, '_>,
flat_type: FlatType,
@ -3189,8 +3228,52 @@ fn layout_from_flat_type<'a>(
Cacheable(result, criteria)
}
Tuple(_elems, _ext_var) => {
todo!();
Tuple(elems, ext_var) => {
let mut criteria = CACHEABLE;
// extract any values from the ext_var
let mut sortables = Vec::with_capacity_in(elems.len(), arena);
let it = match elems.unsorted_iterator(subs, ext_var) {
Ok(it) => it,
Err(TupleElemsError) => return Cacheable(Err(LayoutProblem::Erroneous), criteria),
};
for (index, elem) in it {
let elem_layout = cached!(Layout::from_var(env, elem), criteria);
sortables.push((index, elem_layout));
}
sortables.sort_by(|(index1, layout1), (index2, layout2)| {
cmp_fields(
&env.cache.interner,
index1,
*layout1,
index2,
*layout2,
target_info,
)
});
let ordered_field_names =
Vec::from_iter_in(sortables.iter().map(|(index, _)| *index), arena);
let field_order_hash =
FieldOrderHash::from_ordered_tuple_elems(ordered_field_names.as_slice());
let result = if sortables.len() == 1 {
// If the tuple has only one element that isn't zero-sized,
// unwrap it.
Ok(sortables.pop().unwrap().1)
} else {
let layouts = Vec::from_iter_in(sortables.into_iter().map(|t| t.1), arena);
let struct_layout = Layout::Struct {
field_order_hash,
field_layouts: layouts.into_bump_slice(),
};
Ok(env.cache.put_in(struct_layout))
};
Cacheable(result, criteria)
}
TagUnion(tags, ext_var) => {
let (tags, ext_var) = tags.unsorted_tags_and_ext(subs, ext_var);
@ -3225,6 +3308,48 @@ fn layout_from_flat_type<'a>(
}
}
pub type SortedTupleElem<'a> = (usize, Variable, InLayout<'a>);
pub fn sort_tuple_elems<'a>(
env: &mut Env<'a, '_>,
var: Variable,
) -> Result<Vec<'a, SortedTupleElem<'a>>, LayoutProblem> {
let (it, _) = match gather_tuple_elems_unsorted_iter(env.subs, TupleElems::empty(), var) {
Ok(it) => it,
Err(_) => return Err(LayoutProblem::Erroneous),
};
sort_tuple_elems_help(env, it)
}
fn sort_tuple_elems_help<'a>(
env: &mut Env<'a, '_>,
elems_map: impl Iterator<Item = (usize, Variable)>,
) -> Result<Vec<'a, SortedTupleElem<'a>>, LayoutProblem> {
let target_info = env.target_info;
let mut sorted_elems = Vec::with_capacity_in(elems_map.size_hint().0, env.arena);
for (index, elem) in elems_map {
let Cacheable(layout, _) = Layout::from_var(env, elem);
let layout = layout?;
sorted_elems.push((index, elem, layout));
}
sorted_elems.sort_by(|(index1, _, res_layout1), (index2, _, res_layout2)| {
cmp_fields(
&env.cache.interner,
index1,
*res_layout1,
index2,
*res_layout2,
target_info,
)
});
Ok(sorted_elems)
}
pub type SortedField<'a> = (Lowercase, Variable, Result<InLayout<'a>, InLayout<'a>>);
pub fn sort_record_fields<'a>(
@ -3500,18 +3625,6 @@ fn get_recursion_var(subs: &Subs, var: Variable) -> Option<Variable> {
}
}
fn is_recursive_tag_union(layout: &Layout) -> bool {
matches!(
layout,
Layout::Union(
UnionLayout::NullableUnwrapped { .. }
| UnionLayout::Recursive(_)
| UnionLayout::NullableWrapped { .. }
| UnionLayout::NonNullableUnwrapped { .. },
)
)
}
fn union_sorted_non_recursive_tags_help<'a, L>(
env: &mut Env<'a, '_>,
tags_list: &[(&'_ L, &[Variable])],
@ -3804,7 +3917,7 @@ where
== env
.subs
.get_root_key_without_compacting(opt_rec_var.unwrap())
&& is_recursive_tag_union(&layout);
&& layout.is_recursive_tag_union();
let arg_layout = if self_recursion {
Layout::NAKED_RECURSIVE_PTR
@ -4075,6 +4188,7 @@ where
// The naked pointer will get fixed-up to loopback to the union below when we
// intern the union.
tag_layout.push(Layout::NAKED_RECURSIVE_PTR);
criteria.and(NAKED_RECURSION_PTR);
continue;
}
@ -4115,12 +4229,17 @@ where
} else {
UnionLayout::Recursive(tag_layouts.into_bump_slice())
};
criteria.pass_through_recursive_union(rec_var);
let union_layout = env
.cache
.interner
.insert_recursive(env.arena, Layout::Union(union_layout));
let union_layout = if criteria.has_naked_recursion_pointer {
env.cache
.interner
.insert_recursive(env.arena, Layout::Union(union_layout))
} else {
// There are no naked recursion pointers, so we can insert the layout as-is.
env.cache.interner.insert(Layout::Union(union_layout))
};
criteria.pass_through_recursive_union(rec_var);
Cacheable(Ok(union_layout), criteria)
}

View file

@ -12,7 +12,7 @@ use roc_collections::{default_hasher, BumpMap};
use roc_module::symbol::Symbol;
use roc_target::TargetInfo;
use super::{Builtin, FieldOrderHash, LambdaSet, Layout, UnionLayout};
use super::{Builtin, FieldOrderHash, LambdaSet, Layout, SeenRecPtrs, UnionLayout};
macro_rules! cache_interned_layouts {
($($i:literal, $name:ident, $vis:vis, $layout:expr)*; $total_constants:literal) => {
@ -121,6 +121,13 @@ impl<'a> Layout<'a> {
}
}
/// Whether a recursive lambda set being inserted into an interner needs fixing-up of naked
/// recursion pointers in the capture set.
/// Applicable only if
/// - the lambda set is indeed recursive, and
/// - its capture set contains naked recursion pointer references
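///
/// ## Example
///
/// A sketch mirroring the call sites in this diff; all names here are assumptions:
///
/// ```ignore(illustrative)
/// let fixup = NeedsRecursionPointerFixup(set_is_recursive && captures_have_naked_rec_ptr);
/// let lambda_set = interner.insert_lambda_set(arena, args, ret, set, fixup, repr);
/// ```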
pub struct NeedsRecursionPointerFixup(pub bool);
pub trait LayoutInterner<'a>: Sized {
/// Interns a value, returning its interned representation.
/// If the value has been interned before, the old interned representation will be re-used.
@ -135,9 +142,11 @@ pub trait LayoutInterner<'a>: Sized {
/// lambda set onto itself.
fn insert_lambda_set(
&mut self,
arena: &'a Bump,
args: &'a &'a [InLayout<'a>],
ret: InLayout<'a>,
set: &'a &'a [(Symbol, &'a [InLayout<'a>])],
needs_recursive_fixup: NeedsRecursionPointerFixup,
representation: InLayout<'a>,
) -> LambdaSet<'a>;
@ -198,14 +207,63 @@ pub trait LayoutInterner<'a>: Sized {
Layout::runtime_representation_in(layout, self)
}
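/// Follows `RecursivePointer`s until a non-pointer layout is reached and returns that layout.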
fn chase_recursive(&self, mut layout: InLayout<'a>) -> Layout<'a> {
loop {
match self.get(layout) {
Layout::RecursivePointer(l) => layout = l,
other => return other,
}
}
}
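/// Like [`chase_recursive`][LayoutInterner::chase_recursive], but returns the interned key of
/// the target layout rather than the layout itself.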
fn chase_recursive_in(&self, mut layout: InLayout<'a>) -> InLayout<'a> {
loop {
match self.get(layout) {
Layout::RecursivePointer(l) => layout = l,
_ => return layout,
}
}
}
fn safe_to_memcpy(&self, layout: InLayout<'a>) -> bool {
self.get(layout).safe_to_memcpy(self)
}
/// Checks if two layouts are equivalent up to isomorphism.
///
/// This is only to be used when layouts need to be compared across statements and depths,
/// for example
/// - when looking up a layout index in a lambda set
/// - in the [checker][crate::debug::check_procs], where `x = UnionAtIndex(f, 0)` may have
/// that the recorded layout of `x` is at a different depth than that determined when we
/// index the recorded layout of `f` at 0. Hence the two layouts may have different
/// interned representations, even if they are in fact isomorphic.
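///
/// ## Example
///
/// A minimal sketch; `interner`, `recorded_layout`, and `indexed_layout` are hypothetical:
///
/// ```ignore(illustrative)
/// // Two keys for the same recursive union unrolled to different depths: the interned
/// // keys differ, but the layouts are isomorphic.
/// assert_ne!(recorded_layout, indexed_layout);
/// assert!(interner.equiv(recorded_layout, indexed_layout));
/// ```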
fn equiv(&self, l1: InLayout<'a>, l2: InLayout<'a>) -> bool {
std::thread_local! {
static SCRATCHPAD: RefCell<Option<Vec<(InLayout<'static>, InLayout<'static>)>>> = RefCell::new(Some(Vec::with_capacity(64)));
}
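// Reuse one scratch stack per thread so repeated equivalence checks do not reallocate.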
SCRATCHPAD.with(|f| {
// SAFETY: the promotion to lifetime 'a only lasts during equivalence-checking; the
// scratchpad stack is cleared after every use.
let mut stack: Vec<(InLayout<'a>, InLayout<'a>)> =
unsafe { std::mem::transmute(f.take().unwrap()) };
let answer = equiv::equivalent(&mut stack, self, l1, l2);
stack.clear();
let stack: Vec<(InLayout<'static>, InLayout<'static>)> =
unsafe { std::mem::transmute(stack) };
f.replace(Some(stack));
answer
})
}
fn to_doc<'b, D, A>(
&self,
layout: InLayout<'a>,
alloc: &'b D,
seen_rec: &mut SeenRecPtrs<'a>,
parens: crate::ir::Parens,
) -> ven_pretty::DocBuilder<'b, D, A>
where
@ -213,14 +271,104 @@ pub trait LayoutInterner<'a>: Sized {
D::Doc: Clone,
A: Clone,
{
self.get(layout).to_doc(alloc, self, parens)
use Layout::*;
match self.get(layout) {
Builtin(builtin) => builtin.to_doc(alloc, self, seen_rec, parens),
Struct { field_layouts, .. } => {
let fields_doc = field_layouts
.iter()
.map(|x| self.to_doc(*x, alloc, seen_rec, parens));
alloc
.text("{")
.append(alloc.intersperse(fields_doc, ", "))
.append(alloc.text("}"))
}
Union(union_layout) => {
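// Track recursive unions while their payloads print, so a `RecursivePointer` back to
// this union renders as `*self` below instead of recursing forever.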
let is_recursive = !matches!(union_layout, UnionLayout::NonRecursive(..));
if is_recursive {
seen_rec.insert(layout);
}
let doc = union_layout.to_doc(alloc, self, seen_rec, parens);
if is_recursive {
seen_rec.remove(&layout);
}
doc
}
LambdaSet(lambda_set) => {
self.to_doc(lambda_set.runtime_representation(), alloc, seen_rec, parens)
}
RecursivePointer(rec_layout) => {
if seen_rec.contains(&rec_layout) {
alloc.text("*self")
} else {
self.to_doc(rec_layout, alloc, seen_rec, parens)
}
}
Boxed(inner) => alloc
.text("Boxed(")
.append(self.to_doc(inner, alloc, seen_rec, parens))
.append(")"),
}
}
fn to_doc_top<'b, D, A>(
&self,
layout: InLayout<'a>,
alloc: &'b D,
) -> ven_pretty::DocBuilder<'b, D, A>
where
D: ven_pretty::DocAllocator<'b, A>,
D::Doc: Clone,
A: Clone,
{
self.to_doc(
layout,
alloc,
&mut Default::default(),
crate::ir::Parens::NotNeeded,
)
}
/// Pretty-print a representation of the layout.
fn dbg(&self, layout: InLayout<'a>) -> String {
let alloc: ven_pretty::Arena<()> = ven_pretty::Arena::new();
let doc = self.to_doc(layout, &alloc, crate::ir::Parens::NotNeeded);
let doc = self.to_doc_top(layout, &alloc);
doc.1.pretty(80).to_string()
}
/// Yields a debug representation of a layout, traversing its entire nested structure and
/// debug-printing all intermediate interned layouts.
///
/// By default, a [Layout] is composed inductively of [interned layout][InLayout]s.
/// This makes debugging a layout more than one level deep challenging, as you may run into further
/// opaque interned layouts that need unwrapping.
///
/// [`dbg_deep`][LayoutInterner::dbg_deep] works around this by returning a value whose debug
/// representation chases through all nested interned layouts as you would otherwise have to do
/// manually.
///
/// ## Example
///
/// ```ignore(illustrative)
/// fn is_rec_ptr<'a>(interner: &impl LayoutInterner<'a>, layout: InLayout<'a>) -> bool {
/// if matches!(interner.get(layout), Layout::RecursivePointer(..)) {
/// return true;
/// }
///
/// let deep_dbg = interner.dbg_deep(layout);
/// roc_tracing::info!("not a recursive pointer, actually a {deep_dbg:?}");
/// return false;
/// }
/// ```
fn dbg_deep<'r>(&'r self, layout: InLayout<'a>) -> dbg::Dbg<'a, 'r, Self> {
dbg::Dbg(self, layout)
}
fn dbg_deep_iter<'r>(&'r self, layouts: &'a [InLayout<'a>]) -> dbg::DbgFields<'a, 'r, Self> {
dbg::DbgFields(self, layouts)
}
}
/// An interned layout.
@ -260,6 +408,10 @@ impl<'a> InLayout<'a> {
pub(crate) const unsafe fn from_index(index: usize) -> Self {
Self(index, PhantomData)
}
pub fn index(&self) -> usize {
self.0
}
}
/// A concurrent interner, suitable for usage between threads.
@ -407,40 +559,64 @@ impl<'a> GlobalLayoutInterner<'a> {
fn get_or_insert_hashed_normalized_lambda_set(
&self,
arena: &'a Bump,
normalized: LambdaSet<'a>,
needs_recursive_fixup: NeedsRecursionPointerFixup,
normalized_hash: u64,
) -> WrittenGlobalLambdaSet<'a> {
let mut normalized_lambda_set_map = self.0.normalized_lambda_set_map.lock();
let (_, full_lambda_set) = normalized_lambda_set_map
.raw_entry_mut()
if let Some((_, &full_lambda_set)) = normalized_lambda_set_map
.raw_entry()
.from_key_hashed_nocheck(normalized_hash, &normalized)
.or_insert_with(|| {
// We don't already have an entry for the lambda set, which means it must be new to
// the world. Reserve a slot, insert the lambda set, and that should fill the slot
// in.
let mut map = self.0.map.lock();
let mut vec = self.0.vec.write();
{
let full_layout = self.0.vec.read()[full_lambda_set.full_layout.0];
return WrittenGlobalLambdaSet {
full_lambda_set,
full_layout,
};
}
let slot = unsafe { InLayout::from_index(vec.len()) };
// We don't already have an entry for the lambda set, which means it must be new to
// the world. Reserve a slot, insert the lambda set, and that should fill the slot
// in.
let mut map = self.0.map.lock();
let mut vec = self.0.vec.write();
let lambda_set = LambdaSet {
full_layout: slot,
..normalized
};
let lambda_set_layout = Layout::LambdaSet(lambda_set);
let slot = unsafe { InLayout::from_index(vec.len()) };
vec.push(Layout::VOID_NAKED);
vec.push(lambda_set_layout);
let set = if needs_recursive_fixup.0 {
let mut interner = LockedGlobalInterner {
map: &mut map,
normalized_lambda_set_map: &mut normalized_lambda_set_map,
vec: &mut vec,
target_info: self.0.target_info,
};
reify::reify_lambda_set_captures(arena, &mut interner, slot, normalized.set)
} else {
normalized.set
};
// TODO: Is it helpful to persist the hash and give it back to the thread-local
// interner?
let _old = map.insert(lambda_set_layout, slot);
debug_assert!(_old.is_none());
let full_lambda_set = LambdaSet {
full_layout: slot,
set,
..normalized
};
let lambda_set_layout = Layout::LambdaSet(full_lambda_set);
(normalized, lambda_set)
});
let full_layout = self.0.vec.read()[full_lambda_set.full_layout.0];
vec[slot.0] = lambda_set_layout;
// TODO: Is it helpful to persist the hash and give it back to the thread-local
// interner?
let _old = map.insert(lambda_set_layout, slot);
debug_assert!(_old.is_none());
let _old_normalized = normalized_lambda_set_map.insert(normalized, full_lambda_set);
debug_assert!(_old_normalized.is_none());
let full_layout = vec[full_lambda_set.full_layout.0];
WrittenGlobalLambdaSet {
full_lambda_set: *full_lambda_set,
full_lambda_set,
full_layout,
}
}
@ -539,9 +715,11 @@ impl<'a> LayoutInterner<'a> for TLLayoutInterner<'a> {
fn insert_lambda_set(
&mut self,
arena: &'a Bump,
args: &'a &'a [InLayout<'a>],
ret: InLayout<'a>,
set: &'a &'a [(Symbol, &'a [InLayout<'a>])],
needs_recursive_fixup: NeedsRecursionPointerFixup,
representation: InLayout<'a>,
) -> LambdaSet<'a> {
// The tricky bit of inserting a lambda set is we need to fill in the `full_layout` only
@ -567,7 +745,12 @@ impl<'a> LayoutInterner<'a> for TLLayoutInterner<'a> {
let WrittenGlobalLambdaSet {
full_lambda_set,
full_layout,
} = global.get_or_insert_hashed_normalized_lambda_set(normalized, normalized_hash);
} = global.get_or_insert_hashed_normalized_lambda_set(
arena,
normalized,
needs_recursive_fixup,
normalized_hash,
);
// The Layout(lambda_set) isn't present in our thread; make sure it is for future
// reference.
@ -689,9 +872,11 @@ macro_rules! st_impl {
fn insert_lambda_set(
&mut self,
arena: &'a Bump,
args: &'a &'a [InLayout<'a>],
ret: InLayout<'a>,
set: &'a &'a [(Symbol, &'a [InLayout<'a>])],
needs_recursive_fixup: NeedsRecursionPointerFixup,
representation: InLayout<'a>,
) -> LambdaSet<'a> {
// IDEA:
@ -708,6 +893,14 @@ macro_rules! st_impl {
// This lambda set must be new to the interner, reserve a slot and fill it in.
let slot = unsafe { InLayout::from_index(self.vec.len()) };
self.vec.push(Layout::VOID_NAKED);
let set = if needs_recursive_fixup.0 {
reify::reify_lambda_set_captures(arena, self, slot, set)
} else {
set
};
let lambda_set = LambdaSet {
args,
ret,
@ -715,11 +908,14 @@ macro_rules! st_impl {
representation,
full_layout: slot,
};
let filled_slot = self.insert(Layout::LambdaSet(lambda_set));
assert_eq!(slot, filled_slot);
self.vec[slot.0] = Layout::LambdaSet(lambda_set);
self.normalized_lambda_set_map
let _old = self.map.insert(Layout::LambdaSet(lambda_set), slot);
debug_assert!(_old.is_none());
let _old = self.normalized_lambda_set_map
.insert(normalized_lambda_set, lambda_set);
debug_assert!(_old.is_none());
lambda_set
}
@ -768,10 +964,11 @@ st_impl!('r LockedGlobalInterner);
mod reify {
use bumpalo::{collections::Vec, Bump};
use roc_module::symbol::Symbol;
use crate::layout::{Builtin, LambdaSet, Layout, UnionLayout};
use super::{InLayout, LayoutInterner};
use super::{InLayout, LayoutInterner, NeedsRecursionPointerFixup};
// TODO: if recursion becomes a problem we could make this iterative
pub fn reify_recursive_layout<'a>(
@ -912,18 +1109,330 @@ mod reify {
};
let representation = reify_layout(arena, interner, slot, representation);
interner.insert_lambda_set(arena.alloc(args), ret, arena.alloc(set), representation)
interner.insert_lambda_set(
arena,
arena.alloc(args),
ret,
arena.alloc(set),
// All nested recursive pointers should have been fixed up, since we just did that above.
NeedsRecursionPointerFixup(false),
representation,
)
}
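/// Rewrites the naked recursion pointers inside a lambda set's capture layouts so they point
/// at `slot`, the layout slot reserved for the lambda set itself.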
pub fn reify_lambda_set_captures<'a>(
arena: &'a Bump,
interner: &mut impl LayoutInterner<'a>,
slot: InLayout<'a>,
set: &[(Symbol, &'a [InLayout<'a>])],
) -> &'a &'a [(Symbol, &'a [InLayout<'a>])] {
let mut reified_set = Vec::with_capacity_in(set.len(), arena);
for (f, captures) in set.iter() {
let reified_captures = reify_layout_slice(arena, interner, slot, captures);
reified_set.push((*f, reified_captures));
}
arena.alloc(reified_set.into_bump_slice())
}
}
mod equiv {
use crate::layout::{self, Layout, UnionLayout};
use super::{InLayout, LayoutInterner};
pub fn equivalent<'a>(
stack: &mut Vec<(InLayout<'a>, InLayout<'a>)>,
interner: &impl LayoutInterner<'a>,
l1: InLayout<'a>,
l2: InLayout<'a>,
) -> bool {
stack.push((l1, l2));
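// Worklist of layout pairs left to compare; `RecursivePointer`s are chased to their targets
// as pairs are popped.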
macro_rules! equiv_fields {
($fields1:expr, $fields2:expr) => {{
if $fields1.len() != $fields2.len() {
return false;
}
stack.extend($fields1.iter().copied().zip($fields2.iter().copied()));
}};
}
macro_rules! equiv_unions {
($tags1:expr, $tags2:expr) => {{
if $tags1.len() != $tags2.len() {
return false;
}
for (payloads1, payloads2) in $tags1.iter().zip($tags2) {
equiv_fields!(payloads1, payloads2)
}
}};
}
while let Some((l1, l2)) = stack.pop() {
if l1 == l2 {
continue;
}
use Layout::*;
match (interner.get(l1), interner.get(l2)) {
(RecursivePointer(rec), _) => stack.push((rec, l2)),
(_, RecursivePointer(rec)) => stack.push((l1, rec)),
(Builtin(b1), Builtin(b2)) => {
use crate::layout::Builtin::*;
match (b1, b2) {
(List(e1), List(e2)) => stack.push((e1, e2)),
(b1, b2) => {
if b1 != b2 {
return false;
}
}
}
}
(
Struct {
field_order_hash: foh1,
field_layouts: fl1,
},
Struct {
field_order_hash: foh2,
field_layouts: fl2,
},
) => {
if foh1 != foh2 {
return false;
}
equiv_fields!(fl1, fl2)
}
(Boxed(b1), Boxed(b2)) => stack.push((b1, b2)),
(Union(u1), Union(u2)) => {
use UnionLayout::*;
match (u1, u2) {
(NonRecursive(tags1), NonRecursive(tags2)) => equiv_unions!(tags1, tags2),
(Recursive(tags1), Recursive(tags2)) => equiv_unions!(tags1, tags2),
(NonNullableUnwrapped(fields1), NonNullableUnwrapped(fields2)) => {
equiv_fields!(fields1, fields2)
}
(
NullableWrapped {
nullable_id: null_id1,
other_tags: tags1,
},
NullableWrapped {
nullable_id: null_id2,
other_tags: tags2,
},
) => {
if null_id1 != null_id2 {
return false;
}
equiv_unions!(tags1, tags2)
}
(
NullableUnwrapped {
nullable_id: null_id1,
other_fields: fields1,
},
NullableUnwrapped {
nullable_id: null_id2,
other_fields: fields2,
},
) => {
if null_id1 != null_id2 {
return false;
}
equiv_fields!(fields1, fields2)
}
_ => return false,
}
}
(
LambdaSet(layout::LambdaSet {
args: args1,
ret: ret1,
set: set1,
representation: repr1,
full_layout: _,
}),
LambdaSet(layout::LambdaSet {
args: args2,
ret: ret2,
set: set2,
representation: repr2,
full_layout: _,
}),
) => {
for ((fn1, captures1), (fn2, captures2)) in (**set1).iter().zip(*set2) {
if fn1 != fn2 {
return false;
}
equiv_fields!(captures1, captures2);
}
equiv_fields!(args1, args2);
stack.push((ret1, ret2));
stack.push((repr1, repr2));
}
_ => return false,
}
}
true
}
}
pub mod dbg {
use roc_module::symbol::Symbol;
use crate::layout::{Builtin, LambdaSet, Layout, UnionLayout};
use super::{InLayout, LayoutInterner};
pub struct Dbg<'a, 'r, I: LayoutInterner<'a>>(pub &'r I, pub InLayout<'a>);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for Dbg<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.0.get(self.1) {
Layout::Builtin(b) => f
.debug_tuple("Builtin")
.field(&DbgBuiltin(self.0, b))
.finish(),
Layout::Struct {
field_order_hash,
field_layouts,
} => f
.debug_struct("Struct")
.field("hash", &field_order_hash)
.field("fields", &DbgFields(self.0, field_layouts))
.finish(),
Layout::Boxed(b) => f.debug_tuple("Boxed").field(&Dbg(self.0, b)).finish(),
Layout::Union(un) => f.debug_tuple("Union").field(&DbgUnion(self.0, un)).finish(),
Layout::LambdaSet(ls) => f
.debug_tuple("LambdaSet")
.field(&DbgLambdaSet(self.0, ls))
.finish(),
Layout::RecursivePointer(rp) => {
f.debug_tuple("RecursivePointer").field(&rp.0).finish()
}
}
}
}
pub struct DbgFields<'a, 'r, I: LayoutInterner<'a>>(pub &'r I, pub &'a [InLayout<'a>]);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for DbgFields<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_list()
.entries(self.1.iter().map(|l| Dbg(self.0, *l)))
.finish()
}
}
struct DbgTags<'a, 'r, I: LayoutInterner<'a>>(&'r I, &'a [&'a [InLayout<'a>]]);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for DbgTags<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_list()
.entries(self.1.iter().map(|l| DbgFields(self.0, l)))
.finish()
}
}
struct DbgBuiltin<'a, 'r, I: LayoutInterner<'a>>(&'r I, Builtin<'a>);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for DbgBuiltin<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.1 {
Builtin::Int(w) => f.debug_tuple("Int").field(&w).finish(),
Builtin::Float(w) => f.debug_tuple("Float").field(&w).finish(),
Builtin::Bool => f.debug_tuple("Bool").finish(),
Builtin::Decimal => f.debug_tuple("Decimal").finish(),
Builtin::Str => f.debug_tuple("Str").finish(),
Builtin::List(e) => f.debug_tuple("List").field(&Dbg(self.0, e)).finish(),
}
}
}
struct DbgUnion<'a, 'r, I: LayoutInterner<'a>>(&'r I, UnionLayout<'a>);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for DbgUnion<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.1 {
UnionLayout::NonRecursive(payloads) => f
.debug_tuple("NonRecursive")
.field(&DbgTags(self.0, payloads))
.finish(),
UnionLayout::Recursive(payloads) => f
.debug_tuple("Recursive")
.field(&DbgTags(self.0, payloads))
.finish(),
UnionLayout::NonNullableUnwrapped(fields) => f
.debug_tuple("NonNullableUnwrapped")
.field(&DbgFields(self.0, fields))
.finish(),
UnionLayout::NullableWrapped {
nullable_id,
other_tags,
} => f
.debug_struct("NullableWrapped")
.field("nullable_id", &nullable_id)
.field("other_tags", &DbgTags(self.0, other_tags))
.finish(),
UnionLayout::NullableUnwrapped {
nullable_id,
other_fields,
} => f
.debug_struct("NullableUnwrapped")
.field("nullable_id", &nullable_id)
.field("other_tags", &DbgFields(self.0, other_fields))
.finish(),
}
}
}
struct DbgLambdaSet<'a, 'r, I: LayoutInterner<'a>>(&'r I, LambdaSet<'a>);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for DbgLambdaSet<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let LambdaSet {
args,
ret,
set,
representation,
full_layout,
} = self.1;
f.debug_struct("LambdaSet")
.field("args", &DbgFields(self.0, args))
.field("ret", &Dbg(self.0, ret))
.field("set", &DbgCapturesSet(self.0, set))
.field("representation", &Dbg(self.0, representation))
.field("full_layout", &full_layout)
.finish()
}
}
struct DbgCapturesSet<'a, 'r, I: LayoutInterner<'a>>(&'r I, &'a [(Symbol, &'a [InLayout<'a>])]);
impl<'a, 'r, I: LayoutInterner<'a>> std::fmt::Debug for DbgCapturesSet<'a, 'r, I> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_list()
.entries(
self.1
.iter()
.map(|(sym, captures)| (sym, DbgFields(self.0, captures))),
)
.finish()
}
}
}
#[cfg(test)]
mod insert_lambda_set {
use bumpalo::Bump;
use roc_module::symbol::Symbol;
use roc_target::TargetInfo;
use crate::layout::{LambdaSet, Layout};
use super::{GlobalLayoutInterner, InLayout, LayoutInterner};
use super::{GlobalLayoutInterner, InLayout, LayoutInterner, NeedsRecursionPointerFixup};
const TARGET_INFO: TargetInfo = TargetInfo::default_x86_64();
const TEST_SET: &&[(Symbol, &[InLayout])] =
@ -931,50 +1440,57 @@ mod insert_lambda_set {
const TEST_ARGS: &&[InLayout] = &(&[Layout::UNIT] as &[_]);
const TEST_RET: InLayout = Layout::UNIT;
const FIXUP: NeedsRecursionPointerFixup = NeedsRecursionPointerFixup(true);
#[test]
fn two_threads_write() {
let global = GlobalLayoutInterner::with_capacity(2, TARGET_INFO);
let set = TEST_SET;
let repr = Layout::UNIT;
for _ in 0..100 {
let mut handles = Vec::with_capacity(10);
for _ in 0..10 {
let mut interner = global.fork();
handles.push(std::thread::spawn(move || {
interner.insert_lambda_set(TEST_ARGS, TEST_RET, set, repr)
}))
}
let ins: Vec<LambdaSet> = handles.into_iter().map(|t| t.join().unwrap()).collect();
let interned = ins[0];
assert!(ins.iter().all(|in2| interned == *in2));
let mut arenas: Vec<_> = std::iter::repeat_with(Bump::new).take(10).collect();
let global = GlobalLayoutInterner::with_capacity(2, TARGET_INFO);
let set = TEST_SET;
let repr = Layout::UNIT;
std::thread::scope(|s| {
let mut handles = Vec::with_capacity(10);
for arena in arenas.iter_mut() {
let mut interner = global.fork();
handles.push(s.spawn(move || {
interner.insert_lambda_set(arena, TEST_ARGS, TEST_RET, set, FIXUP, repr)
}))
}
let ins: Vec<LambdaSet> = handles.into_iter().map(|t| t.join().unwrap()).collect();
let interned = ins[0];
assert!(ins.iter().all(|in2| interned == *in2));
});
}
}
#[test]
fn insert_then_reintern() {
let arena = &Bump::new();
let global = GlobalLayoutInterner::with_capacity(2, TARGET_INFO);
let mut interner = global.fork();
let lambda_set = interner.insert_lambda_set(TEST_ARGS, TEST_RET, TEST_SET, Layout::UNIT);
let lambda_set =
interner.insert_lambda_set(arena, TEST_ARGS, TEST_RET, TEST_SET, FIXUP, Layout::UNIT);
let lambda_set_layout_in = interner.insert(Layout::LambdaSet(lambda_set));
assert_eq!(lambda_set.full_layout, lambda_set_layout_in);
}
#[test]
fn write_global_then_single_threaded() {
let arena = &Bump::new();
let global = GlobalLayoutInterner::with_capacity(2, TARGET_INFO);
let set = TEST_SET;
let repr = Layout::UNIT;
let in1 = {
let mut interner = global.fork();
interner.insert_lambda_set(TEST_ARGS, TEST_RET, set, repr)
interner.insert_lambda_set(arena, TEST_ARGS, TEST_RET, set, FIXUP, repr)
};
let in2 = {
let mut st_interner = global.unwrap().unwrap();
st_interner.insert_lambda_set(TEST_ARGS, TEST_RET, set, repr)
st_interner.insert_lambda_set(arena, TEST_ARGS, TEST_RET, set, FIXUP, repr)
};
assert_eq!(in1, in2);
@ -982,18 +1498,19 @@ mod insert_lambda_set {
#[test]
fn write_single_threaded_then_global() {
let arena = &Bump::new();
let global = GlobalLayoutInterner::with_capacity(2, TARGET_INFO);
let mut st_interner = global.unwrap().unwrap();
let set = TEST_SET;
let repr = Layout::UNIT;
let in1 = st_interner.insert_lambda_set(TEST_ARGS, TEST_RET, set, repr);
let in1 = st_interner.insert_lambda_set(arena, TEST_ARGS, TEST_RET, set, FIXUP, repr);
let global = st_interner.into_global();
let mut interner = global.fork();
let in2 = interner.insert_lambda_set(TEST_ARGS, TEST_RET, set, repr);
let in2 = interner.insert_lambda_set(arena, TEST_ARGS, TEST_RET, set, FIXUP, repr);
assert_eq!(in1, in2);
}
@ -1022,7 +1539,7 @@ mod insert_recursive_layout {
}
fn get_rec_ptr_index<'a>(interner: &impl LayoutInterner<'a>, layout: InLayout<'a>) -> usize {
match interner.get(layout) {
match interner.chase_recursive(layout) {
Layout::Union(UnionLayout::Recursive(&[&[l1], &[l2]])) => {
match (interner.get(l1), interner.get(l2)) {
(
@ -1146,7 +1663,7 @@ mod insert_recursive_layout {
make_layout(arena, &mut interner)
};
let in1 = {
let in1: InLayout = {
let mut interner = global.fork();
interner.insert_recursive(arena, layout)
};

View file

@ -6,6 +6,8 @@
#![warn(clippy::dbg_macro)]
// See github.com/roc-lang/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant, clippy::upper_case_acronyms)]
// Not a useful lint for us
#![allow(clippy::too_many_arguments)]
pub mod borrow;
pub mod code_gen_help;
@ -19,9 +21,4 @@ pub mod perceus;
pub mod reset_reuse;
pub mod tail_recursion;
// Temporary, while we can build up test cases and optimize the exhaustiveness checking.
// For now, following this warning's advice will lead to nasty type inference errors.
//#[allow(clippy::ptr_arg)]
pub mod decision_tree;
pub mod debug;

View file

@ -120,6 +120,8 @@ enum FirstOrder {
NumShiftRightBy,
NumBytesToU16,
NumBytesToU32,
NumBytesToU64,
NumBytesToU128,
NumShiftRightZfBy,
NumIntCast,
NumFloatCast,

View file

@ -615,7 +615,7 @@ fn function_r_branch_body<'a, 'i>(
scrutinee,
layout,
tag_id,
} => match env.interner.get(*layout) {
} => match env.interner.chase_recursive(*layout) {
Layout::Union(UnionLayout::NonRecursive(_)) => temp,
Layout::Union(union_layout) if !union_layout.tag_is_null(*tag_id) => {
let ctor_info = CtorInfo {