Merge branch 'trunk' into str-concat

This commit is contained in:
Richard Feldman 2020-08-19 23:05:13 -04:00 committed by GitHub
commit 50251c678b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 1565 additions and 386 deletions

View file

@ -37,6 +37,9 @@ const PRINT_FN_VERIFICATION_OUTPUT: bool = true;
#[cfg(not(debug_assertions))]
const PRINT_FN_VERIFICATION_OUTPUT: bool = false;
pub const REFCOUNT_0: usize = std::usize::MAX;
pub const REFCOUNT_1: usize = REFCOUNT_0 - 1;
#[derive(Debug, Clone, Copy)]
pub enum OptLevel {
Normal,
@ -904,7 +907,7 @@ pub fn build_exp_stmt<'a, 'ctx, 'env>(
match layout {
Layout::Builtin(Builtin::List(MemoryMode::Refcounted, _)) => {
increment_refcount_list(env, value.into_struct_value());
increment_refcount_list(env, parent, value.into_struct_value());
build_exp_stmt(env, layout_ids, scope, parent, cont)
}
_ => build_exp_stmt(env, layout_ids, scope, parent, cont),
@ -929,11 +932,7 @@ fn refcount_is_one_comparison<'ctx>(
context: &'ctx Context,
refcount: IntValue<'ctx>,
) -> IntValue<'ctx> {
let refcount_one: IntValue<'ctx> = context.i64_type().const_int((std::usize::MAX) as _, false);
// Note: Check for refcount < refcount_1 as the "true" condition,
// to avoid misprediction. (In practice this should usually pass,
// and CPUs generally default to predicting that a forward jump
// shouldn't be taken; that is, they predict "else" won't be taken.)
let refcount_one: IntValue<'ctx> = context.i64_type().const_int(REFCOUNT_1 as _, false);
builder.build_int_compare(
IntPredicate::EQ,
refcount,
@ -998,6 +997,7 @@ fn decrement_refcount_layout<'a, 'ctx, 'env>(
}
}
}
RecursiveUnion(_) => todo!("TODO implement decrement layout of recursive tag union"),
Union(tags) => {
debug_assert!(!tags.is_empty());
let wrapper_struct = value.into_struct_value();
@ -1086,11 +1086,29 @@ fn decrement_refcount_builtin<'a, 'ctx, 'env>(
fn increment_refcount_list<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
parent: FunctionValue<'ctx>,
original_wrapper: StructValue<'ctx>,
) {
let builder = env.builder;
let ctx = env.context;
let len = list_len(builder, original_wrapper);
let is_non_empty = builder.build_int_compare(
IntPredicate::UGT,
len,
ctx.i64_type().const_zero(),
"len > 0",
);
// build blocks
let increment_block = ctx.append_basic_block(parent, "increment_block");
let cont_block = ctx.append_basic_block(parent, "after_increment_block");
builder.build_conditional_branch(is_non_empty, increment_block, cont_block);
builder.position_at_end(increment_block);
let refcount_ptr = list_get_refcount_ptr(env, original_wrapper);
let refcount = env
@ -1107,6 +1125,9 @@ fn increment_refcount_list<'a, 'ctx, 'env>(
// Mutate the new array in-place to change the element.
builder.build_store(refcount_ptr, decremented);
builder.build_unconditional_branch(cont_block);
builder.position_at_end(cont_block);
}
fn decrement_refcount_list<'a, 'ctx, 'env>(
@ -1117,6 +1138,30 @@ fn decrement_refcount_list<'a, 'ctx, 'env>(
let builder = env.builder;
let ctx = env.context;
// the block we'll always jump to when we're done
let cont_block = ctx.append_basic_block(parent, "after_decrement_block");
let decrement_block = ctx.append_basic_block(parent, "decrement_block");
// currently, an empty list has a null pointer as its data and its length is 0,
// so we must first check the length
let len = list_len(builder, original_wrapper);
let is_non_empty = builder.build_int_compare(
IntPredicate::UGT,
len,
ctx.i64_type().const_zero(),
"len > 0",
);
// if the length is 0, we're done and jump to the continuation block
// otherwise, actually read and check the refcount
builder.build_conditional_branch(is_non_empty, decrement_block, cont_block);
builder.position_at_end(decrement_block);
// build blocks
let then_block = ctx.append_basic_block(parent, "then");
let else_block = ctx.append_basic_block(parent, "else");
let refcount_ptr = list_get_refcount_ptr(env, original_wrapper);
let refcount = env
@ -1126,16 +1171,24 @@ fn decrement_refcount_list<'a, 'ctx, 'env>(
let comparison = refcount_is_one_comparison(builder, env.context, refcount);
// build blocks
let then_block = ctx.append_basic_block(parent, "then");
let else_block = ctx.append_basic_block(parent, "else");
let cont_block = ctx.append_basic_block(parent, "dec_ref_branchcont");
// TODO what would be most optimal for the branch predictor?
//
// are most refcounts 1 most of the time? or not?
builder.build_conditional_branch(comparison, then_block, else_block);
// build then block
{
builder.position_at_end(then_block);
if !env.leak {
let free = builder.build_free(refcount_ptr);
builder.insert_instruction(&free, None);
}
builder.build_unconditional_branch(cont_block);
}
// build else block
{
builder.position_at_end(else_block);
// our refcount 0 is actually usize::MAX, so decrementing the refcount means incrementing this value.
let decremented = env.builder.build_int_add(
ctx.i64_type().const_int(1 as u64, false),
@ -1149,16 +1202,6 @@ fn decrement_refcount_list<'a, 'ctx, 'env>(
builder.build_unconditional_branch(cont_block);
}
// build else block
{
builder.position_at_end(else_block);
if !env.leak {
let free = builder.build_free(refcount_ptr);
builder.insert_instruction(&free, None);
}
builder.build_unconditional_branch(cont_block);
}
// emit merge block
builder.position_at_end(cont_block);
}
@ -1804,14 +1847,9 @@ fn run_low_level<'a, 'ctx, 'env>(
list_get_unsafe(env, list_layout, elem_index, wrapper_struct)
}
ListSet => {
ListSetInPlace => {
let (list_symbol, list_layout) = load_symbol_and_layout(env, scope, &args[0]);
let in_place = match &list_layout {
Layout::Builtin(Builtin::List(MemoryMode::Unique, _)) => InPlace::InPlace,
_ => InPlace::Clone,
};
list_set(
parent,
&[
@ -1820,19 +1858,57 @@ fn run_low_level<'a, 'ctx, 'env>(
(load_symbol_and_layout(env, scope, &args[2])),
],
env,
in_place,
InPlace::InPlace,
)
}
ListSetInPlace => list_set(
parent,
&[
(load_symbol_and_layout(env, scope, &args[0])),
ListSet => {
let (list_symbol, list_layout) = load_symbol_and_layout(env, scope, &args[0]);
let arguments = &[
(list_symbol, list_layout),
(load_symbol_and_layout(env, scope, &args[1])),
(load_symbol_and_layout(env, scope, &args[2])),
],
env,
InPlace::InPlace,
),
];
match list_layout {
Layout::Builtin(Builtin::List(MemoryMode::Unique, _)) => {
// the layout tells us this List.set can be done in-place
list_set(parent, arguments, env, InPlace::InPlace)
}
Layout::Builtin(Builtin::List(MemoryMode::Refcounted, _)) => {
// no static guarantees, but all is not lost: we can check the refcount
// if it is one, we hold the final reference, and can mutate it in-place!
let builder = env.builder;
let ctx = env.context;
let ret_type =
basic_type_from_layout(env.arena, ctx, list_layout, env.ptr_bytes);
let refcount_ptr = list_get_refcount_ptr(env, list_symbol.into_struct_value());
let refcount = env
.builder
.build_load(refcount_ptr, "get_refcount")
.into_int_value();
let comparison = refcount_is_one_comparison(builder, env.context, refcount);
// build then block
// refcount is 1, so work in-place
let build_pass = || list_set(parent, arguments, env, InPlace::InPlace);
// build else block
// refcount != 1, so clone first
let build_fail = || list_set(parent, arguments, env, InPlace::Clone);
crate::llvm::build_list::build_basic_phi2(
env, parent, comparison, build_pass, build_fail, ret_type,
)
}
Layout::Builtin(Builtin::EmptyList) => list_symbol,
other => unreachable!("List.set: weird layout {:?}", other),
}
}
}
}