Merge pull request #3643 from rtfeldman/disjoint-able-variable-specialization

Disjoint able variable specialization algorithm
Folkert de Vries 2022-08-02 20:31:47 +02:00 committed by GitHub
commit 86a1a0f401
9 changed files with 538 additions and 118 deletions


@@ -2362,7 +2362,7 @@ pub mod test_constrain {
\f -> (\a, b -> f b a)
"#
),
"(a, b -> c) -> (b, a -> c)",
"(a, b -> d) -> (b, a -> d)",
);
}
@@ -2400,7 +2400,7 @@ pub mod test_constrain {
\{} -> x
"#
),
"{}* -> Num *",
"{}* -> Num a",
)
}


@@ -422,12 +422,13 @@ pub fn constrain_expr(
constraints.lookup(*symbol, expected, region)
}
&AbilityMember(symbol, specialization_id, specialization_var) => {
// make lookup constraint to lookup this symbol's type in the environment
let store_expected = constraints.equal_types_var(
// Save the expectation in the `specialization_var` so we know what to specialize, then
// lookup the member in the environment.
let store_expected = constraints.store(
expected.get_type_ref().clone(),
specialization_var,
expected,
Category::Storage(file!(), line!()),
region,
file!(),
line!(),
);
let lookup_constr = constraints.lookup(
symbol,
@@ -435,7 +436,7 @@ pub fn constrain_expr(
region,
);
// Make sure we attempt to resolve the specialization, if we need to.
// Make sure we attempt to resolve the specialization, if we can.
if let Some(specialization_id) = specialization_id {
env.resolutions_to_make.push(OpportunisticResolve {
specialization_variable: specialization_var,

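In short, the new constraint sequence for an ability member does three things in order: store the expected type into specialization_var (so the solver later knows which type the member is being used at), emit the usual lookup constraint for the member symbol, and, when a specialization id is available, queue an OpportunisticResolve. The following is a heavily simplified, self-contained sketch of that ordering; ToyConstraint, ToyResolve, and constrain_ability_member are hypothetical stand-ins, not the compiler's real Constraints/Env API.

// Toy stand-ins that keep only the order of operations from the hunk above.
#[derive(Debug)]
enum ToyConstraint {
    // Save the expectation into the specialization variable (never an error).
    Store { specialization_var: u32, expected: &'static str },
    // Look the ability member's type up in the environment.
    Lookup { member: &'static str },
}

#[derive(Debug)]
struct ToyResolve {
    specialization_variable: u32,
    member: &'static str,
    specialization_id: u32,
}

fn constrain_ability_member(
    member: &'static str,
    specialization_id: Option<u32>,
    specialization_var: u32,
    expected: &'static str,
    resolutions_to_make: &mut Vec<ToyResolve>,
) -> Vec<ToyConstraint> {
    // 1. Save the expectation so we know what to specialize.
    let store = ToyConstraint::Store { specialization_var, expected };
    // 2. Look the member up against that expectation.
    let lookup = ToyConstraint::Lookup { member };
    // 3. Attempt to resolve the specialization later, if we can.
    if let Some(specialization_id) = specialization_id {
        resolutions_to_make.push(ToyResolve {
            specialization_variable: specialization_var,
            member,
            specialization_id,
        });
    }
    vec![store, lookup]
}

fn main() {
    let mut resolutions = Vec::new();
    let constraints =
        constrain_ability_member("Id#id", Some(1), 7, "{} -> {}", &mut resolutions);
    println!("{constraints:?}");
    println!("{resolutions:?}");
}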

@@ -928,51 +928,10 @@ fn solve(
aliases,
*source_index,
);
let target = *target;
match unify(&mut UEnv::new(subs), actual, target, Mode::EQ) {
Success {
vars,
// ERROR NOT REPORTED
must_implement_ability: _,
lambda_sets_to_specialize,
extra_metadata: _,
} => {
introduce(subs, rank, pools, &vars);
let CompactionResult {
obligations,
awaiting_specialization,
} = compact_lambda_sets_of_vars(
subs,
derived_env,
arena,
pools,
lambda_sets_to_specialize,
&SolvePhase { abilities_store },
);
// implement obligations not reported
_ = obligations;
// but awaited specializations must be recorded
awaiting_specializations.union(awaiting_specialization);
state
}
Failure(vars, _actual_type, _expected_type, _bad_impls) => {
introduce(subs, rank, pools, &vars);
// ERROR NOT REPORTED
state
}
BadType(vars, _) => {
introduce(subs, rank, pools, &vars);
// ERROR NOT REPORTED
state
}
}
let actual_desc = subs.get(actual);
subs.union(*target, actual, actual_desc);
state
}
Lookup(symbol, expectation_index, region) => {
match env.get_var_by_symbol(symbol) {

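This solver branch previously ran a full unification and then discarded the errors it produced (the ERROR NOT REPORTED arms above). The replacement skips unification entirely: it reads the actual type's descriptor and unions the target variable into it. Below is a minimal union-find sketch of that store-without-unification idea; ToySubs and ToyDesc are hypothetical miniatures, not the real Subs type.

// A miniature substitution table: "storing" a type into a variable is just a
// union-find redirect plus a descriptor overwrite, so nothing can fail and
// nothing needs to be reported.
#[derive(Clone, Debug, PartialEq)]
enum ToyDesc {
    Unbound,
    Num,
}

struct ToySubs {
    parent: Vec<usize>, // union-find parent pointers
    desc: Vec<ToyDesc>, // descriptor stored at each variable
}

impl ToySubs {
    fn fresh(&mut self, desc: ToyDesc) -> usize {
        self.parent.push(self.parent.len());
        self.desc.push(desc);
        self.parent.len() - 1
    }

    fn root(&self, mut var: usize) -> usize {
        while self.parent[var] != var {
            var = self.parent[var];
        }
        var
    }

    fn get(&self, var: usize) -> ToyDesc {
        self.desc[self.root(var)].clone()
    }

    // The analogue of subs.union(target, actual, actual_desc) above: point both
    // variables at one root and overwrite that root's descriptor.
    fn union(&mut self, target: usize, actual: usize, desc: ToyDesc) {
        let root = self.root(actual);
        self.desc[root] = desc;
        let target_root = self.root(target);
        self.parent[target_root] = root;
    }
}

fn main() {
    let mut subs = ToySubs { parent: vec![], desc: vec![] };
    let actual = subs.fresh(ToyDesc::Num);
    let target = subs.fresh(ToyDesc::Unbound);

    // "Store" the actual type into the target variable, as the new code does.
    let actual_desc = subs.get(actual);
    subs.union(target, actual, actual_desc);

    assert_eq!(subs.get(target), ToyDesc::Num);
}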

@@ -579,9 +579,13 @@ fn compact_lambda_set<P: Phase>(
// 3. Unify `t_f1 ~ t_f2`.
trace_compact!(3iter_start. subs, this_lambda_set, t_f1, t_f2);
let (vars, new_obligations, new_lambda_sets_to_specialize, _meta) =
unify(&mut UEnv::new(subs), t_f1, t_f2, Mode::EQ)
.expect_success("ambient functions don't unify");
let (vars, new_obligations, new_lambda_sets_to_specialize, _meta) = unify(
&mut UEnv::new(subs),
t_f1,
t_f2,
Mode::LAMBDA_SET_SPECIALIZATION,
)
.expect_success("ambient functions don't unify");
trace_compact!(3iter_end. subs, t_f1);
introduce(subs, target_rank, pools, &vars);

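Step 3 of compaction now unifies the ambient functions under Mode::LAMBDA_SET_SPECIALIZATION instead of plain Mode::EQ. As the bitflags change later in this commit shows, the new mode is a superset of EQ, so the rest of the unifier still treats it as an ordinary equality unification; the extra bit only tells unify_unspecialized_lambdas that the ambient lambda set specialization procedure is running. A standalone sketch of that flag layout, assuming the bitflags 1.x crate (which the .bits field access in the diff suggests):

use bitflags::bitflags; // assumes bitflags = "1"

bitflags! {
    struct Mode: u8 {
        // Unify two types that must be equal.
        const EQ = 1 << 0;
        // Unification that "adds" a tag to a type.
        const PRESENT = 1 << 1;
        // EQ plus a marker bit: equality unification performed while the
        // ambient lambda set specialization algorithm is running.
        const LAMBDA_SET_SPECIALIZATION = Self::EQ.bits | (1 << 2);
    }
}

fn main() {
    let mode = Mode::LAMBDA_SET_SPECIALIZATION;
    // Still an equality unification as far as the rest of the unifier cares...
    assert!(mode.contains(Mode::EQ));
    // ...but distinguishable where unspecialized lambda sets get merged.
    assert!(mode.contains(Mode::LAMBDA_SET_SPECIALIZATION));
    assert!(!Mode::EQ.contains(Mode::LAMBDA_SET_SPECIALIZATION));
}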

@@ -6715,7 +6715,7 @@ mod solve_expr {
),
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
Id#id(3) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
Id#id(3) : a -[[] + a:id(3):1]-> ({} -[[] + a:id(3):2]-> a) | a has Id
alias : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
"#
print_only_under_alias: true
@@ -7324,6 +7324,169 @@ mod solve_expr {
);
}
#[test]
fn polymorphic_lambda_set_specialization_varying_over_multiple_variables() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
J has j : j -> (k -> {}) | j has J, k has K
K has k : k -> {} | k has K
C := {} has [J {j: jC}]
jC = \@C _ -> k
#^^{-1}
D := {} has [J {j: jD}]
jD = \@D _ -> k
#^^{-1}
E := {} has [K {k}]
k = \@E _ -> {}
#^{-1}
f = \flag, a, b ->
# ^ ^
it =
# ^^
when flag is
A -> j a
# ^
B -> j b
# ^
it
# ^^
main = (f A (@C {}) (@D {})) (@E {})
# ^
# ^^^^^^^^^^^^^^^^^^^
#^^^^{-1}
"#
),
@r###"
jC : C -[[jC(8)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
jD : D -[[jD(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
E#k(10) : E -[[k(10)]]-> {}
a : j | j has J
b : j | j has J
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + a:j(2):2]-> {}) | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + a:j(2):2 + j:j(2):2]-> {}) | a has J, j has J, k has K
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
f : [A, B], C, D -[[f(11)]]-> (E -[[k(10)]]-> {})
f A (@C {}) (@D {}) : E -[[k(10)]]-> {}
main : {}
"###
);
}
#[test]
fn polymorphic_lambda_set_specialization_varying_over_multiple_variables_two_results() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
J has j : j -> (k -> {}) | j has J, k has K
K has k : k -> {} | k has K
C := {} has [J {j: jC}]
jC = \@C _ -> k
#^^{-1}
D := {} has [J {j: jD}]
jD = \@D _ -> k
#^^{-1}
E := {} has [K {k: kE}]
kE = \@E _ -> {}
#^^{-1}
F := {} has [K {k: kF}]
kF = \@F _ -> {}
#^^{-1}
f = \flag, a, b ->
# ^ ^
it =
# ^^
when flag is
A -> j a
# ^
B -> j b
# ^
it
# ^^
main =
#^^^^{-1}
it =
# ^^
(f A (@C {}) (@D {}))
# ^
if True
then it (@E {})
# ^^
else it (@F {})
# ^^
"#
),
@r###"
jC : C -[[jC(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
jD : D -[[jD(10)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
kE : E -[[kE(11)]]-> {}
kF : F -[[kF(12)]]-> {}
a : j | j has J
b : j | j has J
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + a:j(2):2]-> {}) | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + a:j(2):2 + j:j(2):2]-> {}) | a has J, j has J, k has K
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
main : {}
it : k -[[] + k:k(4):1]-> {} | k has K
f : [A, B], C, D -[[f(13)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
it : E -[[kE(11)]]-> {}
it : F -[[kF(12)]]-> {}
"###
);
}
#[test]
fn polymorphic_lambda_set_specialization_branching_over_single_variable() {
infer_queries!(
indoc!(
r#"
app "test" provides [f] to "./platform"
J has j : j -> (k -> {}) | j has J, k has K
K has k : k -> {} | k has K
C := {} has [J {j: jC}]
jC = \@C _ -> k
D := {} has [J {j: jD}]
jD = \@D _ -> k
E := {} has [K {k}]
k = \@E _ -> {}
f = \flag, a, c ->
it =
when flag is
A -> j a
B -> j a
it c
# ^^ ^
"#
),
@r###"
it : k -[[] + j:j(2):2]-> {} | j has J, k has K
c : k | k has K
"###
);
}
#[test]
fn wrap_recursive_opaque_negative_position() {
infer_eq_without_problem(


@@ -350,7 +350,7 @@ fn encode_use_stdlib() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_use_stdlib_without_wrapping_custom() {
assert_evals_to!(
indoc!(
@@ -375,7 +375,7 @@ fn encode_use_stdlib_without_wrapping_custom() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn to_encoder_encode_custom_has_capture() {
assert_evals_to!(
indoc!(
@@ -406,6 +406,9 @@ mod encode_immediate {
#[cfg(feature = "gen-llvm")]
use crate::helpers::llvm::assert_evals_to;
#[cfg(feature = "gen-wasm")]
use crate::helpers::wasm::assert_evals_to;
#[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
use indoc::indoc;
@@ -413,7 +416,7 @@ mod encode_immediate {
use roc_std::RocStr;
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn string() {
assert_evals_to!(
indoc!(
@@ -472,7 +475,7 @@ mod encode_immediate {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_record_one_field_string() {
assert_evals_to!(
indoc!(
@@ -494,7 +497,7 @@ fn encode_derived_record_one_field_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_record_two_fields_strings() {
assert_evals_to!(
indoc!(
@@ -517,7 +520,7 @@ fn encode_derived_record_two_fields_strings() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_nested_record_string() {
assert_evals_to!(
indoc!(
@@ -541,7 +544,7 @@ fn encode_derived_nested_record_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_tag_one_payload_string() {
assert_evals_to!(
indoc!(
@@ -565,7 +568,7 @@ fn encode_derived_tag_one_payload_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_tag_two_payloads_string() {
assert_evals_to!(
indoc!(
@@ -589,7 +592,7 @@ fn encode_derived_tag_two_payloads_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_nested_tag_string() {
assert_evals_to!(
indoc!(
@@ -614,7 +617,7 @@ fn encode_derived_nested_tag_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_nested_record_tag_record() {
assert_evals_to!(
indoc!(
@@ -639,7 +642,7 @@ fn encode_derived_nested_record_tag_record() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_list_string() {
assert_evals_to!(
indoc!(
@@ -663,7 +666,7 @@ fn encode_derived_list_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_list_of_records() {
assert_evals_to!(
indoc!(
@@ -685,3 +688,33 @@ fn encode_derived_list_of_records() {
RocStr
)
}
#[test]
#[cfg(all(
any(feature = "gen-llvm", feature = "gen-wasm"),
not(feature = "gen-llvm-wasm") // hits a stack limit in wasm3
))]
fn encode_derived_record_with_many_types() {
assert_evals_to!(
indoc!(
r#"
app "test"
imports [Encode.{ toEncoder }, Json]
provides [main] to "./platform"
main =
fresh : [Fresh Str, Rotten Str]
fresh = Fresh "tomatoes"
rcd = {actors: ["Idris Elba", "Mila Kunis"], year: 2004u16, rating: {average: 7u8, min: 1u8, max: 10u8, sentiment: fresh}}
result = Str.fromUtf8 (Encode.toBytes rcd Json.toUtf8)
when result is
Ok s -> s
_ -> "<bad>"
"#
),
RocStr::from(
r#"{"actors":["Idris Elba","Mila Kunis"],"rating":{"average":7,"max":10,"min":1,"sentiment":{"Fresh":["tomatoes"]}},"year":2004}"#
),
RocStr
)
}

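The pattern behind these test changes: every test is now gated on any of the enabled codegen backends, and the module imports whichever backend's assert_evals_to helper is active, so the test bodies stay backend-agnostic. A self-contained sketch of the same shape with made-up feature names (backend-a, backend-b) and a toy eval helper standing in for the real macros; the not(...) guard is only there so the toy compiles even if both features are switched on at once.

#[cfg(feature = "backend-a")]
mod backend_a {
    // Pretend to compile and run the source on backend A.
    pub fn eval(src: &str) -> usize {
        src.len()
    }
}

#[cfg(feature = "backend-b")]
mod backend_b {
    // Pretend to compile and run the source on backend B.
    pub fn eval(src: &str) -> usize {
        src.len()
    }
}

// The same name resolves to whichever backend is enabled, so the gated test
// below never mentions a specific backend.
#[cfg(feature = "backend-a")]
use self::backend_a::eval;
#[cfg(all(feature = "backend-b", not(feature = "backend-a")))]
use self::backend_b::eval;

#[test]
#[cfg(any(feature = "backend-a", feature = "backend-b"))]
fn runs_on_any_enabled_backend() {
    assert_eq!(eval("1 + 1"), 5);
}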

@@ -202,7 +202,7 @@ fn find_names_needed(
);
}
}
Structure(Func(arg_vars, _closure_var, ret_var)) => {
Structure(Func(arg_vars, closure_var, ret_var)) => {
for index in arg_vars.into_iter() {
let var = subs[index];
find_names_needed(
@@ -215,6 +215,15 @@ fn find_names_needed(
);
}
find_names_needed(
*closure_var,
subs,
roots,
root_appearances,
names_taken,
find_under_alias,
);
find_names_needed(
*ret_var,
subs,


@@ -100,6 +100,10 @@ bitflags! {
///
/// For example, t1 += [A Str] says we should "add" the tag "A Str" to the type of "t1".
const PRESENT = 1 << 1;
/// Like [`Mode::EQ`], but also instructs the unifier that the ambient lambda set
/// specialization algorithm is running. This has implications for the unification of
/// unspecialized lambda sets; see [`unify_unspecialized_lambdas`].
const LAMBDA_SET_SPECIALIZATION = Mode::EQ.bits | (1 << 2);
}
}
@@ -114,6 +118,11 @@ impl Mode {
self.contains(Mode::PRESENT)
}
fn is_lambda_set_specialization(&self) -> bool {
debug_assert!(!self.contains(Mode::EQ | Mode::PRESENT));
self.contains(Mode::LAMBDA_SET_SPECIALIZATION)
}
fn as_eq(self) -> Self {
(self - Mode::PRESENT) | Mode::EQ
}
@@ -1065,6 +1074,7 @@ struct SeparatedUnionLambdas {
fn separate_union_lambdas<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
mode: Mode,
fields1: UnionLambdas,
fields2: UnionLambdas,
) -> (Outcome<M>, SeparatedUnionLambdas) {
@@ -1173,7 +1183,7 @@ fn separate_union_lambdas<M: MetaCollector>(
maybe_mark_union_recursive(env, var1);
maybe_mark_union_recursive(env, var2);
let outcome = unify_pool(env, pool, var1, var2, Mode::EQ);
let outcome = unify_pool(env, pool, var1, var2, mode);
if !outcome.mismatches.is_empty() {
env.subs.rollback_to(snapshot);
@@ -1214,64 +1224,292 @@ fn separate_union_lambdas<M: MetaCollector>(
)
}
/// ULS-SORT-ORDER:
/// - Arrange into partitions of (_, member, region), in ascending order of (member, region).
/// - Within each partition, place flex-able vars at the end of the partition.
/// - Amongst all flex-able vars, sort by their root key, so that identical vars are next to each other.
#[inline(always)]
fn unspecialized_lambda_set_sorter(subs: &Subs, uls1: Uls, uls2: Uls) -> std::cmp::Ordering {
let Uls(var1, sym1, region1) = uls1;
let Uls(var2, sym2, region2) = uls2;
use std::cmp::Ordering::*;
use Content::*;
match (sym1, region1).cmp(&(sym2, region2)) {
Equal => {
match (
subs.get_content_without_compacting(var1),
subs.get_content_without_compacting(var2),
) {
(FlexAbleVar(..) | RigidAbleVar(..), FlexAbleVar(..) | RigidAbleVar(..)) => subs
.get_root_key_without_compacting(var1)
.cmp(&subs.get_root_key_without_compacting(var2)),
(FlexVar(..) | RigidVar(..), _) | (_, FlexVar(..) | RigidVar(..)) => {
internal_error!("unexpected variable type in unspecialized lambda set!")
}
(FlexAbleVar(..), _) => Greater,
(_, FlexAbleVar(..)) => Less,
// For everything else, the order is irrelevant
(_, _) => Less,
}
}
ord => ord,
}
}
#[inline(always)]
fn sort_unspecialized_lambda_sets(subs: &Subs, mut uls: Vec<Uls>) -> Vec<Uls> {
uls.sort_by(|&uls1, &uls2| unspecialized_lambda_set_sorter(subs, uls1, uls2));
uls
}
#[inline(always)]
fn is_sorted_unspecialized_lamba_set_list(subs: &Subs, uls: &[Uls]) -> bool {
uls == sort_unspecialized_lambda_sets(subs, uls.to_vec())
}
fn unify_unspecialized_lambdas<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
uls1: SubsSlice<Uls>,
uls2: SubsSlice<Uls>,
mode: Mode,
uls_left: SubsSlice<Uls>,
uls_right: SubsSlice<Uls>,
) -> Result<(SubsSlice<Uls>, Outcome<M>), Outcome<M>> {
// For now we merge all variables of unspecialized lambdas in a lambda set that share the same
// ability member/region.
// See the section "A property that's lost, and how we can hold on to it" of
// solve/docs/ambient_lambda_set_specialization.md to see how we can loosen this restriction.
// Note that we don't need to update the bookkeeping of variable -> lambda set to be resolved,
// because if we had v1 -> lset1, and now lset1 ~ lset2, then afterward either lset1 still
// resolves to itself or re-points to lset2.
// In either case the merged unspecialized lambda sets will be there.
match (uls1.is_empty(), uls2.is_empty()) {
(true, true) => Ok((SubsSlice::default(), Default::default())),
(false, true) => Ok((uls1, Default::default())),
(true, false) => Ok((uls2, Default::default())),
(false, false) => {
let mut all_uls = (env.subs.get_subs_slice(uls1).iter())
.chain(env.subs.get_subs_slice(uls2))
.map(|&Uls(var, sym, region)| {
// Take the root key to deduplicate
Uls(env.subs.get_root_key_without_compacting(var), sym, region)
})
.collect::<Vec<_>>();
// Arrange into partitions of (_, member, region).
all_uls.sort_by_key(|&Uls(_, sym, region)| (sym, region));
let (uls_left, uls_right) = match (uls_left.is_empty(), uls_right.is_empty()) {
(true, true) => return Ok((SubsSlice::default(), Default::default())),
(false, true) => return Ok((uls_left, Default::default())),
(true, false) => return Ok((uls_right, Default::default())),
(false, false) => (
env.subs.get_subs_slice(uls_left).to_vec(),
env.subs.get_subs_slice(uls_right).to_vec(),
),
};
// Now merge the variables of unspecialized lambdas pointing to the same
// member/region.
let mut whole_outcome = Outcome::default();
let mut j = 1;
while j < all_uls.len() {
let i = j - 1;
let Uls(var_i, sym_i, region_i) = all_uls[i];
let Uls(var_j, sym_j, region_j) = all_uls[j];
if sym_i == sym_j && region_i == region_j {
let outcome = unify_pool(env, pool, var_i, var_j, Mode::EQ);
if !outcome.mismatches.is_empty() {
return Err(outcome);
// Unfortunately, it is not an invariant that `uls_left` and `uls_right` obey ULS-SORT-ORDER before
// merging.
//
// That's because flex-able variables in unspecialized lambda sets may be unified at any time,
// and unification of flex-able variables may change their root keys, which ULS-SORT-ORDER
// considers.
//
// As such, we must sort beforehand. In practice these sets are very, very small (<5 elements).
let uls_left = sort_unspecialized_lambda_sets(env.subs, uls_left);
let uls_right = sort_unspecialized_lambda_sets(env.subs, uls_right);
let (mut uls_left, mut uls_right) = (uls_left.iter().peekable(), uls_right.iter().peekable());
let mut merged_uls = Vec::with_capacity(uls_left.len() + uls_right.len());
let mut whole_outcome = Outcome::default();
loop {
let (uls_l, uls_r) = match (uls_left.peek(), uls_right.peek()) {
(Some(uls_l), Some(uls_r)) => (**uls_l, **uls_r),
(Some(_), None) => {
merged_uls.push(*uls_left.next().unwrap());
continue;
}
(None, Some(_)) => {
merged_uls.push(*uls_right.next().unwrap());
continue;
}
(None, None) => break,
};
let Uls(var_l, sym_l, region_l) = uls_l;
let Uls(var_r, sym_r, region_r) = uls_r;
use std::cmp::Ordering::*;
match (sym_l, region_l).cmp(&(sym_r, region_r)) {
Less => {
// Left needs to catch up to right, add it to the merged lambdas.
merged_uls.push(*uls_left.next().unwrap());
}
Greater => {
// Right needs to catch up to left, add it to the merged lambdas.
merged_uls.push(*uls_right.next().unwrap());
}
Equal => {
// The interesting case - both point to the same specialization.
use Content::*;
match (
env.subs.get_content_without_compacting(var_l),
env.subs.get_content_without_compacting(var_r),
) {
(FlexAbleVar(..) | RigidAbleVar(..), FlexAbleVar(..) | RigidAbleVar(..)) => {
// If the types are root-equivalent, de-duplicate them.
//
// Otherwise, the type variables are disjoint, and we want to keep both
// of them, for purposes of disjoint variable lambda specialization.
//
// For more information, see "A Property thats lost, and how we can hold on to it"
// in solve/docs/ambient_lambda_set_specialization.md.
if env.subs.equivalent_without_compacting(var_l, var_r) {
// ... a1 ...
// ... b1=a1 ...
// => ... a1 ...
//
// Keep the one on the left, drop the one on the right.
//
// Then progress both, because the invariant tells us they must be
// disjoint, and if there were any concrete variables, they would have
// appeared earlier.
let _dropped = uls_right.next().unwrap();
let kept = uls_left.next().unwrap();
merged_uls.push(*kept);
} else if mode.is_lambda_set_specialization() {
// ... a1 ...
// ... b1 ...
// => ... a1=b1 ...
//
// If we're in the process of running the ambient lambda set
// specialization procedure, disjoint type variables being merged from
// the left and right lists are treated specially!
//
// In particular, we are unifying a local list of lambda sets, for
// which the specialization is for (on the left), with specialization
// lambda sets, which have just been freshened (on the right).
//
// [ .. a:lam:1 ] (local, undergoing specialization)
// [ .. a':lam:1 ] (specialization lambda sets, just freshened)
//
// Because the specialization lambdas are freshened, they certainly are
// disjoint from the local lambdas - but they may be equivalent in
// principle, from the perspective of a human looking at the
// unification!
//
// Running with the example above, the specialization lambda set has an
// unspecialized lambda `a':lam:1`. Now, this is disjoint from
// `a:lam:1` in the local lambda set, from the purely technical
// perspective that `a' != a`.
//
// But, in expected function, they **should not** be treated as disjoint!
// In this case, the specialization lambda is not introducing any new
// information, and is targeting exactly the local lambda `a:lam:1`.
//
// So, to avoid introducing superfluous variables, we unify these disjoint
// variables once, and then progress on both sides. We progress on both
// sides to avoid unifying more than what we should in our principle.
//
// It doesn't matter which side we choose to progress on, since after
// unification of flex vars roots are equivalent. So, choose the left
// side.
//
// See the ambient lambda set specialization document for more details.
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
debug_assert!(env.subs.equivalent_without_compacting(var_l, var_r));
let _dropped = uls_right.next().unwrap();
let kept = uls_left.next().unwrap();
merged_uls.push(*kept);
} else {
// ... a1 ...
// ... b1 ...
// => ... a1, b1 ...
//
// Keep both. But, we have to be careful about how we do this -
// immediately add the one with the lower root, and advance that side;
// keep the other as-is, because the next variable on the advanced side
// might be lower than the current non-advanced variable. For example:
//
// ... 640 645 ...
// ... 670 ...
//
// we want to add `640` to the merged list and advance to
//
// ... 645 ...
// ... 670 ...
//
// rather than adding both `640` and `670`, and skipping the comparison
// of `645` with `670`.
//
// An important thing to notice is that we *don't* want to advance
// both sides, because if these two variables are disjoint, then
// advancing one side *might* make the next comparison be between
// equivalent variables, for example in a case like
//
// ... 640 670 ...
// ... 670 ...
//
// In the above case, we certainly only want to advance the left side!
if env.subs.get_root_key(var_l) < env.subs.get_root_key(var_r) {
let kept = uls_left.next().unwrap();
merged_uls.push(*kept);
} else {
let kept = uls_right.next().unwrap();
merged_uls.push(*kept);
}
}
}
(FlexAbleVar(..) | RigidAbleVar(..), _) => {
// ... a1 ...
// ... {foo: _} ...
// => ... {foo: _} ...
//
// Unify them, then advance the merged flex var.
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
let _dropped = uls_right.next().unwrap();
}
(_, FlexAbleVar(..) | RigidAbleVar(..)) => {
// ... {foo: _} ...
// ... a1 ...
// => ... {foo: _} ...
//
// Unify them, then advance the merged flex var.
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
let _dropped = uls_left.next().unwrap();
}
(_, _) => {
// ... {foo: _} ...
// ... {foo: _} ...
// => ... {foo: _} ...
//
// Unify them, then advance one.
// (the choice is arbitrary, so we choose the left)
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
let _dropped = uls_left.next().unwrap();
}
whole_outcome.union(outcome);
// Keep the Uls in position `i` and remove the one in position `j`.
all_uls.remove(j);
} else {
// Keep both Uls, look at the next one.
j += 1;
}
}
Ok((
SubsSlice::extend_new(&mut env.subs.unspecialized_lambda_sets, all_uls),
whole_outcome,
))
}
}
debug_assert!(
is_sorted_unspecialized_lamba_set_list(env.subs, &merged_uls),
"merging of unspecialized lambda sets does not preserve sort! {:?}",
merged_uls
);
Ok((
SubsSlice::extend_new(&mut env.subs.unspecialized_lambda_sets, merged_uls),
whole_outcome,
))
}
fn unify_lambda_set_help<M: MetaCollector>(
@@ -1316,7 +1554,7 @@ fn unify_lambda_set_help<M: MetaCollector>(
only_in_right,
joined,
},
) = separate_union_lambdas(env, pool, solved1, solved2);
) = separate_union_lambdas(env, pool, ctx.mode, solved1, solved2);
let all_lambdas = joined
.into_iter()
@@ -1343,7 +1581,7 @@ fn unify_lambda_set_help<M: MetaCollector>(
(None, None) => OptVariable::NONE,
};
let merged_unspecialized = match unify_unspecialized_lambdas(env, pool, uls1, uls2) {
let merged_unspecialized = match unify_unspecialized_lambdas(env, pool, ctx.mode, uls1, uls2) {
Ok((merged, outcome)) => {
whole_outcome.union(outcome);
merged

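The core of unify_unspecialized_lambdas above is a two-pointer merge of two lists already in ULS-SORT-ORDER, with the interesting decisions happening when both sides sit at the same (member, region) key. Below is a minimal, self-contained sketch of that merge over toy data; ToyUls, toy_sort, and toy_merge are hypothetical simplifications (plain integers instead of Subs variables, and no concrete-lambda or real unification cases), meant only to show how disjoint able variables are kept when merging ordinary lambda sets but collapsed while the ambient specialization procedure runs.

// Toy stand-ins: a variable is just its root key (u32), an ability member is a
// u32 symbol id, and a region is a u8, mirroring Uls(var, member, region).
#[derive(Clone, Copy, Debug)]
struct ToyUls {
    var: u32,
    member: u32,
    region: u8,
}

// ULS-SORT-ORDER on the toy data: ascending (member, region), then by variable,
// so identical variables end up adjacent within a partition.
fn toy_sort(uls: &mut Vec<ToyUls>) {
    uls.sort_by_key(|u| (u.member, u.region, u.var));
}

// Merge two lists. `specialization` plays the role of
// Mode::LAMBDA_SET_SPECIALIZATION: when set, a right-hand (freshened) variable
// meeting a left-hand variable at the same (member, region) is folded into it
// rather than kept as a new, disjoint entry.
fn toy_merge(mut left: Vec<ToyUls>, mut right: Vec<ToyUls>, specialization: bool) -> Vec<ToyUls> {
    toy_sort(&mut left);
    toy_sort(&mut right);
    let (mut l, mut r) = (left.into_iter().peekable(), right.into_iter().peekable());
    let mut merged = Vec::new();
    use std::cmp::Ordering::*;
    loop {
        let (ul, ur) = match (l.peek(), r.peek()) {
            (Some(&ul), Some(&ur)) => (ul, ur),
            (Some(_), None) => {
                merged.push(l.next().unwrap());
                continue;
            }
            (None, Some(_)) => {
                merged.push(r.next().unwrap());
                continue;
            }
            (None, None) => break,
        };
        match (ul.member, ul.region).cmp(&(ur.member, ur.region)) {
            // One side has to catch up to the other: take the smaller key.
            Less => merged.push(l.next().unwrap()),
            Greater => merged.push(r.next().unwrap()),
            // Same variable (already root-equivalent), or a freshened
            // specialization variable targeting exactly this unspecialized
            // lambda: keep one copy and advance both sides.
            Equal if ul.var == ur.var || specialization => {
                merged.push(l.next().unwrap());
                let _dropped = r.next();
            }
            // Genuinely disjoint able variables: keep both, but only advance
            // the side holding the smaller variable so later comparisons are
            // not skipped.
            Equal => {
                if ul.var < ur.var {
                    merged.push(l.next().unwrap());
                } else {
                    merged.push(r.next().unwrap());
                }
            }
        }
    }
    merged
}

fn main() {
    // The example from the comment in the diff: 640 and 645 on the left,
    // 670 on the right, all pointing at member 7, region 1.
    let left = vec![
        ToyUls { var: 640, member: 7, region: 1 },
        ToyUls { var: 645, member: 7, region: 1 },
    ];
    let right = vec![ToyUls { var: 670, member: 7, region: 1 }];

    // Ordinary unification: the variables are disjoint, so all three are kept.
    assert_eq!(toy_merge(left.clone(), right.clone(), false).len(), 3);

    // During ambient lambda set specialization: the freshened 670 is folded
    // into 640 instead of being kept as a separate entry.
    assert_eq!(toy_merge(left, right, true).len(), 2);
}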

@@ -1226,7 +1226,7 @@ mod test_reporting {
// variables they can put themselves in, and to run the constraint algorithm
// against that extra variable, rather than possibly having to translate a `Type`
// again.
@r#"
@r###"
CIRCULAR TYPE /code/proj/Main.roc
I'm inferring a weird self-referential type for `f`:
@@ -1265,7 +1265,20 @@ mod test_reporting {
infinitely.
List -> List a
"#
CIRCULAR TYPE /code/proj/Main.roc
I'm inferring a weird self-referential type for `main`:
3 main =
^^^^
Here is my best effort at writing down the type. You will see for
parts of the type that repeat something already printed out
infinitely.
List -> List a
"###
);
test_report!(