Merge pull request #4745 from roc-lang/i4712

Ensure that disjoint nested lambda sets force parents to be disjoint
Ayaz 2022-12-14 14:55:25 -06:00 committed by GitHub
commit 9b4e30a903
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 431 additions and 202 deletions

Cargo.lock generated

@@ -3700,6 +3700,7 @@ dependencies = [
"roc_serialize",
"roc_types",
"static_assertions",
"ven_pretty",
]
[[package]]


@@ -21,6 +21,8 @@ bumpalo.workspace = true
static_assertions.workspace = true
bitvec.workspace = true
ven_pretty = { path = "../../vendor/pretty" }
[dev-dependencies]
pretty_assertions.workspace = true
indoc.workspace = true


@@ -0,0 +1,5 @@
mod pretty_print;
pub use pretty_print::pretty_print_declarations;
pub use pretty_print::pretty_print_def;
pub use pretty_print::Ctx as PPCtx;


@@ -1,16 +1,50 @@
//! Pretty-prints the canonical AST back to check our work - do things look reasonable?
use roc_can::def::Def;
use roc_can::expr::Expr::{self, *};
use roc_can::expr::{ClosureData, OpaqueWrapFunctionData, WhenBranch};
use roc_can::pattern::{Pattern, RecordDestruct};
use crate::def::Def;
use crate::expr::Expr::{self, *};
use crate::expr::{
ClosureData, DeclarationTag, Declarations, FunctionDef, OpaqueWrapFunctionData, WhenBranch,
};
use crate::pattern::{Pattern, RecordDestruct};
use roc_module::symbol::Interns;
use roc_module::symbol::{Interns, ModuleId, Symbol};
use ven_pretty::{Arena, DocAllocator, DocBuilder};
pub struct Ctx<'a> {
pub home: ModuleId,
pub interns: &'a Interns,
pub print_lambda_names: bool,
}
pub fn pretty_print_declarations(c: &Ctx, declarations: &Declarations) -> String {
let f = Arena::new();
let mut defs = Vec::with_capacity(declarations.len());
for (index, tag) in declarations.iter_bottom_up() {
let symbol = declarations.symbols[index].value;
let body = &declarations.expressions[index];
let def = match tag {
DeclarationTag::Value => def_symbol_help(c, &f, symbol, &body.value),
DeclarationTag::Function(f_index)
| DeclarationTag::Recursive(f_index)
| DeclarationTag::TailRecursive(f_index) => {
let function_def = &declarations.function_bodies[f_index.index()].value;
toplevel_function(c, &f, symbol, function_def, &body.value)
}
DeclarationTag::Expectation => todo!(),
DeclarationTag::ExpectationFx => todo!(),
DeclarationTag::Destructure(_) => todo!(),
DeclarationTag::MutualRecursion { .. } => todo!(),
};
defs.push(def);
}
f.intersperse(defs, f.hardline().append(f.hardline()))
.1
.pretty(80)
.to_string()
}
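For orientation, a hedged sketch of how this new entry point is meant to be driven; it mirrors the solve-test harness change further down. The `home`, `interns`, and `decls` values are assumed to come from a module that has already been canonicalized and solved, so this is illustrative wiring rather than a complete program:

```rust
use roc_can::debug::{pretty_print_declarations, PPCtx};
use roc_can::expr::Declarations;
use roc_module::symbol::{Interns, ModuleId};

// Assumption: `home`, `interns`, and `decls` were produced elsewhere by
// canonicalizing and solving a module; only the wiring is shown here.
fn dump_decls(home: ModuleId, interns: &Interns, decls: &Declarations) -> String {
    let ctx = PPCtx {
        home,
        interns,
        // `true` renders closures as `\x -[name]-> body`, exposing which
        // lambda each capture set belongs to (as in the new test below).
        print_lambda_names: true,
    };
    pretty_print_declarations(&ctx, decls)
}
```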
pub fn pretty_print_def(c: &Ctx, d: &Def) -> String {
@@ -40,10 +74,58 @@ fn def<'a>(c: &Ctx, f: &'a Arena<'a>, d: &'a Def) -> DocBuilder<'a, Arena<'a>> {
annotation: _,
} = d;
pattern(c, PPrec::Free, f, &loc_pattern.value)
def_help(c, f, &loc_pattern.value, &loc_expr.value)
}
fn def_symbol_help<'a>(
c: &Ctx,
f: &'a Arena<'a>,
sym: Symbol,
body: &'a Expr,
) -> DocBuilder<'a, Arena<'a>> {
pp_sym(c, f, sym)
.append(f.text(" ="))
.append(f.line())
.append(expr(c, EPrec::Free, f, &loc_expr.value))
.append(expr(c, EPrec::Free, f, body))
.nest(2)
.group()
}
fn def_help<'a>(
c: &Ctx,
f: &'a Arena<'a>,
pat: &'a Pattern,
body: &'a Expr,
) -> DocBuilder<'a, Arena<'a>> {
pattern(c, PPrec::Free, f, pat)
.append(f.text(" ="))
.append(f.line())
.append(expr(c, EPrec::Free, f, body))
.nest(2)
.group()
}
fn toplevel_function<'a>(
c: &Ctx,
f: &'a Arena<'a>,
sym: Symbol,
function_def: &'a FunctionDef,
body: &'a Expr,
) -> DocBuilder<'a, Arena<'a>> {
let FunctionDef { arguments, .. } = function_def;
let args = arguments
.iter()
.map(|arg| pattern(c, PPrec::Free, f, &arg.2.value));
pp_sym(c, f, sym)
.append(f.text(" ="))
.append(f.line())
.append(f.text("\\"))
.append(f.intersperse(args, f.text(", ")))
.append(f.text("->"))
.append(f.line())
.append(expr(c, EPrec::Free, f, body))
.nest(2)
.group()
}
@@ -87,11 +169,7 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
.append("]")
.group(),
),
Var(sym, _) | AbilityMember(sym, _, _) => f.text(format!(
"{}.{}",
sym.module_string(c.interns),
sym.as_str(c.interns),
)),
Var(sym, _) | AbilityMember(sym, _, _) => pp_sym(c, f, *sym),
When {
loc_cond, branches, ..
} => maybe_paren!(
@@ -184,6 +262,7 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
Closure(ClosureData {
arguments,
loc_body,
name,
..
}) => f
.text("\\")
@@ -195,7 +274,13 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
f.text(", "),
),
)
.append(f.text(" ->"))
.append(if c.print_lambda_names {
f.text(" -[")
.append(pp_sym(c, f, *name))
.append(f.text("]->"))
} else {
f.text(" ->")
})
.append(f.line())
.append(expr(c, Free, f, &loc_body.value))
.nest(2)
@@ -290,6 +375,18 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
}
}
fn pp_sym<'a>(c: &Ctx, f: &'a Arena<'a>, sym: Symbol) -> DocBuilder<'a, Arena<'a>> {
if sym.module_id() == c.home {
f.text(sym.as_str(c.interns).to_owned())
} else {
f.text(format!(
"{}.{}",
sym.module_string(c.interns),
sym.as_str(c.interns),
))
}
}
fn branch<'a>(c: &Ctx, f: &'a Arena<'a>, b: &'a WhenBranch) -> DocBuilder<'a, Arena<'a>> {
let WhenBranch {
patterns,
@@ -333,11 +430,7 @@ fn pattern<'a>(
Identifier(sym)
| AbilityMemberSpecialization {
specializes: sym, ..
} => f.text(format!(
"{}.{}",
sym.module_string(c.interns),
sym.as_str(c.interns),
)),
} => pp_sym(c, f, *sym),
AppliedTag {
tag_name,
arguments,
@@ -373,12 +466,12 @@ fn pattern<'a>(
f.intersperse(
destructs.iter().map(|l| &l.value).map(
|RecordDestruct { label, typ, .. }| match typ {
roc_can::pattern::DestructType::Required => f.text(label.as_str()),
roc_can::pattern::DestructType::Optional(_, e) => f
crate::pattern::DestructType::Required => f.text(label.as_str()),
crate::pattern::DestructType::Optional(_, e) => f
.text(label.as_str())
.append(f.text(" ? "))
.append(expr(c, EPrec::Free, f, &e.value)),
roc_can::pattern::DestructType::Guard(_, p) => f
crate::pattern::DestructType::Guard(_, p) => f
.text(label.as_str())
.append(f.text(": "))
.append(pattern(c, Free, f, &p.value)),


@@ -28,3 +28,5 @@ pub mod string;
pub mod traverse;
pub use derive::DERIVED_REGION;
pub mod debug;


@@ -162,9 +162,15 @@ impl<'a> DeclarationToIndex<'a> {
}
}
}
let similar = self
.elements
.iter()
.filter_map(|((s, lay), _)| if *s == needle_symbol { Some(lay) } else { None })
.collect::<std::vec::Vec<_>>();
unreachable!(
"symbol/layout {:?} {:#?} combo must be in DeclarationToIndex",
needle_symbol, needle_layout
"symbol/layout {:?} {:#?} combo must be in DeclarationToIndex\nHowever {} similar layouts were found:\n{:#?}",
needle_symbol, needle_layout, similar.len(), similar
)
}
}


@@ -262,6 +262,7 @@ mod solve_expr {
#[derive(Default)]
struct InferOptions {
print_can_decls: bool,
print_only_under_alias: bool,
allow_errors: bool,
}
@@ -302,7 +303,20 @@ mod solve_expr {
let queries = parse_queries(&src);
assert!(!queries.is_empty(), "No queries provided!");
let mut solved_queries = Vec::with_capacity(queries.len());
let mut output_parts = Vec::with_capacity(queries.len() + 2);
if options.print_can_decls {
use roc_can::debug::{pretty_print_declarations, PPCtx};
let ctx = PPCtx {
home,
interns: &interns,
print_lambda_names: true,
};
let pretty_decls = pretty_print_declarations(&ctx, &decls);
output_parts.push(pretty_decls);
output_parts.push("\n".to_owned());
}
for TypeQuery(region) in queries.into_iter() {
let start = region.start().offset;
let end = region.end().offset;
@@ -340,12 +354,12 @@ mod solve_expr {
}
};
solved_queries.push(elaborated);
output_parts.push(elaborated);
}
let pretty_solved_queries = solved_queries.join("\n");
let pretty_output = output_parts.join("\n");
expected(&pretty_solved_queries);
expected(&pretty_output);
}
macro_rules! infer_queries {
@@ -6720,9 +6734,9 @@ mod solve_expr {
"#
),
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
A#id(5) : {} -[[id(5)]]-> ({} -[[8]]-> {})
Id#id(3) : a -[[] + a:id(3):1]-> ({} -[[] + a:id(3):2]-> a) | a has Id
alias : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
alias : {} -[[id(5)]]-> ({} -[[8]]-> {})
"#
print_only_under_alias: true
)
@@ -6751,8 +6765,8 @@ mod solve_expr {
"#
),
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
it : {} -[[8(8)]]-> {}
A#id(5) : {} -[[id(5)]]-> ({} -[[8]]-> {})
it : {} -[[8]]-> {}
"#
print_only_under_alias: true
)
@@ -6782,8 +6796,8 @@ mod solve_expr {
"#
),
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
A#id(5) : {} -[[id(5)]]-> ({} -[[8]]-> {})
A#id(5) : {} -[[id(5)]]-> ({} -[[8]]-> {})
"#
print_only_under_alias: true
)
@@ -6903,7 +6917,7 @@ mod solve_expr {
#^^^^^^^^^^^^^^^^^^^^^^{-1}
"#
),
@r#"[\{} -> {}, \{} -> {}] : List ({}* -[[1(1), 2(2)]]-> {})"#
@r###"[\{} -> {}, \{} -> {}] : List ({}* -[[1, 2]]-> {})"###
)
}
@@ -7078,7 +7092,7 @@ mod solve_expr {
#^^^{-1}
"#
),
@r#"fun : {} -[[thunk(9) (({} -[[15(15)]]-> { s1 : Str })) ({ s1 : Str } -[[g(4)]]-> ({} -[[13(13) Str]]-> Str)), thunk(9) (({} -[[14(14)]]-> Str)) (Str -[[f(3)]]-> ({} -[[11(11)]]-> Str))]]-> Str"#
@r#"fun : {} -[[thunk(9) (({} -[[15]]-> { s1 : Str })) ({ s1 : Str } -[[g(4)]]-> ({} -[[13 Str]]-> Str)), thunk(9) (({} -[[14]]-> Str)) (Str -[[f(3)]]-> ({} -[[11]]-> Str))]]-> Str"#
print_only_under_alias: true
);
}
@@ -7323,9 +7337,9 @@ mod solve_expr {
"#
),
@r###"
Fo#f(7) : Fo, b -[[f(7)]]-> ({} -[[13(13) b]]-> ({} -[[] + b:g(4):2]-> {})) | b has G
Go#g(8) : Go -[[g(8)]]-> ({} -[[14(14)]]-> {})
Fo#f(7) : Fo, Go -[[f(7)]]-> ({} -[[13(13) Go]]-> ({} -[[14(14)]]-> {}))
Fo#f(7) : Fo, b -[[f(7)]]-> ({} -[[13 b]]-> ({} -[[] + b:g(4):2]-> {})) | b has G
Go#g(8) : Go -[[g(8)]]-> ({} -[[14]]-> {})
Fo#f(7) : Fo, Go -[[f(7)]]-> ({} -[[13 Go]]-> ({} -[[14]]-> {}))
"###
);
}
@@ -7692,7 +7706,7 @@ mod solve_expr {
@r###"
const : Str -[[const(2)]]-> (Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str)
compose : (Str -a-> Str), (Str -[[]]-> Str) -[[compose(1)]]-> (Str -a-> Str)
\c1, c2 -> compose c1 c2 : (Str -a-> Str), (Str -[[]]-> Str) -[[11(11)]]-> (Str -a-> Str)
\c1, c2 -> compose c1 c2 : (Str -a-> Str), (Str -[[]]-> Str) -[[11]]-> (Str -a-> Str)
res : Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str
res : Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str
"###
@@ -8077,7 +8091,7 @@ mod solve_expr {
# ^^^^^^^^^^^^^^
"#
),
@"N#Decode.decoder(3) : List U8, fmt -[[7(7)]]-> { rest : List U8, result : [Err [TooShort], Ok U8] } | fmt has DecoderFormatting"
@"N#Decode.decoder(3) : List U8, fmt -[[7]]-> { rest : List U8, result : [Err [TooShort], Ok U8] } | fmt has DecoderFormatting"
print_only_under_alias: true
);
}
@@ -8360,7 +8374,7 @@ mod solve_expr {
),
@r###"
isEqQ : ({} -[[]]-> Str), ({} -[[]]-> Str) -[[isEqQ(2)]]-> [False, True]
isEqQ : ({} -[[6(6), 7(7)]]-> Str), ({} -[[6(6), 7(7)]]-> Str) -[[isEqQ(2)]]-> [False, True]
isEqQ : ({} -[[6, 7]]-> Str), ({} -[[6, 7]]-> Str) -[[isEqQ(2)]]-> [False, True]
"###
print_only_under_alias: true
);
@@ -8456,4 +8470,63 @@ mod solve_expr {
"###
);
}
#[test]
fn disjoint_nested_lambdas_result_in_disjoint_parents_issue_4712() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Parser a : {} -> a
v1 : {}
v1 = {}
v2 : Str
v2 = ""
apply : Parser (a -> Str), a -> Parser Str
apply = \fnParser, valParser ->
\{} ->
(fnParser {}) (valParser)
map : a, (a -> Str) -> Parser Str
map = \simpleParser, transform ->
apply (\{} -> transform) simpleParser
parseInput = \{} ->
when [ map v1 (\{} -> ""), map v2 (\s -> s) ] is
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ -> ""
main = parseInput {} == ""
"#
),
@r###"
v1 = {}
v2 = ""
apply = \fnParser, valParser-> \{} -[9]-> (fnParser {}) valParser
map = \simpleParser, transform-> apply \{} -[12]-> transform simpleParser
parseInput =
\{}->
when [
map v1 \{} -[13]-> "",
map v2 \s -[14]-> s,
] is
_ -> ""
main = Bool.isEq (parseInput {}) ""
[ map v1 (\{} -> ""), map v2 (\s -> s) ] : List (({} -[[9 (({} -[[12 (Str -[[14]]-> Str)]]-> (Str -[[14]]-> Str))) Str, 9 (({} -[[12 ({} -[[13]]-> Str)]]-> ({} -[[13]]-> Str))) {}]]-> Str))
"###
print_only_under_alias: true
print_can_decls: true
);
}
}


@@ -103,9 +103,9 @@ fn list() {
# Specialization lambda sets:
# @<1>: [[custom(3)]]
#Derived.decoder_list =
Decode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Decode.decodeWith #Derived.bytes (Decode.list Decode.decoder) #Derived.fmt
decodeWith #Derived.bytes (list decoder) #Derived.fmt
"###
)
})
@@ -121,21 +121,18 @@ fn record_2_fields() {
# Specialization lambda sets:
# @<1>: [[custom(22)]]
#Derived.decoder_{first,second} =
Decode.custom
custom
\#Derived.bytes3, #Derived.fmt3 ->
Decode.decodeWith
decodeWith
#Derived.bytes3
(Decode.record
(record
{ second: Err NoField, first: Err NoField }
\#Derived.stateRecord2, #Derived.field ->
when #Derived.field is
"first" ->
Keep (Decode.custom
Keep (custom
\#Derived.bytes, #Derived.fmt ->
when Decode.decodeWith
#Derived.bytes
Decode.decoder
#Derived.fmt is
when decodeWith #Derived.bytes decoder #Derived.fmt is
#Derived.rec ->
{
result: when #Derived.rec.result is
@@ -145,12 +142,9 @@ fn record_2_fields() {
rest: #Derived.rec.rest
})
"second" ->
Keep (Decode.custom
Keep (custom
\#Derived.bytes2, #Derived.fmt2 ->
when Decode.decodeWith
#Derived.bytes2
Decode.decoder
#Derived.fmt2 is
when decodeWith #Derived.bytes2 decoder #Derived.fmt2 is
#Derived.rec2 ->
{
result: when #Derived.rec2.result is


@@ -187,9 +187,9 @@ fn empty_record() {
# @<2>: [[custom(2) {}]]
#Derived.toEncoder_{} =
\#Derived.rcd ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith #Derived.bytes (Encode.record []) #Derived.fmt
appendWith #Derived.bytes (record []) #Derived.fmt
"###
)
})
@@ -207,9 +207,9 @@ fn zero_field_record() {
# @<2>: [[custom(2) {}]]
#Derived.toEncoder_{} =
\#Derived.rcd ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith #Derived.bytes (Encode.record []) #Derived.fmt
appendWith #Derived.bytes (record []) #Derived.fmt
"###
)
})
@@ -227,11 +227,11 @@ fn one_field_record() {
# @<2>: [[custom(2) { a : val }]] | val has Encoding
#Derived.toEncoder_{a} =
\#Derived.rcd ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
appendWith
#Derived.bytes
(Encode.record [{ value: Encode.toEncoder #Derived.rcd.a, key: "a" }])
(record [{ value: toEncoder #Derived.rcd.a, key: "a" }])
#Derived.fmt
"###
)
@@ -250,14 +250,14 @@ fn two_field_record() {
# @<2>: [[custom(2) { a : val, b : val1 }]] | val has Encoding, val1 has Encoding
#Derived.toEncoder_{a,b} =
\#Derived.rcd ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
appendWith
#Derived.bytes
(Encode.record
(record
[
{ value: Encode.toEncoder #Derived.rcd.a, key: "a" },
{ value: Encode.toEncoder #Derived.rcd.b, key: "b" },
{ value: toEncoder #Derived.rcd.a, key: "a" },
{ value: toEncoder #Derived.rcd.b, key: "b" },
])
#Derived.fmt
"###
@@ -290,12 +290,12 @@ fn tag_one_label_zero_args() {
# @<2>: [[custom(2) [A]]]
#Derived.toEncoder_[A 0] =
\#Derived.tag ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
appendWith
#Derived.bytes
(when #Derived.tag is
A -> Encode.tag "A" [])
A -> tag "A" [])
#Derived.fmt
"###
)
@@ -314,18 +314,13 @@ fn tag_one_label_two_args() {
# @<2>: [[custom(4) [A val val1]]] | val has Encoding, val1 has Encoding
#Derived.toEncoder_[A 2] =
\#Derived.tag ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
appendWith
#Derived.bytes
(when #Derived.tag is
A #Derived.2 #Derived.3 ->
Encode.tag
"A"
[
Encode.toEncoder #Derived.2,
Encode.toEncoder #Derived.3,
])
tag "A" [toEncoder #Derived.2, toEncoder #Derived.3])
#Derived.fmt
"###
)
@@ -339,30 +334,30 @@ fn tag_two_labels() {
v!([A v!(U8) v!(STR) v!(U16), B v!(STR)]),
|golden| {
assert_snapshot!(golden, @r###"
# derived for [A U8 Str U16, B Str]
# [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> (List U8, fmt -[[custom(6) [A val val1 val1, B val1]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# Specialization lambda sets:
# @<1>: [[toEncoder_[A 3,B 1](0)]]
# @<2>: [[custom(6) [A val val1 val1, B val1]]] | val has Encoding, val1 has Encoding
#Derived.toEncoder_[A 3,B 1] =
\#Derived.tag ->
Encode.custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
#Derived.bytes
(when #Derived.tag is
A #Derived.2 #Derived.3 #Derived.4 ->
Encode.tag
"A"
[
Encode.toEncoder #Derived.2,
Encode.toEncoder #Derived.3,
Encode.toEncoder #Derived.4,
]
B #Derived.5 -> Encode.tag "B" [Encode.toEncoder #Derived.5])
#Derived.fmt
"###
# derived for [A U8 Str U16, B Str]
# [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> (List U8, fmt -[[custom(6) [A val val1 val1, B val1]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# Specialization lambda sets:
# @<1>: [[toEncoder_[A 3,B 1](0)]]
# @<2>: [[custom(6) [A val val1 val1, B val1]]] | val has Encoding, val1 has Encoding
#Derived.toEncoder_[A 3,B 1] =
\#Derived.tag ->
custom
\#Derived.bytes, #Derived.fmt ->
appendWith
#Derived.bytes
(when #Derived.tag is
A #Derived.2 #Derived.3 #Derived.4 ->
tag
"A"
[
toEncoder #Derived.2,
toEncoder #Derived.3,
toEncoder #Derived.4,
]
B #Derived.5 -> tag "B" [toEncoder #Derived.5])
#Derived.fmt
"###
)
},
)
@@ -375,29 +370,24 @@ fn recursive_tag_union() {
v!([Nil, Cons v!(U8) v!(^lst) ] as lst),
|golden| {
assert_snapshot!(golden, @r###"
# derived for [Cons U8 $rec, Nil] as $rec
# [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> (List U8, fmt -[[custom(4) [Cons val val1, Nil]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# Specialization lambda sets:
# @<1>: [[toEncoder_[Cons 2,Nil 0](0)]]
# @<2>: [[custom(4) [Cons val val1, Nil]]] | val has Encoding, val1 has Encoding
#Derived.toEncoder_[Cons 2,Nil 0] =
\#Derived.tag ->
Encode.custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
#Derived.bytes
(when #Derived.tag is
Cons #Derived.2 #Derived.3 ->
Encode.tag
"Cons"
[
Encode.toEncoder #Derived.2,
Encode.toEncoder #Derived.3,
]
Nil -> Encode.tag "Nil" [])
#Derived.fmt
"###
# derived for [Cons U8 $rec, Nil] as $rec
# [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> (List U8, fmt -[[custom(4) [Cons val val1, Nil]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
# Specialization lambda sets:
# @<1>: [[toEncoder_[Cons 2,Nil 0](0)]]
# @<2>: [[custom(4) [Cons val val1, Nil]]] | val has Encoding, val1 has Encoding
#Derived.toEncoder_[Cons 2,Nil 0] =
\#Derived.tag ->
custom
\#Derived.bytes, #Derived.fmt ->
appendWith
#Derived.bytes
(when #Derived.tag is
Cons #Derived.2 #Derived.3 ->
tag "Cons" [toEncoder #Derived.2, toEncoder #Derived.3]
Nil -> tag "Nil" [])
#Derived.fmt
"###
)
},
)
@@ -415,13 +405,11 @@ fn list() {
# @<2>: [[custom(4) (List val)]] | val has Encoding
#Derived.toEncoder_list =
\#Derived.lst ->
Encode.custom
custom
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
appendWith
#Derived.bytes
(Encode.list
#Derived.lst
\#Derived.elem -> Encode.toEncoder #Derived.elem)
(list #Derived.lst \#Derived.elem -> toEncoder #Derived.elem)
#Derived.fmt
"###
)


@@ -178,7 +178,7 @@ fn one_field_record() {
# Specialization lambda sets:
# @<1>: [[hash_{a}(0)]]
#Derived.hash_{a} =
\#Derived.hasher, #Derived.rcd -> Hash.hash #Derived.hasher #Derived.rcd.a
\#Derived.hasher, #Derived.rcd -> hash #Derived.hasher #Derived.rcd.a
"###
)
})
@@ -195,7 +195,7 @@ fn two_field_record() {
# @<1>: [[hash_{a,b}(0)]]
#Derived.hash_{a,b} =
\#Derived.hasher, #Derived.rcd ->
Hash.hash (Hash.hash #Derived.hasher #Derived.rcd.a) #Derived.rcd.b
hash (hash #Derived.hasher #Derived.rcd.a) #Derived.rcd.b
"###
)
})
@@ -227,7 +227,7 @@ fn tag_one_label_newtype() {
# @<1>: [[hash_[A 2](0)]]
#Derived.hash_[A 2] =
\#Derived.hasher, A #Derived.2 #Derived.3 ->
Hash.hash (Hash.hash #Derived.hasher #Derived.2) #Derived.3
hash (hash #Derived.hasher #Derived.2) #Derived.3
"###
)
})
@@ -246,12 +246,10 @@ fn tag_two_labels() {
\#Derived.hasher, #Derived.union ->
when #Derived.union is
A #Derived.3 #Derived.4 #Derived.5 ->
Hash.hash
(Hash.hash
(Hash.hash (Hash.addU8 #Derived.hasher 0) #Derived.3)
#Derived.4)
hash
(hash (hash (addU8 #Derived.hasher 0) #Derived.3) #Derived.4)
#Derived.5
B #Derived.6 -> Hash.hash (Hash.addU8 #Derived.hasher 1) #Derived.6
B #Derived.6 -> hash (addU8 #Derived.hasher 1) #Derived.6
"###
)
})
@@ -269,8 +267,8 @@ fn tag_two_labels_no_payloads() {
#Derived.hash_[A 0,B 0] =
\#Derived.hasher, #Derived.union ->
when #Derived.union is
A -> Hash.addU8 #Derived.hasher 0
B -> Hash.addU8 #Derived.hasher 1
A -> addU8 #Derived.hasher 0
B -> addU8 #Derived.hasher 1
"###
)
})
@@ -289,10 +287,8 @@ fn recursive_tag_union() {
\#Derived.hasher, #Derived.union ->
when #Derived.union is
Cons #Derived.3 #Derived.4 ->
Hash.hash
(Hash.hash (Hash.addU8 #Derived.hasher 0) #Derived.3)
#Derived.4
Nil -> Hash.addU8 #Derived.hasher 1
hash (hash (addU8 #Derived.hasher 0) #Derived.3) #Derived.4
Nil -> addU8 #Derived.hasher 1
"###
)
})


@@ -5,5 +5,4 @@ mod encoding;
mod eq;
mod hash;
mod pretty_print;
mod util;


@@ -5,10 +5,10 @@ use bumpalo::Bump;
use roc_packaging::cache::RocCacheDir;
use ven_pretty::DocAllocator;
use crate::pretty_print::{pretty_print_def, Ctx};
use roc_can::{
abilities::{AbilitiesStore, SpecializationLambdaSets},
constraint::Constraints,
debug::{pretty_print_def, PPCtx},
def::Def,
expr::Declarations,
module::{
@@ -529,8 +529,12 @@ where
interns.all_ident_ids.insert(DERIVED_MODULE, ident_ids);
DERIVED_MODULE.register_debug_idents(interns.all_ident_ids.get(&DERIVED_MODULE).unwrap());
let ctx = Ctx { interns: &interns };
let derived_program = pretty_print_def(&ctx, &derived_def);
let pp_ctx = PPCtx {
interns: &interns,
print_lambda_names: false,
home: builtin_module,
};
let derived_program = pretty_print_def(&pp_ctx, &derived_def);
check_derived_typechecks_and_golden(
derived_def,


@@ -4107,3 +4107,41 @@ fn issue_4349() {
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn issue_4712() {
assert_evals_to!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Parser a : {} -> a
v1 : {}
v1 = {}
v2 : Str
v2 = "cd"
apply : Parser (a -> Str), a -> Parser Str
apply = \fnParser, valParser ->
\{} ->
(fnParser {}) (valParser)
map : a, (a -> Str) -> Parser Str
map = \simpleParser, transform ->
apply (\{} -> transform) simpleParser
gen = \{} ->
[ map v1 (\{} -> "ab"), map v2 (\s -> s) ]
|> List.map (\f -> f {})
|> Str.joinWith ","
main = gen {}
"#
),
RocStr::from("ab,cd"),
RocStr
);
}


@@ -767,18 +767,23 @@ fn write_content<'a>(
buf.push_str("[[");
let print_symbol = |symbol: &Symbol| {
let ident_str = symbol.as_str(env.interns);
let ident_index_str = symbol.ident_id().index().to_string();
let disambiguation = if ident_str != ident_index_str {
// The pretty name is a named identifier; print the ident index as well to avoid
// ambiguity (in shadows or ability specializations).
format!("({ident_index_str})")
} else {
"".to_string()
};
if env.home == symbol.module_id() {
format!(
"{}({})",
symbol.as_str(env.interns),
symbol.ident_id().index(),
)
format!("{}{}", ident_str, disambiguation)
} else {
format!(
"{}.{}({})",
"{}.{}{}",
symbol.module_string(env.interns),
symbol.as_str(env.interns),
symbol.ident_id().index(),
ident_str,
disambiguation
)
}
};
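The effect of this change shows up throughout the updated snapshots above: anonymous closure symbols, whose pretty name is already the numeric ident index, drop the redundant suffix (`8(8)` becomes `8`), while named symbols keep their disambiguator (`id(5)` stays `id(5)`). A minimal stand-alone sketch of the rule in `print_symbol`, using plain strings in place of the real `Symbol`/`Interns` types (the function name here is illustrative):

```rust
// Illustrative only: mirrors the disambiguation rule in `print_symbol`,
// with plain strings standing in for `Symbol`/`Interns`.
fn disambiguated(ident_str: &str, ident_index: u32) -> String {
    if ident_str == ident_index.to_string() {
        // Anonymous lambda: the pretty name is already the index, e.g. "8".
        ident_str.to_string()
    } else {
        // Named identifier: keep the index to tell apart shadows and
        // ability specializations, e.g. "id(5)".
        format!("{ident_str}({ident_index})")
    }
}

fn main() {
    assert_eq!(disambiguated("8", 8), "8");
    assert_eq!(disambiguated("id", 5), "id(5)");
}
```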


@@ -3645,11 +3645,8 @@ impl Alias {
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum Mismatch {
TypeMismatch,
IfConditionNotBool,
InconsistentIfElse,
InconsistentWhenBranches,
CanonicalizationProblem,
TypeNotInRange,
DisjointLambdaSets,
DoesNotImplementAbiity(Variable, Symbol),
}


@@ -320,7 +320,6 @@ impl<M: MetaCollector> Outcome<M> {
pub struct Env<'a> {
pub subs: &'a mut Subs,
compute_outcome_only: bool,
seen_recursion: VecSet<(Variable, Variable)>,
fixed_variables: VecSet<Variable>,
}
@@ -329,21 +328,11 @@ impl<'a> Env<'a> {
pub fn new(subs: &'a mut Subs) -> Self {
Self {
subs,
compute_outcome_only: false,
seen_recursion: Default::default(),
fixed_variables: Default::default(),
}
}
// Computes a closure in outcome-only mode. Unifications run in outcome-only mode will check
// for unifiability, but will not modify type variables or merge them.
pub fn with_outcome_only<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
self.compute_outcome_only = true;
let result = f(self);
self.compute_outcome_only = false;
result
}
fn add_recursion_pair(&mut self, var1: Variable, var2: Variable) {
let pair = (
self.subs.get_root_key_without_compacting(var1),
@@ -1331,7 +1320,7 @@ fn separate_union_lambdas<M: MetaCollector>(
mode: Mode,
fields1: UnionLambdas,
fields2: UnionLambdas,
) -> (Outcome<M>, SeparatedUnionLambdas) {
) -> Result<(Outcome<M>, SeparatedUnionLambdas), Outcome<M>> {
debug_assert!(
fields1.is_sorted_allow_duplicates(env.subs),
"not sorted: {:?}",
@@ -1451,19 +1440,55 @@ fn separate_union_lambdas<M: MetaCollector>(
//
// If they are not unifiable, that means the two lambdas must be
// different (since they have different capture sets), and so we don't
// want to merge the variables.
let variables_are_unifiable = env.with_outcome_only(|env| {
unify_pool::<NoCollector>(env, pool, var1, var2, mode)
.mismatches
.is_empty()
});
// want to merge the variables. Instead, we'll treat the lambda sets
// as disjoint, and keep them as independent lambdas in the resulting
// set.
//
// # Nested lambda sets
//
// XREF https://github.com/roc-lang/roc/issues/4712
//
// We must be careful to ensure that if unifying nested lambda sets
// results in disjoint lambdas, that the parent lambda sets are
// ultimately treated disjointly as well.
// Consider
//
// v1: {} -[ foo ({} -[ bar Str ]-> {}) ]-> {}
// ~ v2: {} -[ foo ({} -[ bar U64 ]-> {}) ]-> {}
//
// When considering unification of the nested sets
//
// [ bar Str ]
// ~ [ bar U64 ]
//
// we should not unify these sets, even disjointly, because that would
// ultimately lead us to unifying
//
// v1 ~ v2
// => {} -[ foo ({} -[ bar Str, bar U64 ]-> {}) ] -> {}
//
// which is quite wrong - we do not have a lambda `foo` that captures
// either `bar captures: Str` or `bar captures: U64`, we have two
// different lambdas `foo` that capture different `bars`. The target
// unification is
//
// v1 ~ v2
// => {} -[ foo ({} -[ bar Str ]-> {}),
// foo ({} -[ bar U64 ]-> {}) ] -> {}
let subs_snapshot = env.subs.snapshot();
let pool_snapshot = pool.len();
let outcome: Outcome<M> = unify_pool(env, pool, var1, var2, mode);
if !variables_are_unifiable {
if !outcome.mismatches.is_empty() {
// Rolling back will also pull apart any nested lambdas that
// were joined into the same set.
env.subs.rollback_to(subs_snapshot);
pool.truncate(pool_snapshot);
continue 'try_next_right;
} else {
let outcome = unify_pool(env, pool, var1, var2, mode);
whole_outcome.union(outcome);
}
let outcome = unify_pool(env, pool, var1, var2, mode);
whole_outcome.union(outcome);
}
// All the variables unified, so we can join the left + right.
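In short, the fix swaps the old outcome-only unifiability probe for a real, transactional attempt: snapshot the substitution, unify the capture variables for real, and roll everything back (including any nested lambda-set merges) when a mismatch such as the new `Mismatch::DisjointLambdaSets` surfaces, so the parent sets stay disjoint. A toy sketch of that snapshot/rollback pattern, with `Store` and `try_unify` as illustrative stand-ins rather than the compiler's real `Subs`/`unify_pool`:

```rust
// Toy substitution store with snapshots; illustrative only.
struct Store {
    bindings: Vec<(u32, u32)>,
}
struct Snapshot(usize);

impl Store {
    fn snapshot(&self) -> Snapshot {
        Snapshot(self.bindings.len())
    }
    fn rollback_to(&mut self, s: Snapshot) {
        self.bindings.truncate(s.0);
    }
}

/// Pretend unifier: records a merge, but reports failure when the two
/// capture sets turn out to be disjoint (cf. Mismatch::DisjointLambdaSets).
fn try_unify(store: &mut Store, a: u32, b: u32, disjoint: bool) -> Result<(), ()> {
    store.bindings.push((a, b));
    if disjoint { Err(()) } else { Ok(()) }
}

/// Speculatively unify; on failure, undo every nested merge so the parent
/// lambda sets are kept apart instead of being joined.
fn unify_or_keep_disjoint(store: &mut Store, a: u32, b: u32, disjoint: bool) -> bool {
    let snapshot = store.snapshot();
    match try_unify(store, a, b, disjoint) {
        Ok(()) => true,
        Err(()) => {
            store.rollback_to(snapshot);
            false
        }
    }
}

fn main() {
    let mut store = Store { bindings: Vec::new() };
    assert!(unify_or_keep_disjoint(&mut store, 1, 2, false));
    assert!(!unify_or_keep_disjoint(&mut store, 3, 4, true));
    // Only the successful merge survives; the rolled-back attempt left no trace.
    assert_eq!(store.bindings, vec![(1, 2)]);
}
```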
@@ -1487,14 +1512,14 @@ fn separate_union_lambdas<M: MetaCollector>(
}
}
(
Ok((
whole_outcome,
SeparatedUnionLambdas {
only_in_left,
only_in_right,
joined,
},
)
))
}
/// ULS-SORT-ORDER:
@@ -1829,7 +1854,10 @@ fn unify_lambda_set_help<M: MetaCollector>(
only_in_right,
joined,
},
) = separate_union_lambdas(env, pool, ctx.mode, solved1, solved2);
) = match separate_union_lambdas(env, pool, ctx.mode, solved1, solved2) {
Ok((outcome, separated)) => (outcome, separated),
Err(err_outcome) => return err_outcome,
};
let all_lambdas = joined
.into_iter()
@@ -3495,24 +3523,22 @@ fn unify_recursion<M: MetaCollector>(
pub fn merge<M: MetaCollector>(env: &mut Env, ctx: &Context, content: Content) -> Outcome<M> {
let mut outcome: Outcome<M> = Outcome::default();
if !env.compute_outcome_only {
let rank = ctx.first_desc.rank.min(ctx.second_desc.rank);
let desc = Descriptor {
content,
rank,
mark: Mark::NONE,
copy: OptVariable::NONE,
};
let rank = ctx.first_desc.rank.min(ctx.second_desc.rank);
let desc = Descriptor {
content,
rank,
mark: Mark::NONE,
copy: OptVariable::NONE,
};
outcome
.extra_metadata
.record_changed_variable(env.subs, ctx.first);
outcome
.extra_metadata
.record_changed_variable(env.subs, ctx.second);
outcome
.extra_metadata
.record_changed_variable(env.subs, ctx.first);
outcome
.extra_metadata
.record_changed_variable(env.subs, ctx.second);
env.subs.union(ctx.first, ctx.second, desc);
}
env.subs.union(ctx.first, ctx.second, desc);
outcome
}