Merge branch 'trunk' into ct/case-when-to-when-is

Richard Feldman 2019-12-23 23:29:43 -08:00 committed by GitHub
commit fd36e153ed
6 changed files with 353 additions and 192 deletions

View file

@@ -318,17 +318,15 @@ fn canonicalize_def_pattern(
env: &mut Env,
loc_pattern: &Located<ast::Pattern>,
scope: &mut Scope,
flex_info: &mut Info,
var_store: &VarStore,
expr_type: Type,
) -> (PatternState, Located<Pattern>) {
// Exclude the current ident from shadowable_idents; you can't shadow yourself!
// (However, still include it in scope, because you *can* recursively refer to yourself.)
let mut shadowable_idents = scope.idents.clone();
remove_idents(&loc_pattern.value, &mut shadowable_idents);
let pattern_var = var_store.fresh();
let pattern_type = Type::Variable(pattern_var);
let pattern_expected = PExpected::NoExpectation(pattern_type);
let pattern_expected = PExpected::NoExpectation(expr_type);
let mut state = PatternState {
headers: SendMap::default(),
@@ -348,8 +346,6 @@ fn canonicalize_def_pattern(
pattern_expected,
);
flex_info.vars.push(pattern_var);
(state, loc_can_pattern)
}
@@ -397,18 +393,12 @@ fn canonicalize_def<'a>(
// immediately, then canonicalize it to get its Variable, then use that
// Variable to generate the extra constraints.
let (_state, loc_can_pattern) =
canonicalize_def_pattern(env, loc_pattern, scope, flex_info, var_store);
let (pattern_state, loc_can_pattern) =
canonicalize_def_pattern(env, loc_pattern, scope, var_store, expr_type.clone());
// Any time there's a lookup on this symbol in the outer Let,
// it should result in this expression's type. After all, this
// is the type to which this symbol is defined!
add_pattern_to_lookup_types(
&scope,
&loc_pattern,
&mut flex_info.def_types,
expr_type.clone(),
);
for (k, v) in pattern_state.headers.clone() {
flex_info.def_types.insert(k, v);
}
// annotation sans body cannot introduce new rigids that are visible in other annotations
// but the rigids can show up in type error messages, so still register them
@@ -429,10 +419,16 @@ fn canonicalize_def<'a>(
can_annotation,
);
// ensure expected type unifies with annotated type
flex_info
.constraints
.push(Eq(expr_type, annotation_expected, loc_annotation.region));
flex_info.constraints.push(Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: pattern_state.vars,
def_types: pattern_state.headers,
defs_constraint: And(vec![
And(pattern_state.constraints),
Eq(expr_type, annotation_expected, loc_annotation.region),
]),
ret_constraint: True,
})));
// Fabricate a body for this annotation, that will error at runtime
let value = Expr::RuntimeError(NoImplementation);
@@ -487,18 +483,12 @@ fn canonicalize_def<'a>(
}
TypedDef(loc_pattern, loc_annotation, loc_expr) => {
let (state, loc_can_pattern) =
canonicalize_def_pattern(env, loc_pattern, scope, flex_info, var_store);
let (pattern_state, loc_can_pattern) =
canonicalize_def_pattern(env, loc_pattern, scope, var_store, expr_type.clone());
// Any time there's a lookup on this symbol in the outer Let,
// it should result in this expression's type. After all, this
// is the type to which this symbol is defined!
add_pattern_to_lookup_types(
&scope,
&loc_pattern,
&mut flex_info.def_types,
expr_type.clone(),
);
for (k, v) in &pattern_state.headers {
flex_info.def_types.insert(k.clone(), v.clone());
}
// bookkeeping for tail-call detection. If we're assigning to an
// identifier (e.g. `f = \x -> ...`), then this symbol can be tail-called.
@@ -557,9 +547,9 @@ fn canonicalize_def<'a>(
flex_info.constraints.push(Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: state.vars,
def_types: state.headers,
defs_constraint: And(state.constraints),
flex_vars: pattern_state.vars,
def_types: pattern_state.headers,
defs_constraint: And(pattern_state.constraints),
ret_constraint,
})));
@@ -673,18 +663,12 @@ fn canonicalize_def<'a>(
// If we have a pattern, then the def has a body (that is, it's not a
// standalone annotation), so we need to canonicalize the pattern and expr.
Body(loc_pattern, loc_expr) => {
let (state, loc_can_pattern) =
canonicalize_def_pattern(env, loc_pattern, scope, flex_info, var_store);
let (pattern_state, loc_can_pattern) =
canonicalize_def_pattern(env, loc_pattern, scope, var_store, expr_type.clone());
// Any time there's a lookup on this symbol in the outer Let,
// it should result in this expression's type. After all, this
// is the type to which this symbol is defined!
add_pattern_to_lookup_types(
&scope,
&loc_pattern,
&mut flex_info.def_types,
expr_type.clone(),
);
for (k, v) in &pattern_state.headers {
flex_info.def_types.insert(k.clone(), v.clone());
}
// bookkeeping for tail-call detection. If we're assigning to an
// identifier (e.g. `f = \x -> ...`), then this symbol can be tail-called.
@@ -714,9 +698,9 @@ fn canonicalize_def<'a>(
flex_info.constraints.push(Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: state.vars,
def_types: state.headers,
defs_constraint: And(state.constraints),
flex_vars: pattern_state.vars,
def_types: pattern_state.headers,
defs_constraint: And(pattern_state.constraints),
ret_constraint,
})));
@@ -967,38 +951,6 @@ pub fn can_defs_with_return<'a>(
}
}
/// This lets us share bound type variables between nested annotations, e.g.
///
/// blah : Map k v -> Int
/// blah mapping =
/// nested : Map k v # <-- the same k and v from the top-level annotation
/// nested = mapping
/// 42
///
/// In elm/compiler this is called RTV - the "Rigid Type Variables" dictionary.
/// type BoundTypeVars = ImMap<Box<str>, Type>;
fn add_pattern_to_lookup_types<'a>(
scope: &Scope,
loc_pattern: &'a Located<ast::Pattern<'a>>,
lookup_types: &mut SendMap<Symbol, Located<Type>>,
expr_type: Type,
) {
let region = loc_pattern.region;
match loc_pattern.value {
ast::Pattern::Identifier(name) => {
let symbol = scope.symbol(&name);
let loc_type = Located {
region,
value: expr_type,
};
lookup_types.insert(symbol, loc_type);
}
_ => panic!("TODO constrain patterns other than Identifier"),
}
}
fn pattern_from_def<'a>(def: &'a ast::Def<'a>) -> Option<&'a Located<ast::Pattern<'a>>> {
use crate::parse::ast::Def::*;

View file

@@ -1,4 +1,5 @@
use crate::can::env::Env;
use crate::can::ident::Lowercase;
use crate::can::num::{finish_parsing_base, finish_parsing_float, finish_parsing_int};
use crate::can::problem::Problem;
use crate::can::scope::Scope;
@@ -23,6 +24,7 @@ pub enum Pattern {
IntLiteral(i64),
FloatLiteral(f64),
ExactString(Box<str>),
RecordDestructure(Vec<(Located<Pattern>, Option<Located<Pattern>>)>),
EmptyRecordLiteral,
Underscore,
@@ -69,79 +71,42 @@ pub fn canonicalize_pattern<'a>(
region: Region,
shadowable_idents: &'a mut ImMap<Ident, (Symbol, Region)>,
expected: PExpected<Type>,
) -> Located<Pattern> {
// add_constraints recurses by itself
add_constraints(&pattern, &scope, region, expected, state, var_store);
canonicalize_pattern_help(
env,
state,
var_store,
scope,
pattern_type,
pattern,
region,
shadowable_idents,
)
}
// TODO trim down these arguments
#[allow(clippy::too_many_arguments)]
fn canonicalize_pattern_help<'a>(
env: &'a mut Env,
state: &'a mut PatternState,
var_store: &VarStore,
scope: &mut Scope,
pattern_type: PatternType,
pattern: &'a ast::Pattern<'a>,
region: Region,
shadowable_idents: &'a mut ImMap<Ident, (Symbol, Region)>,
) -> Located<Pattern> {
use self::PatternType::*;
use crate::parse::ast::Pattern::*;
let can_pattern = match &pattern {
&Identifier(ref name) => {
let lowercase_ident = Ident::Unqualified((*name).into());
// We use shadowable_idents for this, and not scope, because for assignments
// they are different. When canonicalizing a particular assignment, that new
// ident is in scope (for recursion) but not shadowable.
//
// For example, when canonicalizing (fibonacci = ...), `fibonacci` should be in scope
// so that it can refer to itself without getting a naming problem, but it should not
// be in the collection of shadowable idents because you can't shadow yourself!
match shadowable_idents.get(&lowercase_ident) {
Some((_, region)) => {
let loc_shadowed_ident = Located {
region: *region,
value: lowercase_ident,
};
// This is already in scope, meaning it's about to be shadowed.
// Shadowing is not allowed!
env.problem(Problem::Shadowing(loc_shadowed_ident.clone()));
// Change this Pattern to a Shadowed variant, so that
// codegen knows to generate a runtime exception here.
Pattern::Shadowed(loc_shadowed_ident)
}
None => {
// Make sure we aren't shadowing something in the home module's scope.
let qualified_ident =
Ident::Qualified(env.home.clone(), lowercase_ident.name());
match scope.idents.get(&qualified_ident) {
Some((_, region)) => {
let loc_shadowed_ident = Located {
region: *region,
value: qualified_ident,
};
// This is already in scope, meaning it's about to be shadowed.
// Shadowing is not allowed!
env.problem(Problem::Shadowing(loc_shadowed_ident.clone()));
// Change this Pattern to a Shadowed variant, so that
// codegen knows to generate a runtime exception here.
Pattern::Shadowed(loc_shadowed_ident)
}
None => {
let new_ident = qualified_ident.clone();
let new_name = qualified_ident.name();
let symbol = scope.symbol(&new_name);
// This is a fresh identifier that wasn't already in scope.
// Add it to scope!
let symbol_and_region = (symbol.clone(), region);
// Add this to both scope.idents *and* shadowable_idents.
// The latter is relevant when recursively canonicalizing
// tag application patterns, which can bring multiple
// new idents into scope. For example, it's important that
// we catch (Blah foo foo) -> … as being an example of shadowing.
scope
.idents
.insert(new_ident.clone(), symbol_and_region.clone());
shadowable_idents.insert(new_ident, symbol_and_region);
Pattern::Identifier(symbol)
}
}
}
match canonicalize_pattern_identifier(name, env, scope, region, shadowable_idents) {
Ok(symbol) => Pattern::Identifier(symbol),
Err(loc_shadowed_ident) => Pattern::Shadowed(loc_shadowed_ident),
}
}
&GlobalTag(name) => {
@@ -223,7 +188,7 @@ pub fn canonicalize_pattern<'a>(
// &EmptyRecordLiteral => Pattern::EmptyRecordLiteral,
&SpaceBefore(sub_pattern, _) | SpaceAfter(sub_pattern, _) | Nested(sub_pattern) => {
return canonicalize_pattern(
return canonicalize_pattern_help(
env,
state,
var_store,
@@ -232,21 +197,145 @@ pub fn canonicalize_pattern<'a>(
sub_pattern,
region,
shadowable_idents,
expected,
)
}
&RecordDestructure(patterns) => {
let mut fields = Vec::with_capacity(patterns.len());
for loc_pattern in patterns {
match loc_pattern.value {
Identifier(ref name) => {
let result = match canonicalize_pattern_identifier(
name,
env,
scope,
region,
shadowable_idents,
) {
Ok(symbol) => Pattern::Identifier(symbol),
Err(loc_shadowed_ident) => Pattern::Shadowed(loc_shadowed_ident),
};
fields.push((Located::at(region, result), None));
}
RecordField(ref name, loc_guard) => {
let result = match canonicalize_pattern_identifier(
name,
env,
scope,
region,
shadowable_idents,
) {
Ok(symbol) => Pattern::Identifier(symbol),
Err(loc_shadowed_ident) => Pattern::Shadowed(loc_shadowed_ident),
};
let can_guard = canonicalize_pattern_help(
env,
state,
var_store,
scope,
pattern_type,
&loc_guard.value,
loc_guard.region,
shadowable_idents,
);
fields.push((Located::at(region, result), Some(can_guard)));
}
_ => panic!("invalid pattern in record"),
}
}
Pattern::RecordDestructure(fields)
}
&RecordField(_name, _loc_pattern) => {
unreachable!("should be handled in RecordDestructure");
}
_ => panic!("TODO finish restoring can_pattern branch for {:?}", pattern),
};
add_constraints(&pattern, &scope, region, expected, state);
Located {
region,
value: can_pattern,
}
}
pub fn canonicalize_pattern_identifier<'a>(
name: &'a &str,
env: &'a mut Env,
scope: &mut Scope,
region: Region,
shadowable_idents: &'a mut ImMap<Ident, (Symbol, Region)>,
) -> Result<Symbol, Located<Ident>> {
let lowercase_ident = Ident::Unqualified((*name).into());
// We use shadowable_idents for this, and not scope, because for assignments
// they are different. When canonicalizing a particular assignment, that new
// ident is in scope (for recursion) but not shadowable.
//
// For example, when canonicalizing (fibonacci = ...), `fibonacci` should be in scope
// so that it can refer to itself without getting a naming problem, but it should not
// be in the collection of shadowable idents because you can't shadow yourself!
match shadowable_idents.get(&lowercase_ident) {
Some((_, region)) => {
let loc_shadowed_ident = Located {
region: *region,
value: lowercase_ident,
};
// This is already in scope, meaning it's about to be shadowed.
// Shadowing is not allowed!
env.problem(Problem::Shadowing(loc_shadowed_ident.clone()));
// Change this Pattern to a Shadowed variant, so that
// codegen knows to generate a runtime exception here.
Err(loc_shadowed_ident)
}
None => {
// Make sure we aren't shadowing something in the home module's scope.
let qualified_ident = Ident::Qualified(env.home.clone(), lowercase_ident.name());
match scope.idents.get(&qualified_ident) {
Some((_, region)) => {
let loc_shadowed_ident = Located {
region: *region,
value: qualified_ident,
};
// This is already in scope, meaning it's about to be shadowed.
// Shadowing is not allowed!
env.problem(Problem::Shadowing(loc_shadowed_ident.clone()));
// Change this Pattern to a Shadowed variant, so that
// codegen knows to generate a runtime exception here.
Err(loc_shadowed_ident)
}
None => {
let new_ident = qualified_ident.clone();
let new_name = qualified_ident.name();
let symbol = scope.symbol(&new_name);
// This is a fresh identifier that wasn't already in scope.
// Add it to scope!
let symbol_and_region = (symbol.clone(), region);
// Add this to both scope.idents *and* shadowable_idents.
// The latter is relevant when recursively canonicalizing
// tag application patterns, which can bring multiple
// new idents into scope. For example, it's important that
// we catch (Blah foo foo) -> … as being an example of shadowing.
scope
.idents
.insert(new_ident.clone(), symbol_and_region.clone());
shadowable_idents.insert(new_ident, symbol_and_region);
Ok(symbol)
}
}
}
}
}
/// When we detect an unsupported pattern type (e.g. 5 = 1 + 2 is unsupported because you can't
/// assign to Int patterns), report it to Env and return an UnsupportedPattern runtime error pattern.
fn unsupported_pattern(env: &mut Env, pattern_type: PatternType, region: Region) -> Pattern {
@@ -269,6 +358,7 @@ fn add_constraints<'a>(
region: Region,
expected: PExpected<Type>,
state: &'a mut PatternState,
var_store: &VarStore,
) {
use crate::parse::ast::Pattern::*;
@@ -322,15 +412,58 @@ fn add_constraints<'a>(
}
SpaceBefore(pattern, _) | SpaceAfter(pattern, _) | Nested(pattern) => {
add_constraints(pattern, scope, region, expected, state)
add_constraints(pattern, scope, region, expected, state, var_store)
}
GlobalTag(_)
| PrivateTag(_)
| Apply(_, _)
| RecordDestructure(_)
| RecordField(_, _)
| EmptyRecordLiteral => {
RecordDestructure(patterns) => {
let ext_var = var_store.fresh();
let ext_type = Type::Variable(ext_var);
let mut field_types: SendMap<Lowercase, Type> = SendMap::default();
for loc_pattern in patterns {
let pat_var = var_store.fresh();
let pat_type = Type::Variable(pat_var);
let expected = PExpected::NoExpectation(pat_type.clone());
match loc_pattern.value {
Identifier(name) | RecordField(name, _) => {
let symbol = scope.symbol(name);
if !state.headers.contains_key(&symbol) {
state
.headers
.insert(symbol, Located::at(region, pat_type.clone()));
}
field_types.insert(name.into(), pat_type.clone());
}
_ => panic!("invalid record pattern"),
}
if let RecordField(_, guard) = loc_pattern.value {
add_constraints(
&guard.value,
scope,
guard.region,
expected,
state,
var_store,
);
}
state.vars.push(pat_var);
}
let record_type = Type::Record(field_types, Box::new(ext_type));
let record_con =
Constraint::Pattern(region, PatternCategory::Record, record_type, expected);
state.constraints.push(record_con);
}
RecordField(_, _) => {
// unreachable, this pattern is already handled by RecordDestructure
}
GlobalTag(_) | PrivateTag(_) | Apply(_, _) | EmptyRecordLiteral => {
panic!("TODO add_constraints for {:?}", pattern);
}
}
@@ -425,11 +558,16 @@ fn add_idents_from_pattern<'a>(
// },
}
RecordDestructure(_) => {
panic!("TODO implement RecordDestructure pattern in add_idents_from_pattern.");
RecordDestructure(patterns) => {
for loc_pattern in patterns {
add_idents_from_pattern(&loc_pattern.region, &loc_pattern.value, scope, answer);
}
}
RecordField(_, _) => {
panic!("TODO implement RecordField pattern in add_idents_from_pattern.");
RecordField(name, loc_pattern) => {
let symbol = scope.symbol(&name);
answer.push_back((Ident::Unqualified((*name).into()), (symbol, *region)));
add_idents_from_pattern(&loc_pattern.region, &loc_pattern.value, scope, answer);
}
SpaceBefore(pattern, _) | SpaceAfter(pattern, _) | Nested(pattern) => {
// Ignore the newline/comment info; it doesn't matter in canonicalization.

View file

@@ -290,7 +290,7 @@ fn expr_to_pattern<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<'a>,
for loc_assigned_field in loc_assigned_fields {
let region = loc_assigned_field.region;
let value = assigned_field_to_pattern(arena, &loc_assigned_field.value)?;
let value = assigned_expr_field_to_pattern(arena, &loc_assigned_field.value)?;
loc_patterns.push(Located { region, value });
}
@@ -331,7 +331,8 @@ fn expr_to_pattern<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<'a>,
}
}
pub fn assigned_field_to_pattern<'a>(
/// use for expressions like { x: a + b }
pub fn assigned_expr_field_to_pattern<'a>(
arena: &'a Bump,
assigned_field: &AssignedField<'a, Expr<'a>>,
) -> Result<Pattern<'a>, Fail> {
@@ -354,17 +355,63 @@ pub fn assigned_field_to_pattern<'a>(
}
AssignedField::LabelOnly(name) => Pattern::Identifier(name.value),
AssignedField::SpaceBefore(nested, spaces) => Pattern::SpaceBefore(
arena.alloc(assigned_field_to_pattern(arena, nested)?),
arena.alloc(assigned_expr_field_to_pattern(arena, nested)?),
spaces,
),
AssignedField::SpaceAfter(nested, spaces) => Pattern::SpaceAfter(
arena.alloc(assigned_field_to_pattern(arena, nested)?),
arena.alloc(assigned_expr_field_to_pattern(arena, nested)?),
spaces,
),
AssignedField::Malformed(string) => Pattern::Malformed(string),
})
}
/// Used for patterns like { x: Just _ }
pub fn assigned_pattern_field_to_pattern<'a>(
arena: &'a Bump,
assigned_field: &AssignedField<'a, Pattern<'a>>,
backup_region: Region,
) -> Result<Located<Pattern<'a>>, Fail> {
// the assigned fields always store spaces, but this slice is often empty
Ok(match assigned_field {
AssignedField::LabeledValue(name, spaces, value) => {
let pattern = value.value.clone();
let region = Region::span_across(&value.region, &value.region);
let result = arena.alloc(Located {
region: value.region,
value: pattern,
});
if spaces.is_empty() {
Located::at(region, Pattern::RecordField(name.value, result))
} else {
Located::at(
region,
Pattern::SpaceAfter(
arena.alloc(Pattern::RecordField(name.value, result)),
spaces,
),
)
}
}
AssignedField::LabelOnly(name) => Located::at(name.region, Pattern::Identifier(name.value)),
AssignedField::SpaceBefore(nested, spaces) => {
let can_nested = assigned_pattern_field_to_pattern(arena, nested, backup_region)?;
Located::at(
can_nested.region,
Pattern::SpaceBefore(arena.alloc(can_nested.value), spaces),
)
}
AssignedField::SpaceAfter(nested, spaces) => {
let can_nested = assigned_pattern_field_to_pattern(arena, nested, backup_region)?;
Located::at(
can_nested.region,
Pattern::SpaceAfter(arena.alloc(can_nested.value), spaces),
)
}
AssignedField::Malformed(string) => Located::at(backup_region, Pattern::Malformed(string)),
})
}
/// A def beginning with a parenthetical pattern, for example:
///
/// (UserId userId) = ...
@@ -745,15 +792,23 @@ fn underscore_pattern<'a>() -> impl Parser<'a, Pattern<'a>> {
}
fn record_destructure<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>> {
map!(
collection!(
char('{'),
loc!(ident_pattern()),
char(','),
char('}'),
min_indent
),
Pattern::RecordDestructure
then(
record!(loc!(pattern(min_indent)), min_indent),
move |arena, state, assigned_fields| {
let mut patterns = Vec::with_capacity_in(assigned_fields.len(), arena);
for assigned_field in assigned_fields {
match assigned_pattern_field_to_pattern(
arena,
&assigned_field.value,
assigned_field.region,
) {
Ok(pattern) => patterns.push(pattern),
Err(e) => return Err((e, state)),
}
}
Ok((Pattern::RecordDestructure(patterns), state))
},
)
}
@@ -1270,7 +1325,7 @@ pub fn record_literal<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>> {
for loc_assigned_field in assigned_fields {
let region = loc_assigned_field.region;
match assigned_field_to_pattern(arena, &loc_assigned_field.value) {
match assigned_expr_field_to_pattern(arena, &loc_assigned_field.value) {
Ok(value) => loc_patterns.push(Located { region, value }),
// an Expr became a pattern that should not be.
Err(e) => return Err((e, state)),
@@ -1305,7 +1360,7 @@ pub fn record_literal<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>> {
for loc_assigned_field in assigned_fields {
let region = loc_assigned_field.region;
match assigned_field_to_pattern(arena, &loc_assigned_field.value) {
match assigned_expr_field_to_pattern(arena, &loc_assigned_field.value) {
Ok(value) => loc_patterns.push(Located { region, value }),
// an Expr became a pattern that should not be.
Err(e) => return Err((e, state)),

View file

@@ -85,6 +85,10 @@ impl<T> Located<T> {
};
Located { value, region }
}
pub fn at(region: Region, value: T) -> Located<T> {
Located { value, region }
}
}
impl<T> Located<T> {

View file

@@ -104,7 +104,7 @@ fn canonicalize_pattern(
));
}
Tag(_, _) | AppliedTag(_, _, _) | EmptyRecordLiteral => {
Tag(_, _) | AppliedTag(_, _, _) | EmptyRecordLiteral | RecordDestructure(_) => {
panic!("TODO add_constraints for {:?}", pattern);
}

View file

@@ -936,18 +936,30 @@ mod test_infer {
infer_eq(".foo", "{ foo : a }* -> a");
}
// RecordDestructure does not get canonicalized yet
// #[test]
// fn type_signature_without_body_record() {
// infer_eq(
// indoc!(
// r#"
// { x, y } : { x : (Int -> custom) , y : Int }
#[test]
fn type_signature_without_body_record() {
infer_eq(
indoc!(
r#"
{ x, y } : { x : (Int -> custom) , y : Int }
// x
// "#
// ),
// "Int -> custom",
// );
// }
x
"#
),
"Int -> custom",
);
}
#[test]
fn record_pattern_match_infer() {
infer_eq(
indoc!(
r#"
case foo when
{ x: 4 }-> x
"#
),
"Int",
);
}
}