Merge branch 'trunk' into list-append

Author: Chadtech
Date: 2020-07-18 21:30:51 -04:00 (committed by GitHub)
Commit: 36a259b56b
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)

31 changed files with 1659 additions and 789 deletions


@@ -227,14 +227,21 @@ pub fn build(
     // Populate Procs further and get the low-level Expr from the canonical Expr
     let main_body = Expr::new(&mut mono_env, loc_expr.value, &mut procs);

-    let mut headers = Vec::with_capacity(procs.pending_specializations.len());
+    let mut headers = {
+        let num_headers = match &procs.pending_specializations {
+            Some(map) => map.len(),
+            None => 0,
+        };
+
+        Vec::with_capacity(num_headers)
+    };
     let mut layout_cache = LayoutCache::default();
-    let (mut specializations, runtime_errors) =
-        roc_mono::expr::specialize_all(&mut mono_env, procs, &mut layout_cache);
-
-    assert_eq!(runtime_errors, roc_collections::all::MutSet::default());
+    let mut procs = roc_mono::expr::specialize_all(&mut mono_env, procs, &mut layout_cache);
+
+    assert_eq!(
+        procs.runtime_errors,
+        roc_collections::all::MutMap::default()
+    );

     // Put this module's ident_ids back in the interns, so we can use them in env.
     // This must happen *after* building the headers, because otherwise there's
@@ -244,12 +251,21 @@ pub fn build(
     // Add all the Proc headers to the module.
     // We have to do this in a separate pass first,
     // because their bodies may reference each other.
-    for ((symbol, layout), proc) in specializations.drain() {
-        let (fn_val, arg_basic_types) =
-            build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
-
-        headers.push((proc, fn_val, arg_basic_types));
+    for ((symbol, layout), proc) in procs.specialized.drain() {
+        use roc_mono::expr::InProgressProc::*;
+
+        match proc {
+            InProgress => {
+                panic!("A specialization was still marked InProgress after monomorphization had completed: {:?} with layout {:?}", symbol, layout);
+            }
+            Done(proc) => {
+                let (fn_val, arg_basic_types) =
+                    build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
+
+                headers.push((proc, fn_val, arg_basic_types));
+            }
+        }
     }

     // Build each proc using its header info.
     for (proc, fn_val, arg_basic_types) in headers {


@@ -500,8 +500,32 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
         unique_function(vec![list_type(star1, a)], int_type(star2))
     });

-    // get : Attr (* | u) (List (Attr u a))
-    //     , Attr * Int
+    // List.first :
+    //     Attr (* | u) (List (Attr u a)),
+    //     -> Attr * (Result (Attr u a) (Attr * [ OutOfBounds ]*))
+    let list_was_empty = SolvedType::TagUnion(
+        vec![(TagName::Global("ListWasEmpty".into()), vec![])],
+        Box::new(SolvedType::Wildcard),
+    );
+
+    add_type(Symbol::LIST_FIRST, {
+        let_tvars! { a, u, star1, star2, star3 };
+
+        unique_function(
+            vec![SolvedType::Apply(
+                Symbol::ATTR_ATTR,
+                vec![
+                    container(star1, vec![u]),
+                    SolvedType::Apply(Symbol::LIST_LIST, vec![attr_type(u, a)]),
+                ],
+            )],
+            result_type(star2, attr_type(u, a), lift(star3, list_was_empty)),
+        )
+    });
+
+    // List.get :
+    //     Attr (* | u) (List (Attr u a)),
+    //     Attr * Int
     //    -> Attr * (Result (Attr u a) (Attr * [ OutOfBounds ]*))
     let index_out_of_bounds = SolvedType::TagUnion(
         vec![(TagName::Global("OutOfBounds".into()), vec![])],
@@ -526,10 +550,11 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
         )
     });

-    // set : Attr (w | u | v) (List (Attr u a))
-    //     , Attr * Int
-    //     , Attr (u | v) a
-    //    -> List a
+    // List.set :
+    //     Attr (w | u | v) (List (Attr u a)),
+    //     Attr * Int,
+    //     Attr (u | v) a
+    //     -> Attr * (List (Attr u a))
     add_type(Symbol::LIST_SET, {
         let_tvars! { u, v, w, star1, star2, a };


@@ -1,6 +1,6 @@
 use crate::env::Env;
 use crate::scope::Scope;
-use roc_collections::all::{MutMap, MutSet, SendMap};
+use roc_collections::all::{MutSet, SendMap};
 use roc_module::ident::{Ident, Lowercase, TagName};
 use roc_module::symbol::Symbol;
 use roc_parse::ast::{AssignedField, Tag, TypeAnnotation};
@@ -294,34 +294,17 @@ fn can_annotation_help(
         },
         Record { fields, ext } => {
-            let mut field_types = SendMap::default();
-            let mut seen = MutMap::default();
-
-            for field in fields.iter() {
-                let opt_field_name = can_assigned_field(
-                    env,
-                    &field.value,
-                    region,
-                    scope,
-                    var_store,
-                    introduced_variables,
-                    local_aliases,
-                    &mut field_types,
-                    references,
-                );
-
-                if let Some(added) = opt_field_name {
-                    if let Some(replaced_region) = seen.insert(added.clone(), field.region) {
-                        env.problem(roc_problem::can::Problem::DuplicateRecordFieldType {
-                            field_name: added.clone(),
-                            field_region: field.region,
-                            record_region: region,
-                            replaced_region,
-                        });
-                    }
-                }
-            }
+            let field_types = can_assigned_fields(
+                env,
+                fields,
+                region,
+                scope,
+                var_store,
+                introduced_variables,
+                local_aliases,
+                references,
+            );

             let ext_type = match ext {
                 Some(loc_ann) => can_annotation_help(
                     env,
@@ -339,39 +322,22 @@ fn can_annotation_help(
             Type::Record(field_types, Box::new(ext_type))
         }
         TagUnion { tags, ext } => {
-            let mut tag_types = Vec::with_capacity(tags.len());
-            let mut seen = MutMap::default();
-
-            for tag in tags.iter() {
-                let opt_tag_name = can_tag(
-                    env,
-                    &tag.value,
-                    region,
-                    scope,
-                    var_store,
-                    introduced_variables,
-                    local_aliases,
-                    &mut tag_types,
-                    references,
-                );
-
-                if let Some(added) = opt_tag_name {
-                    if let Some(replaced_region) = seen.insert(added.clone(), tag.region) {
-                        env.problem(roc_problem::can::Problem::DuplicateTag {
-                            tag_name: added.clone(),
-                            tag_region: tag.region,
-                            tag_union_region: region,
-                            replaced_region,
-                        });
-                    }
-                }
-            }
+            let tag_types = can_tags(
+                env,
+                tags,
+                region,
+                scope,
+                var_store,
+                introduced_variables,
+                local_aliases,
+                references,
+            );

             let ext_type = match ext {
                 Some(loc_ann) => can_annotation_help(
                     env,
                     &loc_ann.value,
-                    region,
+                    loc_ann.region,
                     scope,
                     var_store,
                     introduced_variables,
@@ -405,19 +371,32 @@ fn can_annotation_help(
 // TODO trim down these arguments!
 #[allow(clippy::too_many_arguments)]
-fn can_assigned_field<'a>(
+fn can_assigned_fields<'a>(
     env: &mut Env,
-    field: &AssignedField<'a, TypeAnnotation<'a>>,
+    fields: &&[Located<AssignedField<'a, TypeAnnotation<'a>>>],
     region: Region,
     scope: &mut Scope,
     var_store: &mut VarStore,
     introduced_variables: &mut IntroducedVariables,
     local_aliases: &mut SendMap<Symbol, Alias>,
-    field_types: &mut SendMap<Lowercase, Type>,
     references: &mut MutSet<Symbol>,
-) -> Option<Lowercase> {
+) -> SendMap<Lowercase, Type> {
     use roc_parse::ast::AssignedField::*;

+    // SendMap doesn't have a `with_capacity`
+    let mut field_types = SendMap::default();
+
+    // field names we've seen so far in this record
+    let mut seen = std::collections::HashMap::with_capacity(fields.len());
+
+    'outer: for loc_field in fields.iter() {
+        let mut field = &loc_field.value;
+
+        // use this inner loop to unwrap the SpaceAfter/SpaceBefore
+        // when we find the name of this field, break out of the loop
+        // with that value, so we can check whether the field name is
+        // a duplicate
+        let new_name = 'inner: loop {
             match field {
                 LabeledValue(field_name, _, annotation) => {
                     let field_type = can_annotation_help(
@@ -434,7 +413,7 @@ fn can_assigned_field<'a>(
                     let label = Lowercase::from(field_name.value);
                     field_types.insert(label.clone(), field_type);

-                    Some(label)
+                    break 'inner label;
                 }
                 LabelOnly(loc_field_name) => {
                     // Interpret { a, b } as { a : a, b : b }
@@ -451,36 +430,61 @@ fn can_assigned_field<'a>(
                     field_types.insert(field_name.clone(), field_type);

-                    Some(field_name)
+                    break 'inner field_name;
                 }
-        SpaceBefore(nested, _) | SpaceAfter(nested, _) => can_assigned_field(
-            env,
-            nested,
-            region,
-            scope,
-            var_store,
-            introduced_variables,
-            local_aliases,
-            field_types,
-            references,
-        ),
-        Malformed(_) => None,
-    }
+                SpaceBefore(nested, _) | SpaceAfter(nested, _) => {
+                    // check the nested field instead
+                    field = nested;
+
+                    continue 'inner;
+                }
+                Malformed(_) => {
+                    // TODO report this?
+                    // completely skip this element, advance to the next tag
+                    continue 'outer;
+                }
+            }
+        };
+
+        // ensure that the new name is not already in this record:
+        // note that the right-most tag wins when there are two with the same name
+        if let Some(replaced_region) = seen.insert(new_name.clone(), loc_field.region) {
+            env.problem(roc_problem::can::Problem::DuplicateRecordFieldType {
+                field_name: new_name,
+                record_region: region,
+                field_region: loc_field.region,
+                replaced_region,
+            });
+        }
+    }
+
+    field_types
 }

 // TODO trim down these arguments!
 #[allow(clippy::too_many_arguments)]
-fn can_tag<'a>(
+fn can_tags<'a>(
     env: &mut Env,
-    tag: &Tag<'a>,
+    tags: &'a [Located<Tag<'a>>],
     region: Region,
     scope: &mut Scope,
     var_store: &mut VarStore,
     introduced_variables: &mut IntroducedVariables,
     local_aliases: &mut SendMap<Symbol, Alias>,
-    tag_types: &mut Vec<(TagName, Vec<Type>)>,
     references: &mut MutSet<Symbol>,
-) -> Option<TagName> {
+) -> Vec<(TagName, Vec<Type>)> {
+    let mut tag_types = Vec::with_capacity(tags.len());
+
+    // tag names we've seen so far in this tag union
+    let mut seen = std::collections::HashMap::with_capacity(tags.len());
+
+    'outer: for loc_tag in tags.iter() {
+        let mut tag = &loc_tag.value;
+
+        // use this inner loop to unwrap the SpaceAfter/SpaceBefore
+        // when we find the name of this tag, break out of the loop
+        // with that value, so we can check whether the tag name is
+        // a duplicate
+        let new_name = 'inner: loop {
             match tag {
                 Tag::Global { name, args } => {
                     let name = name.value.into();
@@ -490,7 +494,7 @@ fn can_tag<'a>(
                         let ann = can_annotation_help(
                             env,
                             &arg.value,
-                            region,
+                            arg.region,
                             scope,
                             var_store,
                             introduced_variables,
@@ -504,7 +508,7 @@ fn can_tag<'a>(
                     let tag_name = TagName::Global(name);
                     tag_types.push((tag_name.clone(), arg_types));

-                    Some(tag_name)
+                    break 'inner tag_name;
                 }
                 Tag::Private { name, args } => {
                     let ident_id = env.ident_ids.get_or_insert(&name.value.into());
@@ -515,7 +519,7 @@ fn can_tag<'a>(
                         let ann = can_annotation_help(
                             env,
                             &arg.value,
-                            region,
+                            arg.region,
                             scope,
                             var_store,
                             introduced_variables,
@@ -529,19 +533,32 @@ fn can_tag<'a>(
                     let tag_name = TagName::Private(symbol);
                     tag_types.push((tag_name.clone(), arg_types));

-                    Some(tag_name)
+                    break 'inner tag_name;
                 }
-        Tag::SpaceBefore(nested, _) | Tag::SpaceAfter(nested, _) => can_tag(
-            env,
-            nested,
-            region,
-            scope,
-            var_store,
-            introduced_variables,
-            local_aliases,
-            tag_types,
-            references,
-        ),
-        Tag::Malformed(_) => None,
-    }
+                Tag::SpaceBefore(nested, _) | Tag::SpaceAfter(nested, _) => {
+                    // check the nested tag instead
+                    tag = nested;
+
+                    continue 'inner;
+                }
+                Tag::Malformed(_) => {
+                    // TODO report this?
+                    // completely skip this element, advance to the next tag
+                    continue 'outer;
+                }
+            }
+        };
+
+        // ensure that the new name is not already in this tag union:
+        // note that the right-most tag wins when there are two with the same name
+        if let Some(replaced_region) = seen.insert(new_name.clone(), loc_tag.region) {
+            env.problem(roc_problem::can::Problem::DuplicateTag {
+                tag_name: new_name,
+                tag_region: loc_tag.region,
+                tag_union_region: region,
+                replaced_region,
+            });
+        }
+    }
+
+    tag_types
 }

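The can_assigned_fields / can_tags rewrite above replaces a recursive helper with a labeled loop that unwraps SpaceBefore/SpaceAfter wrappers and breaks out with the discovered name. A minimal standalone Rust sketch of that pattern, using a made-up Field enum rather than the real roc_parse AST types:

// Sketch only: Field and its variants are hypothetical stand-ins for the parser AST.
enum Field<'a> {
    Labeled(&'a str),
    SpaceBefore(&'a Field<'a>),
    SpaceAfter(&'a Field<'a>),
    Malformed,
}

fn main() {
    let inner = Field::Labeled("x");
    let wrapped = Field::SpaceAfter(&inner);
    let fields = vec![Field::SpaceBefore(&wrapped), Field::Malformed];

    let mut names = Vec::new();

    'outer: for field in &fields {
        let mut current = field;

        let name = 'inner: loop {
            match current {
                // found the name: break out of the loop with it
                Field::Labeled(name) => break 'inner *name,
                // unwrap the formatting wrapper and keep looking at its contents
                Field::SpaceBefore(nested) | Field::SpaceAfter(nested) => {
                    current = *nested;
                    continue 'inner;
                }
                // skip malformed elements entirely, as the diff does
                Field::Malformed => continue 'outer,
            }
        };

        names.push(name);
    }

    assert_eq!(names, vec!["x"]);
}

The 'inner loop only ever unwraps a wrapper or breaks with a name, while continue 'outer lets a malformed element be skipped without ever reaching the duplicate check.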

@@ -704,6 +704,11 @@ fn list_len(symbol: Symbol, var_store: &mut VarStore) -> Def {
 }

 /// List.get : List elem, Int -> Result elem [ OutOfBounds ]*
+///
+/// List.get :
+///     Attr (* | u) (List (Attr u a)),
+///     Attr * Int
+///     -> Attr * (Result (Attr u a) (Attr * [ OutOfBounds ]*))
 fn list_get(symbol: Symbol, var_store: &mut VarStore) -> Def {
     let arg_list = Symbol::ARG_1;
     let arg_index = Symbol::ARG_2;
@@ -716,7 +721,7 @@ fn list_get(symbol: Symbol, var_store: &mut VarStore) -> Def {
     // Perform a bounds check. If it passes, run LowLevel::ListGetUnsafe
     let body = If {
         cond_var: bool_var,
-        branch_var: ret_var,
+        branch_var: var_store.fresh(),
         branches: vec![(
             // if-condition
             no_region(
@@ -777,21 +782,27 @@ fn list_get(symbol: Symbol, var_store: &mut VarStore) -> Def {
 }

 /// List.set : List elem, Int, elem -> List elem
+///
+/// List.set :
+///     Attr (w | u | v) (List (Attr u a)),
+///     Attr * Int,
+///     Attr (u | v) a
+///     -> Attr * (List (Attr u a))
 fn list_set(symbol: Symbol, var_store: &mut VarStore) -> Def {
     let arg_list = Symbol::ARG_1;
     let arg_index = Symbol::ARG_2;
     let arg_elem = Symbol::ARG_3;

     let bool_var = var_store.fresh();
     let len_var = var_store.fresh();
-    let list_var = var_store.fresh();
     let elem_var = var_store.fresh();
-    let ret_var = var_store.fresh();
+    let list_arg_var = var_store.fresh(); // Uniqueness type Attr differs between
+    let list_ret_var = var_store.fresh(); // the arg list and the returned list

     // Perform a bounds check. If it passes, run LowLevel::ListSet.
     // Otherwise, return the list unmodified.
     let body = If {
         cond_var: bool_var,
-        branch_var: ret_var,
+        branch_var: list_ret_var,
         branches: vec![(
             // if-condition
             no_region(
@@ -804,7 +815,7 @@ fn list_set(symbol: Symbol, var_store: &mut VarStore) -> Def {
                         len_var,
                         RunLowLevel {
                             op: LowLevel::ListLen,
-                            args: vec![(list_var, Var(arg_list))],
+                            args: vec![(list_arg_var, Var(arg_list))],
                             ret_var: len_var,
                         },
                     ),
@@ -818,11 +829,11 @@ fn list_set(symbol: Symbol, var_store: &mut VarStore) -> Def {
                 RunLowLevel {
                     op: LowLevel::ListSet,
                     args: vec![
-                        (list_var, Var(arg_list)),
+                        (list_arg_var, Var(arg_list)),
                         (len_var, Var(arg_index)),
                         (elem_var, Var(arg_elem)),
                     ],
-                    ret_var: list_var,
+                    ret_var: list_ret_var,
                 },
             ),
         )],
@@ -835,13 +846,13 @@ fn list_set(symbol: Symbol, var_store: &mut VarStore) -> Def {
     defn(
         symbol,
         vec![
-            (list_var, Symbol::ARG_1),
+            (list_arg_var, Symbol::ARG_1),
             (len_var, Symbol::ARG_2),
             (elem_var, Symbol::ARG_3),
         ],
         var_store,
         body,
-        ret_var,
+        list_ret_var,
     )
 }
@@ -1090,6 +1101,10 @@ fn num_div_int(symbol: Symbol, var_store: &mut VarStore) -> Def {
 }

 /// List.first : List elem -> Result elem [ ListWasEmpty ]*
+///
+/// List.first :
+///     Attr (* | u) (List (Attr u a)),
+///     -> Attr * (Result (Attr u a) (Attr * [ OutOfBounds ]*))
 fn list_first(symbol: Symbol, var_store: &mut VarStore) -> Def {
     let bool_var = var_store.fresh();
     let list_var = var_store.fresh();
@@ -1101,7 +1116,7 @@ fn list_first(symbol: Symbol, var_store: &mut VarStore) -> Def {
     // Perform a bounds check. If it passes, delegate to List.getUnsafe.
     let body = If {
         cond_var: bool_var,
-        branch_var: ret_var,
+        branch_var: var_store.fresh(),
         branches: vec![(
             // if-condition
             no_region(


@@ -115,7 +115,11 @@ impl<'a> Env<'a> {
                     }
                     None => Err(RuntimeError::ModuleNotImported {
                         module_name,
-                        ident: ident.into(),
+                        imported_modules: self
+                            .module_ids
+                            .available_modules()
+                            .map(|string| string.as_ref().into())
+                            .collect(),
                         region,
                     }),
                 }


@@ -214,10 +214,17 @@ pub fn canonicalize_expr<'a>(
                 (answer, output)
             } else {
-                panic!(
-                    "TODO canonicalize invalid record update (non-Var in update position)\n{:?}",
-                    can_update.value
-                );
+                // only (optionally qualified) variables can be updated, not arbitrary expressions
+                let error = roc_problem::can::RuntimeError::InvalidRecordUpdate {
+                    region: can_update.region,
+                };
+
+                let answer = Expr::RuntimeError(error.clone());
+
+                env.problems.push(Problem::RuntimeError(error));
+
+                (answer, Output::default())
             }
         }
         ast::Expr::Record {


@@ -211,7 +211,7 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
             let value = match op {
                 Negate => Var {
                     module_name: ModuleName::NUM,
-                    ident: "negate",
+                    ident: "neg",
                 },
                 Not => Var {
                     module_name: ModuleName::BOOL,


@@ -78,6 +78,28 @@ where
     answer
 }

+/// Like intersection_with, except for MutMap and specialized to return
+/// a tuple. Also, only clones the values that will be actually returned,
+/// rather than cloning everything.
+pub fn get_shared<K, V>(map1: &MutMap<K, V>, map2: &MutMap<K, V>) -> MutMap<K, (V, V)>
+where
+    K: Clone + Eq + Hash,
+    V: Clone,
+{
+    let mut answer = MutMap::default();
+
+    for (key, right_value) in map2 {
+        match std::collections::HashMap::get(map1, &key) {
+            None => (),
+            Some(left_value) => {
+                answer.insert(key.clone(), (left_value.clone(), right_value.clone()));
+            }
+        }
+    }
+
+    answer
+}
+
 /// Like im's union, but for MutMap.
 pub fn union<K, V>(mut map: MutMap<K, V>, other: &MutMap<K, V>) -> MutMap<K, V>
 where

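As a rough standalone illustration of what the new get_shared helper above computes: MutMap in the diff is assumed to be a HashMap-style alias, so plain std::collections::HashMap stands in for it in this sketch.

// Sketch only: reimplements the same intersection logic over plain HashMaps.
use std::collections::HashMap;
use std::hash::Hash;

fn get_shared_sketch<K, V>(map1: &HashMap<K, V>, map2: &HashMap<K, V>) -> HashMap<K, (V, V)>
where
    K: Clone + Eq + Hash,
    V: Clone,
{
    let mut answer = HashMap::new();

    // mirror the diff: walk map2 and clone only the values that are actually shared
    for (key, right_value) in map2 {
        if let Some(left_value) = map1.get(key) {
            answer.insert(key.clone(), (left_value.clone(), right_value.clone()));
        }
    }

    answer
}

fn main() {
    let map1: HashMap<&str, i32> = [("a", 1), ("b", 2)].iter().copied().collect();
    let map2: HashMap<&str, i32> = [("b", 20), ("c", 30)].iter().copied().collect();

    let shared = get_shared_sketch(&map1, &map2);

    assert_eq!(shared.len(), 1);
    assert_eq!(shared["b"], (2, 20));
}

Only values whose keys occur in both maps get cloned, which is the point of the "only clones the values that will be actually returned" doc comment above.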

@@ -16,7 +16,7 @@ use roc_types::types::AnnotationSource::{self, *};
 use roc_types::types::Type::{self, *};
 use roc_types::types::{Alias, Category, PReason, Reason};
 use roc_uniq::builtins::{attr_type, empty_list_type, list_type, str_type};
-use roc_uniq::sharing::{self, Container, FieldAccess, Mark, Usage, VarUsage};
+use roc_uniq::sharing::{self, FieldAccess, Mark, Usage, VarUsage};

 pub struct Env {
     /// Whenever we encounter a user-defined type variable (a "rigid" var for short),
@@ -1434,11 +1434,14 @@ fn constrain_var(
                 ]),
             )
         }
-        Some(Simple(Unique)) => {
+        Some(Simple(Unique)) | Some(Simple(Seen)) => {
             // no additional constraints, keep uniqueness unbound
             Lookup(symbol_for_lookup, expected, region)
         }
-        Some(Usage::Access(_, _, _)) | Some(Usage::Update(_, _, _)) => {
+        Some(Usage::RecordAccess(_, _))
+        | Some(Usage::RecordUpdate(_, _))
+        | Some(Usage::ApplyAccess(_, _))
+        | Some(Usage::ApplyUpdate(_, _)) => {
             applied_usage_constraint.insert(symbol_for_lookup);

             let mut variables = Vec::new();
@@ -1457,8 +1460,6 @@ fn constrain_var(
                 ]),
             )
         }
-
-        Some(other) => panic!("some other rc value: {:?}", other),
     }
 }
@@ -1486,12 +1487,12 @@ fn constrain_by_usage(
             (Bool::container(uvar, vec![]), Type::Variable(var))
         }
-        Usage::Access(Container::Record, mark, fields) => {
+        Usage::RecordAccess(mark, fields) => {
            let (record_bool, ext_type) = constrain_by_usage(&Simple(*mark), var_store, introduced);

             constrain_by_usage_record(fields, record_bool, ext_type, introduced, var_store)
         }
-        Usage::Update(Container::Record, _, fields) => {
+        Usage::RecordUpdate(_, fields) => {
             let record_uvar = var_store.fresh();
             introduced.push(record_uvar);
@@ -1503,12 +1504,11 @@ fn constrain_by_usage(
             constrain_by_usage_record(fields, record_bool, ext_type, introduced, var_store)
         }
-        Usage::Access(Container::List, mark, fields) => {
+        Usage::ApplyAccess(mark, fields) => {
             let (list_bool, _ext_type) = constrain_by_usage(&Simple(*mark), var_store, introduced);

-            let field_usage = fields
-                .get(&sharing::LIST_ELEM.into())
-                .expect("no LIST_ELEM key");
+            // TODO reconsier this for multi-value applies
+            let field_usage = fields.get(0).expect("no LIST_ELEM key");

             let (elem_bool, elem_type) = constrain_by_usage(field_usage, var_store, introduced);
@@ -1543,13 +1543,12 @@ fn constrain_by_usage(
             }
         }
-        Usage::Update(Container::List, _, fields) => {
+        Usage::ApplyUpdate(_, fields) => {
             let list_uvar = var_store.fresh();
             introduced.push(list_uvar);

-            let field_usage = fields
-                .get(&sharing::LIST_ELEM.into())
-                .expect("no LIST_ELEM key");
+            // TODO reconsier this for multi-value applies
+            let field_usage = fields.get(0).expect("no LIST_ELEM key");

             let (elem_bool, elem_type) = constrain_by_usage(field_usage, var_store, introduced);
@@ -2397,7 +2396,7 @@ fn fix_mutual_recursive_alias(typ: &mut Type, attribute: &Bool) {
 fn fix_mutual_recursive_alias_help(rec_var: Variable, attribute: &Type, into_type: &mut Type) {
     if into_type.contains_variable(rec_var) {
         if let Type::Apply(Symbol::ATTR_ATTR, args) = into_type {
-            std::mem::replace(&mut args[0], attribute.clone());
+            args[0] = attribute.clone();

             fix_mutual_recursive_alias_help_help(rec_var, attribute, &mut args[1]);
         }


@@ -290,8 +290,6 @@ fn format_assigned_field_help<'a, T>(
 ) where
     T: Formattable<'a>,
 {
-    // TODO multiline?
-
     use self::AssignedField::*;

     match zelf {


@@ -191,34 +191,7 @@ impl<'a> Formattable<'a> for Expr<'a> {
                 // (Canonicalization can remove defs later, but that hasn't happened yet!)
                 debug_assert!(!defs.is_empty());

-                // The first def is located last in the list, because it gets added there
-                // with .push() for efficiency. (The order of parsed defs doesn't
-                // matter because canonicalization sorts them anyway.)
-                // The other defs in the list are in their usual order.
-                //
-                // But, the first element of `defs` could be the annotation belonging to the final
-                // element, so format the annotation first.
-                let it = defs.iter().peekable();
-
-                /*
-                // so if it exists, format the annotation
-                if let Some(Located {
-                    value: Def::Annotation(_, _),
-                    ..
-                }) = it.peek()
-                {
-                    let def = it.next().unwrap();
-                    fmt_def(buf, &def.value, indent);
-                }
-
-                // then (using iter_back to get the last value of the `defs` vec) format the first body
-                if let Some(loc_first_def) = it.next_back() {
-                    fmt_def(buf, &loc_first_def.value, indent);
-                }
-                */
-
-                // then format the other defs in order
-                for loc_def in it {
+                for loc_def in defs.iter() {
                     fmt_def(buf, &loc_def.value, indent);
                 }


@@ -1662,6 +1662,8 @@ fn run_low_level<'a, 'ctx, 'env>(
             let (list, list_layout) = &args[0];

+            match list_layout {
+                Layout::Builtin(Builtin::List(elem_layout)) => {
                     let wrapper_struct =
                         build_expr(env, layout_ids, scope, parent, list).into_struct_value();
@@ -1681,8 +1683,6 @@ fn run_low_level<'a, 'ctx, 'env>(
                     );

                     let build_then = || {
-                        match list_layout {
-                            Layout::Builtin(Builtin::List(elem_layout)) => {
                         // Allocate space for the new array that we'll copy into.
                         let elem_type =
                             basic_type_from_layout(env.arena, ctx, elem_layout, env.ptr_bytes);
@@ -1798,12 +1798,6 @@ fn run_low_level<'a, 'ctx, 'env>(
                             collection(ctx, ptr_bytes),
                             "cast_collection",
                         )
-                            }
-                            Layout::Builtin(Builtin::EmptyList) => empty_list(env),
-                            _ => {
-                                unreachable!("Invalid List layout for List.get: {:?}", list_layout);
-                            }
-                        }
                     };

                     let build_else = || empty_list(env);
@@ -1819,6 +1813,12 @@ fn run_low_level<'a, 'ctx, 'env>(
                         BasicTypeEnum::StructType(struct_type),
                     )
                 }
+                Layout::Builtin(Builtin::EmptyList) => empty_list(env),
+                _ => {
+                    unreachable!("Invalid List layout for List.reverse {:?}", list_layout);
+                }
+            }
+        }
         ListAppend => list_append(env, layout_ids, scope, parent, args),
         ListPush => {
             // List.push List elem, elem -> List elem


@@ -245,6 +245,10 @@ mod gen_list {
         );
     }

+    // TODO getting this to work requires generating a runtime error for the Ok
+    // branch here, which is not yet something we support as of when this
+    // test was originally written.
+    //
     // #[test]
     // fn first_empty_list() {
     //     assert_evals_to!(
@@ -422,70 +426,129 @@ mod gen_list {
         );
     }

+    #[test]
+    fn gen_wrap_len() {
+        assert_evals_to!(
+            indoc!(
+                r#"
+                    wrapLen = \list ->
+                        [ List.len list ]
+
+                    wrapLen [ 1, 7, 9 ]
+                "#
+            ),
+            &[3],
+            &'static [i64]
+        );
+    }
+
+    #[test]
+    fn gen_wrap_first() {
+        assert_evals_to!(
+            indoc!(
+                r#"
+                    wrapFirst = \list ->
+                        [ List.first list ]
+
+                    wrapFirst [ 1, 2 ]
+                "#
+            ),
+            &[1],
+            &'static [i64]
+        );
+    }
+
+    #[test]
+    fn gen_duplicate() {
+        assert_evals_to!(
+            indoc!(
+                r#"
+                    # Duplicate the first element into the second index
+                    dupe = \list ->
+                        when List.first list is
+                            Ok elem ->
+                                List.set list 1 elem
+
+                            _ ->
+                                []
+
+                    dupe [ 1, 2 ]
+                "#
+            ),
+            &[1, 1],
+            &'static [i64]
+        );
+    }
+
     #[test]
     fn gen_quicksort() {
         with_larger_debug_stack(|| {
-            // assert_evals_to!(
-            //     indoc!(
-            //         r#"
-            //             quicksort : List (Num a) -> List (Num a)
-            //             quicksort = \list ->
-            //                 quicksortHelp list 0 (List.len list - 1)
-            //
-            //             quicksortHelp : List (Num a), Int, Int -> List (Num a)
-            //             quicksortHelp = \list, low, high ->
-            //                 if low < high then
-            //                     when partition low high list is
-            //                         Pair partitionIndex partitioned ->
-            //                             partitioned
-            //                                 |> quicksortHelp low (partitionIndex - 1)
-            //                                 |> quicksortHelp (partitionIndex + 1) high
-            //                 else
-            //                     list
-            //
-            //             swap : Int, Int, List a -> List a
-            //             swap = \i, j, list ->
-            //                 when Pair (List.get list i) (List.get list j) is
-            //                     Pair (Ok atI) (Ok atJ) ->
-            //                         list
-            //                             |> List.set i atJ
-            //                             |> List.set j atI
-            //
-            //                     _ ->
-            //                         []
-            //
-            //             partition : Int, Int, List (Num a) -> [ Pair Int (List (Num a)) ]
-            //             partition = \low, high, initialList ->
-            //                 when List.get initialList high is
-            //                     Ok pivot ->
-            //                         when partitionHelp (low - 1) low initialList high pivot is
-            //                             Pair newI newList ->
-            //                                 Pair (newI + 1) (swap (newI + 1) high newList)
-            //
-            //                     Err _ ->
-            //                         Pair (low - 1) initialList
-            //
-            //             partitionHelp : Int, Int, List (Num a), Int, Int -> [ Pair Int (List (Num a)) ]
-            //             partitionHelp = \i, j, list, high, pivot ->
-            //                 if j < high then
-            //                     when List.get list j is
-            //                         Ok value ->
-            //                             if value <= pivot then
-            //                                 partitionHelp (i + 1) (j + 1) (swap (i + 1) j list) high pivot
-            //                             else
-            //                                 partitionHelp i (j + 1) list high pivot
-            //
-            //                         Err _ ->
-            //                             Pair i list
-            //                 else
-            //                     Pair i list
-            //
-            //             quicksort [ 7, 4, 21, 19 ]
-            //         "#
-            //     ),
-            //     &[4, 7, 19, 21],
-            //     &'static [i64]
-            // );
+            assert_evals_to!(
+                indoc!(
+                    r#"
+                        quicksort : List (Num a) -> List (Num a)
+                        quicksort = \list ->
+                            quicksortHelp list 0 (List.len list - 1)
+
+                        quicksortHelp : List (Num a), Int, Int -> List (Num a)
+                        quicksortHelp = \list, low, high ->
+                            if low < high then
+                                when partition low high list is
+                                    Pair partitionIndex partitioned ->
+                                        partitioned
+                                            |> quicksortHelp low (partitionIndex - 1)
+                                            |> quicksortHelp (partitionIndex + 1) high
+                            else
+                                list
+
+                        swap : Int, Int, List a -> List a
+                        swap = \i, j, list ->
+                            when Pair (List.get list i) (List.get list j) is
+                                Pair (Ok atI) (Ok atJ) ->
+                                    list
+                                        |> List.set i atJ
+                                        |> List.set j atI
+
+                                _ ->
+                                    []
+
+                        partition : Int, Int, List (Num a) -> [ Pair Int (List (Num a)) ]
+                        partition = \low, high, initialList ->
+                            when List.get initialList high is
+                                Ok pivot ->
+                                    when partitionHelp (low - 1) low initialList high pivot is
+                                        Pair newI newList ->
+                                            Pair (newI + 1) (swap (newI + 1) high newList)
+
+                                Err _ ->
+                                    Pair (low - 1) initialList
+
+                        partitionHelp : Int, Int, List (Num a), Int, Int -> [ Pair Int (List (Num a)) ]
+                        partitionHelp = \i, j, list, high, pivot ->
+                            if j < high then
+                                when List.get list j is
+                                    Ok value ->
+                                        if value <= pivot then
+                                            partitionHelp (i + 1) (j + 1) (swap (i + 1) j list) high pivot
+                                        else
+                                            partitionHelp i (j + 1) list high pivot
+
+                                    Err _ ->
+                                        Pair i list
+                            else
+                                Pair i list
+
+                        quicksort [ 7, 4, 21, 19 ]
+                    "#
+                ),
+                &[4, 7, 19, 21],
+                &'static [i64]
+            );
         })
     }
 }


@@ -113,6 +113,21 @@ mod gen_num {
         );
     }

+    #[test]
+    fn gen_wrap_add_nums() {
+        assert_evals_to!(
+            indoc!(
+                r#"
+                    add2 = \num1, num2 -> num1 + num2
+
+                    add2 4 5
+                "#
+            ),
+            9,
+            i64
+        );
+    }
+
     #[test]
     fn gen_div_f64() {
         // FIXME this works with normal types, but fails when checking uniqueness types
@@ -155,6 +170,23 @@ mod gen_num {
         );
     }

+    #[test]
+    fn gen_wrap_int_neq() {
+        assert_evals_to!(
+            indoc!(
+                r#"
+                    wrappedNotEq : a, a -> Bool
+                    wrappedNotEq = \num1, num2 ->
+                        num1 != num2
+
+                    wrappedNotEq 2 3
+                "#
+            ),
+            true,
+            bool
+        );
+    }
+
     #[test]
     fn gen_add_i64() {
         assert_evals_to!(
@@ -479,6 +511,21 @@ mod gen_num {
         assert_evals_to!("Num.neg 123", -123, i64);
     }

+    #[test]
+    fn gen_wrap_int_neg() {
+        assert_evals_to!(
+            indoc!(
+                r#"
+                    wrappedNeg = \num -> -num
+
+                    wrappedNeg 3
+                "#
+            ),
+            -3,
+            i64
+        );
+    }
+
     #[test]
     fn gen_basic_fn() {
         assert_evals_to!(


@@ -75,14 +75,18 @@ macro_rules! assert_llvm_evals_to {
         };

         let main_body = Expr::new(&mut mono_env, loc_expr.value, &mut procs);
-        let mut headers = Vec::with_capacity(procs.pending_specializations.len());
+        let mut headers = {
+            let num_headers = match &procs.pending_specializations {
+                Some(map) => map.len(),
+                None => 0
+            };
+
+            Vec::with_capacity(num_headers)
+        };
         let mut layout_cache = roc_mono::layout::LayoutCache::default();
-        let (mut specializations, runtime_errors) =
-            roc_mono::expr::specialize_all(&mut mono_env, procs, &mut layout_cache);
-
-        assert_eq!(runtime_errors, roc_collections::all::MutSet::default());
+        let mut procs = roc_mono::expr::specialize_all(&mut mono_env, procs, &mut layout_cache);
+
+        assert_eq!(procs.runtime_errors, roc_collections::all::MutMap::default());

         // Put this module's ident_ids back in the interns, so we can use them in env.
         // This must happen *after* building the headers, because otherwise there's
@@ -92,12 +96,21 @@ macro_rules! assert_llvm_evals_to {
         // Add all the Proc headers to the module.
         // We have to do this in a separate pass first,
         // because their bodies may reference each other.
-        for ((symbol, layout), proc) in specializations.drain() {
-            let (fn_val, arg_basic_types) =
-                build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
-
-            headers.push((proc, fn_val, arg_basic_types));
+        for ((symbol, layout), proc) in procs.specialized.drain() {
+            use roc_mono::expr::InProgressProc::*;
+
+            match proc {
+                InProgress => {
+                    panic!("A specialization was still marked InProgress after monomorphization had completed: {:?} with layout {:?}", symbol, layout);
+                }
+                Done(proc) => {
+                    let (fn_val, arg_basic_types) =
+                        build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
+
+                    headers.push((proc, fn_val, arg_basic_types));
+                }
+            }
         }

         // Build each proc using its header info.
         for (proc, fn_val, arg_basic_types) in headers {
@@ -248,13 +261,18 @@ macro_rules! assert_opt_evals_to {
         };

         let main_body = Expr::new(&mut mono_env, loc_expr.value, &mut procs);
-        let mut headers = Vec::with_capacity(procs.pending_specializations.len());
+        let mut headers = {
+            let num_headers = match &procs.pending_specializations {
+                Some(map) => map.len(),
+                None => 0
+            };
+
+            Vec::with_capacity(num_headers)
+        };
         let mut layout_cache = roc_mono::layout::LayoutCache::default();
-        let (mut specializations, runtime_errors) =
-            roc_mono::expr::specialize_all(&mut mono_env, procs, &mut layout_cache);
-
-        assert_eq!(runtime_errors, roc_collections::all::MutSet::default());
+        let mut procs = roc_mono::expr::specialize_all(&mut mono_env, procs, &mut layout_cache);
+
+        assert_eq!(procs.runtime_errors, roc_collections::all::MutMap::default());

         // Put this module's ident_ids back in the interns, so we can use them in env.
         // This must happen *after* building the headers, because otherwise there's
@@ -264,12 +282,21 @@ macro_rules! assert_opt_evals_to {
         // Add all the Proc headers to the module.
         // We have to do this in a separate pass first,
         // because their bodies may reference each other.
-        for ((symbol, layout), proc) in specializations.drain() {
-            let (fn_val, arg_basic_types) =
-                build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
-
-            headers.push((proc, fn_val, arg_basic_types));
+        for ((symbol, layout), proc) in procs.specialized.drain() {
+            use roc_mono::expr::InProgressProc::*;
+
+            match proc {
+                InProgress => {
+                    panic!("A specialization was still marked InProgress after monomorphization had completed: {:?} with layout {:?}", symbol, layout);
+                }
+                Done(proc) => {
+                    let (fn_val, arg_basic_types) =
+                        build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
+
+                    headers.push((proc, fn_val, arg_basic_types));
+                }
+            }
         }

         // Build each proc using its header info.
         for (proc, fn_val, arg_basic_types) in headers {


@@ -221,12 +221,12 @@ mod test_uniq_load {
         expect_types(
             loaded_module,
             hashmap! {
-                "findPath" => "Attr * (Attr * { costFunction : (Attr Shared (Attr Shared position, Attr Shared position -> Attr Shared Float)), end : (Attr Shared position), moveFunction : (Attr Shared (Attr Shared position -> Attr * (Set (Attr * position)))), start : (Attr Shared position) } -> Attr * (Result (Attr * (List (Attr Shared position))) (Attr * [ KeyNotFound ]*)))",
+                "findPath" => "Attr * (Attr * { costFunction : (Attr Shared (Attr Shared position, Attr Shared position -> Attr * Float)), end : (Attr Shared position), moveFunction : (Attr Shared (Attr Shared position -> Attr * (Set (Attr * position)))), start : (Attr Shared position) } -> Attr * (Result (Attr * (List (Attr Shared position))) (Attr * [ KeyNotFound ]*)))",
                 "initialModel" => "Attr * (Attr Shared position -> Attr * (Model (Attr Shared position)))",
                 "reconstructPath" => "Attr Shared (Attr Shared (Map (Attr * position) (Attr Shared position)), Attr Shared position -> Attr * (List (Attr Shared position)))",
                 "updateCost" => "Attr * (Attr Shared position, Attr Shared position, Attr Shared (Model (Attr Shared position)) -> Attr Shared (Model (Attr Shared position)))",
-                "cheapestOpen" => "Attr * (Attr * (Attr Shared position -> Attr Shared Float), Attr (* | a | b) (Model (Attr Shared position)) -> Attr * (Result (Attr Shared position) (Attr * [ KeyNotFound ]*)))",
-                "astar" => "Attr Shared (Attr Shared (Attr Shared position, Attr Shared position -> Attr Shared Float), Attr Shared (Attr Shared position -> Attr * (Set (Attr * position))), Attr Shared position, Attr Shared (Model (Attr Shared position)) -> Attr * [ Err (Attr * [ KeyNotFound ]*), Ok (Attr * (List (Attr Shared position))) ]*)",
+                "cheapestOpen" => "Attr * (Attr * (Attr Shared position -> Attr * Float), Attr (* | a | b | c) (Model (Attr Shared position)) -> Attr * (Result (Attr Shared position) (Attr * [ KeyNotFound ]*)))",
+                "astar" => "Attr Shared (Attr Shared (Attr Shared position, Attr Shared position -> Attr * Float), Attr Shared (Attr Shared position -> Attr * (Set (Attr * position))), Attr Shared position, Attr Shared (Model (Attr Shared position)) -> Attr * [ Err (Attr * [ KeyNotFound ]*), Ok (Attr * (List (Attr Shared position))) ]*)",
             },
         );
     });
@@ -242,7 +242,7 @@ mod test_uniq_load {
         expect_types(
             loaded_module,
             hashmap! {
-                "swap" => "Attr * (Attr Shared Int, Attr Shared Int, Attr * (List (Attr Shared a)) -> Attr * (List (Attr Shared a)))",
+                "swap" => "Attr * (Attr * Int, Attr * Int, Attr * (List (Attr Shared a)) -> Attr * (List (Attr Shared a)))",
                 "partition" => "Attr * (Attr Shared Int, Attr Shared Int, Attr b (List (Attr Shared (Num (Attr Shared a)))) -> Attr * [ Pair (Attr * Int) (Attr b (List (Attr Shared (Num (Attr Shared a))))) ])",
                 "quicksort" => "Attr Shared (Attr b (List (Attr Shared (Num (Attr Shared a)))), Attr Shared Int, Attr Shared Int -> Attr b (List (Attr Shared (Num (Attr Shared a)))))",
             },


@@ -346,6 +346,10 @@ impl ModuleIds {
     pub fn get_name(&self, id: ModuleId) -> Option<&InlinableString> {
         self.by_id.get(id.0 as usize)
     }
+
+    pub fn available_modules(&self) -> impl Iterator<Item = &InlinableString> {
+        self.by_id.iter()
+    }
 }

 /// An ID that is assigned to interned string identifiers within a module.

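A small standalone sketch of how an iterator like the new available_modules() above can feed the imported_modules list used by the ModuleNotImported error earlier in this commit; Vec<String> and Box<str> here are stand-ins for the interned string types in the real code.

// Sketch only: by_id stands in for ModuleIds.by_id.
fn main() {
    let by_id: Vec<String> = vec!["Str".to_string(), "List".to_string(), "Num".to_string()];

    // available_modules() in the diff just hands back an iterator over those names;
    // the error site maps each one into its own string type and collects a Vec.
    let imported_modules: Vec<Box<str>> = by_id.iter().map(|name| name.as_str().into()).collect();

    assert_eq!(imported_modules.len(), 3);
}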

@@ -1,3 +1,4 @@
+use self::InProgressProc::*;
 use crate::layout::{list_layout_from_elem, Builtin, Layout, LayoutCache, LayoutProblem};
 use crate::pattern::{Ctor, Guard, RenderAs, TagId};
 use bumpalo::collections::Vec;
@@ -6,6 +7,7 @@ use roc_collections::all::{default_hasher, MutMap, MutSet};
 use roc_module::ident::{Ident, Lowercase, TagName};
 use roc_module::low_level::LowLevel;
 use roc_module::symbol::{IdentIds, ModuleId, Symbol};
+use roc_problem::can::RuntimeError;
 use roc_region::all::{Located, Region};
 use roc_types::subs::{Content, FlatType, Subs, Variable};
 use std::collections::HashMap;
@@ -38,7 +40,16 @@ pub struct Proc<'a> {
 pub struct Procs<'a> {
     pub partial_procs: MutMap<Symbol, PartialProc<'a>>,
     pub module_thunks: MutSet<Symbol>,
-    pub pending_specializations: MutMap<Symbol, MutMap<Layout<'a>, PendingSpecialization<'a>>>,
+    pub pending_specializations:
+        Option<MutMap<Symbol, MutMap<Layout<'a>, PendingSpecialization<'a>>>>,
+    pub specialized: MutMap<(Symbol, Layout<'a>), InProgressProc<'a>>,
+    pub runtime_errors: MutMap<Symbol, &'a str>,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum InProgressProc<'a> {
+    InProgress,
+    Done(Proc<'a>),
 }

 impl<'a> Procs<'a> {
@@ -51,8 +62,8 @@ impl<'a> Procs<'a> {
         loc_body: Located<roc_can::expr::Expr>,
         ret_var: Variable,
     ) {
-        let (_, pattern_symbols, body) = patterns_to_when(env, loc_args, ret_var, loc_body);
-
+        match patterns_to_when(env, loc_args, ret_var, loc_body) {
+            Ok((_, pattern_symbols, body)) => {
                 // a named closure. Since these aren't specialized by the surrounding
                 // context, we can't add pending specializations for them yet.
                 // (If we did, all named polymorphic functions would immediately error
@@ -67,6 +78,22 @@ impl<'a> Procs<'a> {
                 );
             }
+            Err(error) => {
+                // If the function has invalid patterns in its arguments,
+                // its call sites will code gen to runtime errors. This happens
+                // at the call site so we don't have to try to define the
+                // function LLVM, which would be difficult considering LLVM
+                // wants to know what symbols each argument corresponds to,
+                // and in this case the patterns were invalid, so we don't know
+                // what the symbols ought to be.
+                let error_msg = format!("TODO generate a RuntimeError message for {:?}", error);
+
+                self.runtime_errors.insert(name, env.arena.alloc(error_msg));
+            }
+        }
+    }

     // TODO trim these down
     #[allow(clippy::too_many_arguments)]
     pub fn insert_anonymous(
@@ -78,10 +105,9 @@ impl<'a> Procs<'a> {
         loc_body: Located<roc_can::expr::Expr>,
         ret_var: Variable,
         layout_cache: &mut LayoutCache<'a>,
-    ) -> Layout<'a> {
-        let (pattern_vars, pattern_symbols, body) =
-            patterns_to_when(env, loc_args, ret_var, loc_body);
-
+    ) -> Result<Layout<'a>, RuntimeError> {
+        match patterns_to_when(env, loc_args, ret_var, loc_body) {
+            Ok((pattern_vars, pattern_symbols, body)) => {
                 // an anonymous closure. These will always be specialized already
                 // by the surrounding context, so we can add pending specializations
                 // for them immediately.
@@ -89,13 +115,22 @@ impl<'a> Procs<'a> {
                     .from_var(env.arena, annotation, env.subs, env.pointer_size)
                     .unwrap_or_else(|err| panic!("TODO turn fn_var into a RuntimeError {:?}", err));

+                // if we've already specialized this one, no further work is needed.
+                //
+                // NOTE: this #[allow(clippy::map_entry)] here is for correctness!
+                // Changing it to use .entry() would necessarily make it incorrect.
+                #[allow(clippy::map_entry)]
+                if !self.specialized.contains_key(&(symbol, layout.clone())) {
                     let pending = PendingSpecialization {
                         ret_var,
                         fn_var: annotation,
                         pattern_vars,
                     };

-        self.add_pending_specialization(symbol, layout.clone(), pending);
+                    match &mut self.pending_specializations {
+                        Some(pending_specializations) => {
+                            // register the pending specialization, so this gets code genned later
+                            add_pending(pending_specializations, symbol, layout.clone(), pending);

                             debug_assert!(!self.partial_procs.contains_key(&symbol), "Procs was told to insert a value for symbol {:?}, but there was already an entry for that key! Procs should never attempt to insert duplicates.", symbol);
@@ -107,23 +142,58 @@ impl<'a> Procs<'a> {
                                     body: body.value,
                                 },
                             );
+                        }
+                        None => {
+                            // TODO should pending_procs hold a Rc<Proc>?
+                            let partial_proc = PartialProc {
+                                annotation,
+                                pattern_symbols,
+                                body: body.value,
+                            };

-        layout
+                            // Mark this proc as in-progress, so if we're dealing with
+                            // mutually recursive functions, we don't loop forever.
+                            // (We had a bug around this before this system existed!)
+                            self.specialized
+                                .insert((symbol, layout.clone()), InProgress);
+
+                            match specialize(env, self, symbol, layout_cache, pending, partial_proc)
+                            {
+                                Ok(proc) => {
+                                    self.specialized
+                                        .insert((symbol, layout.clone()), Done(proc));
+                                }
+                                Err(error) => {
+                                    let error_msg = format!(
+                                        "TODO generate a RuntimeError message for {:?}",
+                                        error
+                                    );
+                                    self.runtime_errors
+                                        .insert(symbol, env.arena.alloc(error_msg));
+                                }
+                            }
+                        }
+                    }
+                }
+
+                Ok(layout)
+            }
+            Err(loc_error) => Err(loc_error.value),
+        }
     }
+}

-    fn add_pending_specialization(
-        &mut self,
-        symbol: Symbol,
-        layout: Layout<'a>,
-        pending: PendingSpecialization<'a>,
-    ) {
-        let all_pending = self
-            .pending_specializations
-            .entry(symbol)
-            .or_insert_with(|| HashMap::with_capacity_and_hasher(1, default_hasher()));
-
-        all_pending.insert(layout, pending);
-    }
-}
+fn add_pending<'a>(
+    pending_specializations: &mut MutMap<Symbol, MutMap<Layout<'a>, PendingSpecialization<'a>>>,
+    symbol: Symbol,
+    layout: Layout<'a>,
+    pending: PendingSpecialization<'a>,
+) {
+    let all_pending = pending_specializations
+        .entry(symbol)
+        .or_insert_with(|| HashMap::with_capacity_and_hasher(1, default_hasher()));
+
+    all_pending.insert(layout, pending);
+}

 #[derive(Default)]
@@ -345,18 +415,23 @@ fn num_argument_to_int_or_float(subs: &Subs, var: Variable) -> IntOrFloat {
 /// foo = \r -> when r is { x } -> body
 ///
 /// conversion of one-pattern when expressions will do the most optimal thing
+#[allow(clippy::type_complexity)]
 fn patterns_to_when<'a>(
     env: &mut Env<'a, '_>,
     patterns: std::vec::Vec<(Variable, Located<roc_can::pattern::Pattern>)>,
     body_var: Variable,
-    mut body: Located<roc_can::expr::Expr>,
-) -> (
-    Vec<'a, Variable>,
-    Vec<'a, Symbol>,
-    Located<roc_can::expr::Expr>,
-) {
+    body: Located<roc_can::expr::Expr>,
+) -> Result<
+    (
+        Vec<'a, Variable>,
+        Vec<'a, Symbol>,
+        Located<roc_can::expr::Expr>,
+    ),
+    Located<RuntimeError>,
+> {
     let mut arg_vars = Vec::with_capacity_in(patterns.len(), env.arena);
     let mut symbols = Vec::with_capacity_in(patterns.len(), env.arena);
+    let mut body = Ok(body);

     // patterns that are not yet in a when (e.g. in let or function arguments) must be irrefutable
     // to pass type checking. So the order in which we add them to the body does not matter: there
@@ -364,6 +439,7 @@ fn patterns_to_when<'a>(
     for (pattern_var, pattern) in patterns.into_iter() {
         let context = crate::pattern::Context::BadArg;
         let mono_pattern = from_can_pattern(env, &pattern.value);
+
         match crate::pattern::check(
             pattern.region,
             &[(
@@ -373,25 +449,41 @@ fn patterns_to_when<'a>(
             context,
         ) {
             Ok(_) => {
-                let (new_symbol, new_body) =
-                    pattern_to_when(env, pattern_var, pattern, body_var, body);
-                symbols.push(new_symbol);
-                body = new_body;
+                // Replace the body with a new one, but only if it was Ok.
+                if let Ok(unwrapped_body) = body {
+                    let (new_symbol, new_body) =
+                        pattern_to_when(env, pattern_var, pattern, body_var, unwrapped_body);
+
+                    symbols.push(new_symbol);
+                    arg_vars.push(pattern_var);
+
+                    body = Ok(new_body)
+                }
             }
             Err(errors) => {
                 for error in errors {
                     env.problems.push(MonoProblem::PatternProblem(error))
                 }

-                let error = roc_problem::can::RuntimeError::UnsupportedPattern(pattern.region);
-                body = Located::at(pattern.region, roc_can::expr::Expr::RuntimeError(error));
+                let value = RuntimeError::UnsupportedPattern(pattern.region);
+
+                // Even if the body was Ok, replace it with this Err.
+                // If it was already an Err, leave it at that Err, so the first
+                // RuntimeError we encountered remains the first.
+                body = body.and_then(|_| {
+                    Err(Located {
+                        region: pattern.region,
+                        value,
+                    })
+                });
             }
         }
-
-        arg_vars.push(pattern_var);
     }

-    (arg_vars, symbols, body)
+    match body {
+        Ok(body) => Ok((arg_vars, symbols, body)),
+        Err(loc_error) => Err(loc_error),
+    }
 }

 /// turn irrefutable patterns into when. For example
@@ -514,10 +606,14 @@ fn from_can<'a>(
         Closure(ann, name, _, loc_args, boxed_body) => {
             let (loc_body, ret_var) = *boxed_body;

-            let layout =
-                procs.insert_anonymous(env, name, ann, loc_args, loc_body, ret_var, layout_cache);
-
-            Expr::FunctionPointer(name, layout)
+            match procs.insert_anonymous(env, name, ann, loc_args, loc_body, ret_var, layout_cache)
+            {
+                Ok(layout) => Expr::FunctionPointer(name, layout),
+                Err(_error) => Expr::RuntimeError(
+                    "TODO convert anonymous function error to a RuntimeError string",
+                ),
+            }
         }

         RunLowLevel { op, args, .. } => {
@@ -1303,28 +1399,48 @@ fn call_by_name<'a>(
             let mut pattern_vars = Vec::with_capacity_in(loc_args.len(), arena);

             for (var, loc_arg) in loc_args {
-                pattern_vars.push(var);
-
                 match layout_cache.from_var(&env.arena, var, &env.subs, env.pointer_size) {
                     Ok(layout) => {
+                        pattern_vars.push(var);
                         args.push((from_can(env, loc_arg.value, procs, layout_cache), layout));
                     }
                     Err(_) => {
                         // One of this function's arguments code gens to a runtime error,
                         // so attempting to call it will immediately crash.
-                        return Expr::RuntimeError("");
+                        return Expr::RuntimeError("TODO runtime error for invalid layout");
                     }
                 }
             }

+            // If we've already specialized this one, no further work is needed.
+            if procs.specialized.contains_key(&(proc_name, layout.clone())) {
+                Expr::CallByName {
+                    name: proc_name,
+                    layout,
+                    args: args.into_bump_slice(),
+                }
+            } else {
                 let pending = PendingSpecialization {
                     pattern_vars,
                     ret_var,
                     fn_var,
                 };

+                // When requested (that is, when procs.pending_specializations is `Some`),
+                // store a pending specialization rather than specializing immediately.
+                //
+                // We do this so that we can do specialization in two passes: first,
+                // build the mono_expr with all the specialized calls in place (but
+                // no specializations performed yet), and then second, *after*
+                // de-duplicating requested specializations (since multiple modules
+                // which could be getting monomorphized in parallel might request
+                // the same specialization independently), we work through the
+                // queue of pending specializations to complete each specialization
+                // exactly once.
+                match &mut procs.pending_specializations {
+                    Some(pending_specializations) => {
                         // register the pending specialization, so this gets code genned later
-            procs.add_pending_specialization(proc_name, layout.clone(), pending);
+                        add_pending(pending_specializations, proc_name, layout.clone(), pending);

                         Expr::CallByName {
                             name: proc_name,
@@ -1332,6 +1448,64 @@ fn call_by_name<'a>(
                             args: args.into_bump_slice(),
                         }
                     }
+                    None => {
+                        let opt_partial_proc = procs.partial_procs.get(&proc_name);
+
+                        match opt_partial_proc {
+                            Some(partial_proc) => {
+                                // TODO should pending_procs hold a Rc<Proc> to avoid this .clone()?
+                                let partial_proc = partial_proc.clone();
+
+                                // Mark this proc as in-progress, so if we're dealing with
+                                // mutually recursive functions, we don't loop forever.
+                                // (We had a bug around this before this system existed!)
+                                procs
+                                    .specialized
+                                    .insert((proc_name, layout.clone()), InProgress);
+
+                                match specialize(
+                                    env,
+                                    procs,
+                                    proc_name,
+                                    layout_cache,
+                                    pending,
+                                    partial_proc,
+                                ) {
+                                    Ok(proc) => {
+                                        procs
+                                            .specialized
+                                            .insert((proc_name, layout.clone()), Done(proc));
+
+                                        Expr::CallByName {
+                                            name: proc_name,
+                                            layout,
+                                            args: args.into_bump_slice(),
+                                        }
+                                    }
+                                    Err(error) => {
+                                        let error_msg = env.arena.alloc(format!(
+                                            "TODO generate a RuntimeError message for {:?}",
+                                            error
+                                        ));
+
+                                        procs.runtime_errors.insert(proc_name, error_msg);
+
+                                        Expr::RuntimeError(error_msg)
+                                    }
+                                }
+                            }
+                            None => {
+                                // This must have been a runtime error.
+                                let error = procs.runtime_errors.get(&proc_name).unwrap();
+
+                                Expr::RuntimeError(error)
+                            }
+                        }
+                    }
+                }
+            }
+        }
         Err(_) => {
             // This function code gens to a runtime error,
             // so attempting to call it will immediately crash.
@ -1344,36 +1518,27 @@ pub fn specialize_all<'a>(
env: &mut Env<'a, '_>, env: &mut Env<'a, '_>,
mut procs: Procs<'a>, mut procs: Procs<'a>,
layout_cache: &mut LayoutCache<'a>, layout_cache: &mut LayoutCache<'a>,
) -> (MutMap<(Symbol, Layout<'a>), Proc<'a>>, MutSet<Symbol>) { ) -> Procs<'a> {
let mut answer = let mut pending_specializations = procs.pending_specializations.unwrap_or_default();
HashMap::with_capacity_and_hasher(procs.pending_specializations.len(), default_hasher());
let mut runtime_errors = MutSet::default();
let mut is_finished = procs.pending_specializations.is_empty();
// TODO replace this synchronous loop with a work-stealing queue which // When calling from_can, pending_specializations should be unavailable.
// processes each entry in pending_specializations in parallel, one // This must be a single pass, and we must not add any more entries to it!
// module at a time (because the &mut env will need exclusive access to procs.pending_specializations = None;
// that module's IdentIds; the only reason Env is &mut in specialize is
// that we need to generate unique symbols and register them in them module's
// IdentIds).
while !is_finished {
let Procs {
partial_procs,
module_thunks,
mut pending_specializations,
} = procs;
procs = Procs {
partial_procs,
module_thunks,
pending_specializations: MutMap::default(),
};
for (name, mut by_layout) in pending_specializations.drain() { for (name, mut by_layout) in pending_specializations.drain() {
// Use the function's symbol's home module as the home module
// when doing canonicalization. This will be important to determine
// whether or not it's safe to defer specialization.
env.home = name.module_id();
for (layout, pending) in by_layout.drain() { for (layout, pending) in by_layout.drain() {
// If we've already seen this (Symbol, Layout) combination before, // If we've already seen this (Symbol, Layout) combination before,
// don't try to specialize it again. If we do, we'll loop forever! // don't try to specialize it again. If we do, we'll loop forever!
if !answer.contains_key(&(name, layout.clone())) { //
// NOTE: this #[allow(clippy::map_entry)] here is for correctness!
// Changing it to use .entry() would necessarily make it incorrect.
#[allow(clippy::map_entry)]
if !procs.specialized.contains_key(&(name, layout.clone())) {
// TODO should pending_procs hold a Rc<Proc>? // TODO should pending_procs hold a Rc<Proc>?
let partial_proc = procs let partial_proc = procs
.partial_procs .partial_procs
@ -1381,22 +1546,29 @@ pub fn specialize_all<'a>(
.unwrap_or_else(|| panic!("Could not find partial_proc for {:?}", name)) .unwrap_or_else(|| panic!("Could not find partial_proc for {:?}", name))
.clone(); .clone();
// Mark this proc as in-progress, so if we're dealing with
// mutually recursive functions, we don't loop forever.
// (We had a bug around this before this system existed!)
procs.specialized.insert((name, layout.clone()), InProgress);
match specialize(env, &mut procs, name, layout_cache, pending, partial_proc) { match specialize(env, &mut procs, name, layout_cache, pending, partial_proc) {
Ok(proc) => { Ok(proc) => {
answer.insert((name, layout), proc); procs.specialized.insert((name, layout), Done(proc));
} }
Err(_) => { Err(error) => {
runtime_errors.insert(name); let error_msg = env.arena.alloc(format!(
"TODO generate a RuntimeError message for {:?}",
error
));
procs.runtime_errors.insert(name, error_msg);
} }
} }
} }
} }
} }
is_finished = procs.pending_specializations.is_empty(); procs
}
(answer, runtime_errors)
} }
fn specialize<'a>( fn specialize<'a>(
@ -1431,7 +1603,11 @@ fn specialize<'a>(
let mut proc_args = Vec::with_capacity_in(pattern_vars.len(), &env.arena); let mut proc_args = Vec::with_capacity_in(pattern_vars.len(), &env.arena);
debug_assert!(pattern_vars.len() == pattern_symbols.len()); debug_assert_eq!(
&pattern_vars.len(),
&pattern_symbols.len(),
"Tried to zip two vecs with different lengths!"
);
for (arg_var, arg_name) in pattern_vars.iter().zip(pattern_symbols.iter()) { for (arg_var, arg_name) in pattern_vars.iter().zip(pattern_symbols.iter()) {
let layout = layout_cache.from_var(&env.arena, *arg_var, env.subs, env.pointer_size)?; let layout = layout_cache.from_var(&env.arena, *arg_var, env.subs, env.pointer_size)?;
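
Editorial aside: the InProgress/Done bookkeeping above is what keeps mutually recursive specializations from looping, because the cache entry is inserted *before* the recursive work happens. A minimal standalone sketch of that idea (Key, InProgressProc, callees, and specialize_demo are illustrative stand-ins, not the compiler's types):

use std::collections::HashMap;

// Illustrative stand-ins for (Symbol, Layout) and Proc.
type Key = (&'static str, &'static str);

#[derive(Debug)]
enum InProgressProc {
    InProgress,
    Done(String),
}

// A pretend call graph: which procs each proc calls.
fn callees(name: &str) -> &'static [&'static str] {
    match name {
        "isEven" => &["isOdd"],
        "isOdd" => &["isEven"],
        _ => &[],
    }
}

fn specialize_demo(
    name: &'static str,
    layout: &'static str,
    cache: &mut HashMap<Key, InProgressProc>,
) {
    let key = (name, layout);
    if cache.contains_key(&key) {
        // Already Done, or currently InProgress higher up the call stack.
        return;
    }
    // Mark as in-progress *before* recursing, so mutual recursion terminates.
    cache.insert(key, InProgressProc::InProgress);
    for callee in callees(name) {
        specialize_demo(callee, layout, cache);
    }
    cache.insert(key, InProgressProc::Done(format!("specialized {} at {}", name, layout)));
}

fn main() {
    let mut cache = HashMap::new();
    specialize_demo("isEven", "Int -> Bool", &mut cache);
    // Both mutually recursive procs end up Done exactly once.
    println!("{:#?}", cache);
}

The real code keys its cache on (Symbol, Layout) and stores the finished Proc in Done, but the termination argument is the same.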

View file

@ -65,11 +65,13 @@ mod test_mono {
jump_counter: arena.alloc(0), jump_counter: arena.alloc(0),
}; };
let mono_expr = Expr::new(&mut mono_env, loc_expr.value, &mut procs); let mono_expr = Expr::new(&mut mono_env, loc_expr.value, &mut procs);
let procs =
let (_, runtime_errors) =
roc_mono::expr::specialize_all(&mut mono_env, procs, &mut LayoutCache::default()); roc_mono::expr::specialize_all(&mut mono_env, procs, &mut LayoutCache::default());
assert_eq!(runtime_errors, roc_collections::all::MutSet::default()); assert_eq!(
procs.runtime_errors,
roc_collections::all::MutMap::default()
);
// Put this module's ident_ids back in the interns // Put this module's ident_ids back in the interns
interns.all_ident_ids.insert(home, ident_ids); interns.all_ident_ids.insert(home, ident_ids);

View file

@ -836,7 +836,7 @@ fn parse_closure_param<'a>(
) -> ParseResult<'a, Located<Pattern<'a>>> { ) -> ParseResult<'a, Located<Pattern<'a>>> {
one_of!( one_of!(
// An ident is the most common param, e.g. \foo -> ... // An ident is the most common param, e.g. \foo -> ...
loc!(ident_pattern()), loc_ident_pattern(min_indent),
// Underscore is also common, e.g. \_ -> ... // Underscore is also common, e.g. \_ -> ...
loc!(underscore_pattern()), loc!(underscore_pattern()),
// You can destructure records in params, e.g. \{ x, y } -> ... // You can destructure records in params, e.g. \{ x, y } -> ...
@ -847,23 +847,25 @@ fn parse_closure_param<'a>(
char('('), char('('),
space0_around(loc_pattern(min_indent), min_indent), space0_around(loc_pattern(min_indent), min_indent),
char(')') char(')')
), )
// The least common, but still allowed, e.g. \Foo -> ...
loc_tag_pattern(min_indent)
) )
.parse(arena, state) .parse(arena, state)
} }
fn loc_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> { fn loc_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> {
skip_first!(
// If this is a reserved keyword ("if", "then", "case", "when"), then
// it is not a pattern!
not(reserved_keyword()),
one_of!( one_of!(
loc_parenthetical_pattern(min_indent), loc_parenthetical_pattern(min_indent),
loc!(underscore_pattern()), loc!(underscore_pattern()),
loc_tag_pattern(min_indent), loc_ident_pattern(min_indent),
loc!(ident_pattern()),
loc!(record_destructure(min_indent)), loc!(record_destructure(min_indent)),
loc!(string_pattern()), loc!(string_pattern()),
loc!(number_pattern()) loc!(number_pattern())
) )
)
} }
fn loc_parenthetical_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> { fn loc_parenthetical_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> {
@ -915,34 +917,97 @@ fn record_destructure<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>> {
) )
} }
fn loc_tag_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> { fn loc_ident_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> {
map_with_arena!( move |arena: &'a Bump, state: State<'a>| {
and!( let (loc_ident, state) = loc!(ident()).parse(arena, state)?;
loc!(one_of!(
map!(private_tag(), Pattern::PrivateTag), match loc_ident.value {
map!(global_tag(), Pattern::GlobalTag) Ident::GlobalTag(tag) => {
)), let (loc_args, state) =
// This can optionally be an applied pattern, e.g. (Foo bar) instead of (Foo)
zero_or_more!(space1_before(loc_pattern(min_indent), min_indent)) zero_or_more!(space1_before(loc_pattern(min_indent), min_indent))
), .parse(arena, state)?;
|arena: &'a Bump, let loc_tag = Located {
(loc_tag, loc_args): (Located<Pattern<'a>>, Vec<'a, Located<Pattern<'a>>>)| { region: loc_ident.region,
value: Pattern::GlobalTag(tag),
};
if loc_args.is_empty() { if loc_args.is_empty() {
loc_tag Ok((loc_tag, state))
} else { } else {
// TODO FIME this region doesn't cover the tag's let region = Region::across_all(
// arguments; need to add them to the region! std::iter::once(&loc_ident.region)
let region = loc_tag.region; .chain(loc_args.iter().map(|loc_arg| &loc_arg.region)),
);
let value = Pattern::Apply(&*arena.alloc(loc_tag), loc_args.into_bump_slice()); let value = Pattern::Apply(&*arena.alloc(loc_tag), loc_args.into_bump_slice());
Located { region, value } Ok((Located { region, value }, state))
} }
} }
) Ident::PrivateTag(tag) => {
} let (loc_args, state) =
zero_or_more!(space1_before(loc_pattern(min_indent), min_indent))
.parse(arena, state)?;
let loc_tag = Located {
region: loc_ident.region,
value: Pattern::PrivateTag(tag),
};
fn ident_pattern<'a>() -> impl Parser<'a, Pattern<'a>> { if loc_args.is_empty() {
map!(lowercase_ident(), Pattern::Identifier) Ok((loc_tag, state))
} else {
let region = Region::across_all(
std::iter::once(&loc_ident.region)
.chain(loc_args.iter().map(|loc_arg| &loc_arg.region)),
);
let value = Pattern::Apply(&*arena.alloc(loc_tag), loc_args.into_bump_slice());
Ok((Located { region, value }, state))
}
}
Ident::Access { module_name, parts } => {
// Plain identifiers (e.g. `foo`) are allowed in patterns, but
// more complex ones (e.g. `Foo.bar` or `foo.bar.baz`) are not.
if module_name.is_empty() && parts.len() == 1 {
Ok((
Located {
region: loc_ident.region,
value: Pattern::Identifier(parts[0]),
},
state,
))
} else {
let malformed_str = if module_name.is_empty() {
parts.join(".")
} else {
format!("{}.{}", module_name, parts.join("."))
};
Ok((
Located {
region: loc_ident.region,
value: Pattern::Malformed(arena.alloc(malformed_str)),
},
state,
))
}
}
Ident::AccessorFunction(string) => Ok((
Located {
region: loc_ident.region,
value: Pattern::Malformed(string),
},
state,
)),
Ident::Malformed(_) => {
let fail = Fail {
attempting: state.attempting,
reason: FailReason::InvalidPattern,
};
Err((fail, state))
}
}
}
} }
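
For reference, the shape of that dispatch in isolation: parse one identifier, then decide which pattern it becomes, keeping qualified accesses around as Malformed so they can be reported nicely later. This is a self-contained sketch with local stand-in types (the applied-tag arguments and PrivateTag/AccessorFunction cases are left out), not the parser's real Ident/Pattern:

#[derive(Debug)]
enum Ident<'a> {
    GlobalTag(&'a str),
    Access { module_name: &'a str, parts: Vec<&'a str> },
    Malformed(&'a str),
}

#[derive(Debug)]
enum Pattern<'a> {
    GlobalTag(&'a str),
    Identifier(&'a str),
    Malformed(String),
}

fn ident_to_pattern<'a>(ident: Ident<'a>) -> Result<Pattern<'a>, &'static str> {
    match ident {
        Ident::GlobalTag(tag) => Ok(Pattern::GlobalTag(tag)),
        Ident::Access { module_name, parts } => {
            // Plain `foo` is a valid pattern; `Foo.bar` / `foo.bar.baz` are kept
            // as Malformed so a friendly error can be produced downstream.
            if module_name.is_empty() && parts.len() == 1 {
                Ok(Pattern::Identifier(parts[0]))
            } else if module_name.is_empty() {
                Ok(Pattern::Malformed(parts.join(".")))
            } else {
                Ok(Pattern::Malformed(format!("{}.{}", module_name, parts.join("."))))
            }
        }
        // A malformed identifier is a hard parse failure for patterns.
        Ident::Malformed(_) => Err("invalid pattern"),
    }
}

fn main() {
    println!("{:?}", ident_to_pattern(Ident::Access { module_name: "", parts: vec!["x"] }));
    println!("{:?}", ident_to_pattern(Ident::Access { module_name: "Foo", parts: vec!["and"] }));
    println!("{:?}", ident_to_pattern(Ident::GlobalTag("Just")));
    println!("{:?}", ident_to_pattern(Ident::Malformed("@@")));
}
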
mod when { mod when {

View file

@ -2328,9 +2328,106 @@ mod test_parse {
); );
} }
#[test]
fn malformed_pattern_field_access() {
// See https://github.com/rtfeldman/roc/issues/399
let arena = Bump::new();
let newlines = bumpalo::vec![in &arena; Newline];
let pattern1 = Pattern::SpaceBefore(
arena.alloc(Pattern::Malformed("bar.and")),
newlines.into_bump_slice(),
);
let loc_pattern1 = Located::new(1, 1, 4, 11, pattern1);
let expr1 = Num("1");
let loc_expr1 = Located::new(1, 1, 15, 16, expr1);
let branch1 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern1],
value: loc_expr1,
guard: None,
});
let newlines = bumpalo::vec![in &arena; Newline];
let pattern2 = Pattern::SpaceBefore(arena.alloc(Underscore), newlines.into_bump_slice());
let loc_pattern2 = Located::new(2, 2, 4, 5, pattern2);
let expr2 = Num("4");
let loc_expr2 = Located::new(2, 2, 9, 10, expr2);
let branch2 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern2 ],
value: loc_expr2,
guard: None,
});
let branches = bumpalo::vec![in &arena; branch1, branch2];
let var = Var {
module_name: "",
ident: "x",
};
let loc_cond = Located::new(0, 0, 5, 6, var);
let expected = Expr::When(arena.alloc(loc_cond), branches);
let actual = parse_with(
&arena,
indoc!(
r#"
when x is
bar.and -> 1
_ -> 4
"#
),
);
assert_eq!(Ok(expected), actual);
}
#[test]
fn malformed_pattern_module_name() {
// See https://github.com/rtfeldman/roc/issues/399
let arena = Bump::new();
let newlines = bumpalo::vec![in &arena; Newline];
let pattern1 = Pattern::SpaceBefore(
arena.alloc(Pattern::Malformed("Foo.and")),
newlines.into_bump_slice(),
);
let loc_pattern1 = Located::new(1, 1, 4, 11, pattern1);
let expr1 = Num("1");
let loc_expr1 = Located::new(1, 1, 15, 16, expr1);
let branch1 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern1],
value: loc_expr1,
guard: None,
});
let newlines = bumpalo::vec![in &arena; Newline];
let pattern2 = Pattern::SpaceBefore(arena.alloc(Underscore), newlines.into_bump_slice());
let loc_pattern2 = Located::new(2, 2, 4, 5, pattern2);
let expr2 = Num("4");
let loc_expr2 = Located::new(2, 2, 9, 10, expr2);
let branch2 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern2 ],
value: loc_expr2,
guard: None,
});
let branches = bumpalo::vec![in &arena; branch1, branch2];
let var = Var {
module_name: "",
ident: "x",
};
let loc_cond = Located::new(0, 0, 5, 6, var);
let expected = Expr::When(arena.alloc(loc_cond), branches);
let actual = parse_with(
&arena,
indoc!(
r#"
when x is
Foo.and -> 1
_ -> 4
"#
),
);
assert_eq!(Ok(expected), actual);
}
// PARSE ERROR // PARSE ERROR
// TODO this should be parse error, but isn't! // TODO this should be parse error, but isn't!
// #[test]
// fn trailing_paren() { // fn trailing_paren() {
// assert_parses_to( // assert_parses_to(
// indoc!( // indoc!(

View file

@ -110,12 +110,15 @@ pub enum RuntimeError {
}, },
ModuleNotImported { ModuleNotImported {
module_name: InlinableString, module_name: InlinableString,
ident: InlinableString, imported_modules: MutSet<Box<str>>,
region: Region, region: Region,
}, },
InvalidPrecedence(PrecedenceProblem, Region), InvalidPrecedence(PrecedenceProblem, Region),
MalformedIdentifier(Box<str>, Region), MalformedIdentifier(Box<str>, Region),
MalformedClosure(Region), MalformedClosure(Region),
InvalidRecordUpdate {
region: Region,
},
InvalidFloat(FloatErrorKind, Region, Box<str>), InvalidFloat(FloatErrorKind, Region, Box<str>),
InvalidInt(IntErrorKind, Base, Region, Box<str>), InvalidInt(IntErrorKind, Base, Region, Box<str>),
CircularDef(Vec<Symbol>, Vec<(Region /* pattern */, Region /* expr */)>), CircularDef(Vec<Symbol>, Vec<(Region /* pattern */, Region /* expr */)>),
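
A small sketch of how these two variants are meant to be consumed, with stand-in Region and field types (the real imported_modules is a MutSet<Box<str>>): carrying the set of imported modules inside ModuleNotImported is what lets the report suggest near-misses later, while InvalidRecordUpdate only needs the offending region.

use std::collections::HashSet;

#[derive(Debug)]
struct Region {
    start_line: u32,
    end_line: u32,
}

#[derive(Debug)]
enum RuntimeError {
    ModuleNotImported {
        module_name: String,
        imported_modules: HashSet<Box<str>>,
        region: Region,
    },
    InvalidRecordUpdate {
        region: Region,
    },
}

fn describe(err: &RuntimeError) -> String {
    match err {
        RuntimeError::ModuleNotImported { module_name, imported_modules, region } => format!(
            "lines {} to {}: the `{}` module is not imported ({} imported modules available for suggestions)",
            region.start_line,
            region.end_line,
            module_name,
            imported_modules.len(),
        ),
        RuntimeError::InvalidRecordUpdate { region } => format!(
            "line {}: only variables can be updated with record update syntax",
            region.start_line,
        ),
    }
}

fn main() {
    let imported: HashSet<Box<str>> = ["Bool", "Num", "Map", "Set"].iter().map(|s| (*s).into()).collect();
    let not_imported = RuntimeError::ModuleNotImported {
        module_name: "Foo".to_string(),
        imported_modules: imported,
        region: Region { start_line: 1, end_line: 1 },
    };
    println!("{}", describe(&not_imported));
    println!("{}", describe(&RuntimeError::InvalidRecordUpdate {
        region: Region { start_line: 2, end_line: 2 },
    }));
}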

View file

@ -377,7 +377,11 @@ fn pretty_runtime_error<'b>(
todo!("unsupported patterns are currently not parsed!") todo!("unsupported patterns are currently not parsed!")
} }
RuntimeError::ValueNotExposed { .. } => todo!("value not exposed"), RuntimeError::ValueNotExposed { .. } => todo!("value not exposed"),
RuntimeError::ModuleNotImported { .. } => todo!("module not imported"), RuntimeError::ModuleNotImported {
module_name,
imported_modules,
region,
} => module_not_found(alloc, region, &module_name, imported_modules),
RuntimeError::InvalidPrecedence(_, _) => { RuntimeError::InvalidPrecedence(_, _) => {
// do nothing, reported with PrecedenceProblem // do nothing, reported with PrecedenceProblem
unreachable!() unreachable!()
@ -512,6 +516,14 @@ fn pretty_runtime_error<'b>(
hint, hint,
]) ])
} }
RuntimeError::InvalidRecordUpdate { region } => alloc.stack(vec![
alloc.concat(vec![
alloc.reflow("This expression cannot be updated"),
alloc.reflow(":"),
]),
alloc.region(region),
alloc.reflow("Only variables can be updated with record update syntax."),
]),
RuntimeError::NoImplementation => todo!("no implementation, unreachable"), RuntimeError::NoImplementation => todo!("no implementation, unreachable"),
} }
} }
@ -562,3 +574,49 @@ fn not_found<'b>(
to_details(default_no, default_yes), to_details(default_no, default_yes),
]) ])
} }
fn module_not_found<'b>(
alloc: &'b RocDocAllocator<'b>,
region: roc_region::all::Region,
name: &str,
options: MutSet<Box<str>>,
) -> RocDocBuilder<'b> {
use crate::error::r#type::suggest;
let mut suggestions = suggest::sort(name, options.iter().map(|v| v.as_ref()).collect());
suggestions.truncate(4);
let default_no = alloc.concat(vec![
alloc.reflow("Is there an "),
alloc.keyword("import"),
alloc.reflow(" or "),
alloc.keyword("exposing"),
alloc.reflow(" missing up-top"),
]);
let default_yes = alloc
.reflow("Is there an import missing? Perhaps there is a typo, these names seem close:");
let to_details = |no_suggestion_details, yes_suggestion_details| {
if suggestions.is_empty() {
no_suggestion_details
} else {
alloc.stack(vec![
yes_suggestion_details,
alloc
.vcat(suggestions.into_iter().map(|v| alloc.string(v.to_string())))
.indent(4),
])
}
};
alloc.stack(vec![
alloc.concat(vec![
alloc.reflow("The `"),
alloc.string(name.to_string()),
alloc.reflow("` module is not imported:"),
]),
alloc.region(region),
to_details(default_no, default_yes),
])
}
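
The suggestion list above comes from suggest::sort followed by truncate(4). As a rough illustration of that ranking idea (not the actual implementation of suggest::sort, whose distance metric may differ), sorting candidates by edit distance to the name the user wrote and keeping the closest few looks like this:

// Plain Levenshtein distance over two strings, two-row dynamic programming.
fn levenshtein(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    let mut curr = vec![0; b.len() + 1];

    for (i, &ca) in a.iter().enumerate() {
        curr[0] = i + 1;
        for (j, &cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            curr[j + 1] = (prev[j + 1] + 1).min(curr[j] + 1).min(prev[j] + cost);
        }
        std::mem::swap(&mut prev, &mut curr);
    }
    prev[b.len()]
}

// Rank candidates by distance and keep only the closest `keep` of them.
fn suggest(name: &str, options: &[&str], keep: usize) -> Vec<String> {
    let mut ranked: Vec<&str> = options.to_vec();
    ranked.sort_by_key(|opt| levenshtein(name, *opt));
    ranked.into_iter().take(keep).map(|s| s.to_string()).collect()
}

fn main() {
    let imported = ["Bool", "Num", "Map", "Set", "Result"];
    // e.g. the `Foo` from the module_not_imported test below
    println!("{:?}", suggest("Foo", &imported, 4));
}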

View file

@ -1,3 +1,4 @@
use inlinable_string::InlinableString;
use roc_module::ident::Ident; use roc_module::ident::Ident;
use roc_module::ident::{Lowercase, TagName, Uppercase}; use roc_module::ident::{Lowercase, TagName, Uppercase};
use roc_module::symbol::{Interns, ModuleId, Symbol}; use roc_module::symbol::{Interns, ModuleId, Symbol};
@ -297,6 +298,10 @@ impl<'a> RocDocAllocator<'a> {
.annotate(Annotation::Module) .annotate(Annotation::Module)
} }
pub fn inlinable_string(&'a self, s: InlinableString) -> DocBuilder<'a, Self, Annotation> {
self.text(format!("{}", s)).annotate(Annotation::Module)
}
pub fn binop( pub fn binop(
&'a self, &'a self,
content: roc_module::operator::BinOp, content: roc_module::operator::BinOp,

View file

@ -145,7 +145,10 @@ pub fn can_expr_with(
let mut var_store = VarStore::default(); let mut var_store = VarStore::default();
let var = var_store.fresh(); let var = var_store.fresh();
let expected = Expected::NoExpectation(Type::Variable(var)); let expected = Expected::NoExpectation(Type::Variable(var));
let module_ids = ModuleIds::default(); let mut module_ids = ModuleIds::default();
// ensure the Test module is accessible in our tests
module_ids.get_or_insert(&"Test".into());
// Desugar operators (convert them to Apply calls, taking into account // Desugar operators (convert them to Apply calls, taking into account
// operator precedence and associativity rules), before doing other canonicalization. // operator precedence and associativity rules), before doing other canonicalization.

View file

@ -2195,7 +2195,7 @@ mod test_reporting {
This pattern does not cover all the possibilities: This pattern does not cover all the possibilities:
7 f = \Left v -> v 7 f = \Left v -> v
^^^^ ^^^^^^
Other possibilities include: Other possibilities include:
@ -2230,7 +2230,7 @@ mod test_reporting {
This pattern does not cover all the possibilities: This pattern does not cover all the possibilities:
5 (Left y) = x 5 (Left y) = x
^^^^ ^^^^^^
Other possibilities include: Other possibilities include:
@ -3395,4 +3395,65 @@ mod test_reporting {
), ),
) )
} }
#[test]
fn invalid_record_update() {
report_problem_as(
indoc!(
r#"
foo = { bar: 3 }
updateNestedRecord = { foo.bar & x: 4 }
example = { age: 42 }
# these should work
y = { Test.example & age: 3 }
x = { example & age: 4 }
{ updateNestedRecord, foo, x, y }
"#
),
indoc!(
r#"
-- SYNTAX PROBLEM --------------------------------------------------------------
This expression cannot be updated:
2 updateNestedRecord = { foo.bar & x: 4 }
^^^^^^^
Only variables can be updated with record update syntax.
"#
),
)
}
#[test]
fn module_not_imported() {
report_problem_as(
indoc!(
r#"
Foo.test
"#
),
indoc!(
r#"
-- SYNTAX PROBLEM --------------------------------------------------------------
The `Foo` module is not imported:
1 Foo.test
^^^^^^^^
Is there an import missing? Perhaps there is a typo, these names seem
close:
Bool
Num
Map
Set
"#
),
)
}
} }

View file

@ -16,6 +16,52 @@ use roc_unify::unify::Unified::*;
// https://github.com/elm/compiler // https://github.com/elm/compiler
// Thank you, Evan! // Thank you, Evan!
// A lot of energy was put into making type inference fast. That means it's pretty intimidating.
//
// Fundamentally, type inference assigns very general types based on syntax, and then tries to
// make all the pieces fit together. For instance when writing
//
// > f x
//
// We know that `f` is a function, and thus must have some type `a -> b`.
// `x` is just a variable, that gets the type `c`
//
// Next comes constraint generation. For `f x` to be well-typed,
// it must be the case that `c = a`, so a constraint `Eq(c, a)` is generated.
// But `Eq` is a bit special: `c` does not need to equal `a` exactly, but they need to be equivalent.
// This allows for instance the use of aliases. `c` could be an alias, and so looks different from
// `a`, but they still represent the same type.
//
// Then we get to solving, which happens in this file.
//
// When we hit an `Eq` constraint, then we check whether the two involved types are in fact
// equivalent using unification, and when they are, we can substitute one for the other.
//
// When all constraints are processed, and no unification errors have occurred, then the program
// is type-correct. Otherwise the errors are reported.
//
// Now, coming back to efficiency, this type checker uses *ranks* to optimize generalization.
// The rank tracks the number of let-bindings a variable is "under". Top-level definitions
// have rank 1. A let in a top-level definition gets rank 2, and so on.
//
// This has to do with generalization of type variables. This is described here
//
// http://okmij.org/ftp/ML/generalization.html#levels
//
// The problem is that when doing inference naively, this program would fail to typecheck
//
// f =
// id = \x -> x
//
// { a: id 1, b: id "foo" }
//
// Because `id` is applied to an integer, the type `Int -> Int` is inferred, which then gives a
// type error for `id "foo"`.
//
// Thus instead the inferred type for `id` is generalized (see the `generalize` function) to `a -> a`.
// Ranks are used to limit the number of type variables considered for generalization. Only those inside
// of the let (so those used in inferring the type of `\x -> x`) are considered.
#[derive(PartialEq, Debug, Clone)] #[derive(PartialEq, Debug, Clone)]
pub enum TypeError { pub enum TypeError {
BadExpr(Region, Category, ErrorType, Expected<ErrorType>), BadExpr(Region, Category, ErrorType, Expected<ErrorType>),
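
To make the rank bookkeeping described in the comment above a bit more concrete, here is a toy, self-contained model of it (Pool, TypeVar, and the method names are invented for this sketch; the solver's real pools and generalize are more involved):

// Variables remember the let-depth (rank) at which they were created; when a
// let body is finished, only variables introduced at the current rank are
// generalized. Outer variables stay monomorphic for now.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Rank(u32);

#[derive(Debug)]
struct TypeVar {
    name: &'static str,
    rank: Rank,
    generalized: bool,
}

struct Pool {
    current_rank: Rank,
    vars: Vec<TypeVar>,
}

impl Pool {
    fn fresh(&mut self, name: &'static str) -> usize {
        self.vars.push(TypeVar { name, rank: self.current_rank, generalized: false });
        self.vars.len() - 1
    }

    fn enter_let(&mut self) {
        self.current_rank.0 += 1;
    }

    // Leaving a let: generalize only the variables created at the rank we are
    // leaving; anything from an outer rank is left alone.
    fn exit_let_and_generalize(&mut self) {
        let leaving = self.current_rank;
        for var in &mut self.vars {
            if var.rank == leaving {
                var.generalized = true;
            }
        }
        self.current_rank.0 -= 1;
    }
}

fn main() {
    let mut pool = Pool { current_rank: Rank(1), vars: Vec::new() };
    let _outer = pool.fresh("f");   // rank 1: the top-level def
    pool.enter_let();
    let _id_arg = pool.fresh("x");  // rank 2: created while inferring `id = \x -> x`
    pool.exit_let_and_generalize(); // `x`'s variable becomes polymorphic, `f`'s does not
    println!("{:#?}", pool.vars);
}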

View file

@ -1418,7 +1418,7 @@ mod solve_uniq_expr {
swap swap
"# "#
), ),
"Attr * (Attr Shared Int, Attr Shared Int, Attr * (List (Attr Shared a)) -> Attr * (List (Attr Shared a)))" "Attr * (Attr * Int, Attr * Int, Attr * (List (Attr Shared a)) -> Attr * (List (Attr Shared a)))"
); );
} }
@ -2789,7 +2789,7 @@ mod solve_uniq_expr {
cheapestOpen cheapestOpen
"# "#
), ),
"Attr * (Attr * (Attr Shared position -> Attr Shared Float), Attr (* | * | *) (Model (Attr Shared position)) -> Attr * (Result (Attr Shared position) (Attr * [ KeyNotFound ]*)))" "Attr * (Attr * (Attr Shared position -> Attr * Float), Attr (* | * | * | *) (Model (Attr Shared position)) -> Attr * (Result (Attr Shared position) (Attr * [ KeyNotFound ]*)))"
) )
}); });
} }
@ -2956,7 +2956,7 @@ mod solve_uniq_expr {
findPath findPath
"# "#
), ),
"Attr * (Attr * { costFunction : (Attr Shared (Attr Shared position, Attr Shared position -> Attr Shared Float)), end : (Attr Shared position), moveFunction : (Attr Shared (Attr Shared position -> Attr * (Set (Attr * position)))), start : (Attr Shared position) } -> Attr * (Result (Attr * (List (Attr Shared position))) (Attr * [ KeyNotFound ]*)))" "Attr * (Attr * { costFunction : (Attr Shared (Attr Shared position, Attr Shared position -> Attr * Float)), end : (Attr Shared position), moveFunction : (Attr Shared (Attr Shared position -> Attr * (Set (Attr * position)))), start : (Attr Shared position) } -> Attr * (Result (Attr * (List (Attr Shared position))) (Attr * [ KeyNotFound ]*)))"
) )
}); });
} }

View file

@ -1,11 +1,10 @@
use roc_collections::all::{relative_complement, union, MutMap, SendSet}; use roc_collections::all::{get_shared, relative_complement, union, MutMap, SendSet};
use roc_module::ident::{Lowercase, TagName}; use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_types::boolean_algebra::Bool; use roc_types::boolean_algebra::Bool;
use roc_types::subs::Content::{self, *}; use roc_types::subs::Content::{self, *};
use roc_types::subs::{Descriptor, FlatType, Mark, OptVariable, Subs, Variable}; use roc_types::subs::{Descriptor, FlatType, Mark, OptVariable, Subs, Variable};
use roc_types::types::{gather_fields, ErrorType, Mismatch, RecordStructure}; use roc_types::types::{gather_fields, ErrorType, Mismatch, RecordStructure};
use std::hash::Hash;
macro_rules! mismatch { macro_rules! mismatch {
() => {{ () => {{
@ -223,28 +222,6 @@ fn unify_structure(
} }
} }
/// Like intersection_with, except for MutMap and specialized to return
/// a tuple. Also, only clones the values that will be actually returned,
/// rather than cloning everything.
fn get_shared<K, V>(map1: &MutMap<K, V>, map2: &MutMap<K, V>) -> MutMap<K, (V, V)>
where
K: Clone + Eq + Hash,
V: Clone,
{
let mut answer = MutMap::default();
for (key, right_value) in map2 {
match std::collections::HashMap::get(map1, &key) {
None => (),
Some(left_value) => {
answer.insert(key.clone(), (left_value.clone(), right_value.clone()));
}
}
}
answer
}
fn unify_record( fn unify_record(
subs: &mut Subs, subs: &mut Subs,
pool: &mut Pool, pool: &mut Pool,

View file

@ -2,8 +2,6 @@ use roc_can::expr::Expr;
use roc_collections::all::{ImMap, ImSet}; use roc_collections::all::{ImMap, ImSet};
use roc_module::ident::Lowercase; use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_region::all::Located;
use roc_types::subs::Variable;
// fake field names for container elements // fake field names for container elements
// e.g. for lists, internally it's a record with a `list_elem` field // e.g. for lists, internally it's a record with a `list_elem` field
@ -33,8 +31,14 @@ impl IntoIterator for FieldAccess {
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum Usage { pub enum Usage {
Simple(Mark), Simple(Mark),
Access(Container, Mark, FieldAccess),
Update(Container, ImSet<Lowercase>, FieldAccess), // Lists, Sets, ADTs
ApplyAccess(Mark, Vec<Usage>),
ApplyUpdate(ImSet<usize>, Vec<Usage>),
// Records
RecordAccess(Mark, FieldAccess),
RecordUpdate(ImSet<Lowercase>, FieldAccess),
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
@ -92,6 +96,58 @@ impl Composable for FieldAccess {
} }
} }
impl Composable for Vec<Usage> {
fn sequential(&mut self, other: &Self) {
// NOTE we don't know they have the same length
let mut it_other = other.iter();
{
for (self_usage, other_usage) in self.iter_mut().zip(&mut it_other) {
self_usage.sequential(&other_usage);
if *self_usage != Usage::Simple(Mark::Seen) {
// e.g. we access `rec.foo` and `rec.foo.bar`.
// Since a reference to `rec.foo` exists, there are at least two references to `foo.bar`
// (`foo.bar` itself and `.bar rec.foo`)
// Therefore fields of the subtrees must be shared!
// TODO make this work? Seems to function well without it
// self_nested.or_subtree(&Usage::Shared);
// other_nested.or_subtree(&Usage::Shared);
//
// member function on FieldAccess
// fn or_subtree(&mut self, constraint: &Usage) {
// for field_usage in self.fields.iter_mut() {
// field_usage.parallel(constraint);
// }
// }
}
}
}
// if there are remaining elements in other, push them onto self
for other_usage in it_other {
self.push(other_usage.clone());
}
}
fn parallel(&mut self, other: &Self) {
// NOTE we don't know they have the same length
let mut it_other = other.iter();
{
for (self_usage, other_usage) in self.iter_mut().zip(&mut it_other) {
self_usage.parallel(&other_usage);
}
}
// if there are remaining elements in other, push them onto self
for other_usage in it_other {
self.push(other_usage.clone());
}
}
}
impl Composable for Usage { impl Composable for Usage {
fn sequential(&mut self, other: &Self) { fn sequential(&mut self, other: &Self) {
use Mark::*; use Mark::*;
@ -103,79 +159,161 @@ impl Composable for Usage {
*self = Simple(Shared); *self = Simple(Shared);
} }
(Update(c1, _, _), Update(c2, _, _)) | (Update(c1, _, _), Access(c2, _, _)) => { // Record Update
debug_assert_eq!(c1, c2); (RecordUpdate(_, _), RecordUpdate(_, _)) | (RecordUpdate(_, _), RecordAccess(_, _)) => {
*self = Simple(Shared); *self = Simple(Shared);
} }
(Update(_, _, _), Simple(Unique)) | (Simple(Unique), Update(_, _, _)) => { (RecordUpdate(_, _), Simple(Unique)) | (Simple(Unique), RecordUpdate(_, _)) => {
*self = Simple(Shared); *self = Simple(Shared);
} }
(Access(c1, m1, fields1), Update(c2, overwritten, fields2)) => { (RecordAccess(m1, fields1), RecordUpdate(overwritten, fields2)) => {
debug_assert_eq!(c1, c2); *self = correct_overwritten(*m1, fields1, Seen, fields2, overwritten);
*self = correct_overwritten(*c1, *m1, fields1, Seen, fields2, overwritten);
} }
(Simple(Seen), Update(c1, overwritten, fa)) => { (Simple(Seen), RecordUpdate(overwritten, fa)) => {
*self = Update(*c1, overwritten.clone(), fa.clone()); *self = RecordUpdate(overwritten.clone(), fa.clone());
} }
(Update(c1, overwritten, fa), Simple(Seen)) => { (RecordUpdate(overwritten, fa), Simple(Seen)) => {
*self = Update(*c1, overwritten.clone(), fa.clone()); *self = RecordUpdate(overwritten.clone(), fa.clone());
} }
// Access // RecordAccess
(Access(c1, m1, fa1), Access(c2, m2, fa2)) => { (RecordAccess(m1, fa1), RecordAccess(m2, fa2)) => {
debug_assert_eq!(c1, c2);
let mut fa = fa1.clone(); let mut fa = fa1.clone();
fa.sequential(fa2); fa.sequential(fa2);
let mut m = *m1; let mut m = *m1;
m.sequential(m2); m.sequential(m2);
*self = Access(*c1, m, fa); *self = RecordAccess(m, fa);
} }
(Access(c1, m, fa1), Simple(Unique)) => { (RecordAccess(m, fa1), Simple(Unique)) => {
let mut copy = Access(*c1, *m, fa1.clone()); let mut copy = RecordAccess(*m, fa1.clone());
make_subtree_shared(&mut copy); make_subtree_shared(&mut copy);
// correct the mark of the top-level access // correct the mark of the top-level access
*self = if let Access(c, _, fa) = copy { *self = if let RecordAccess(_, fa) = copy {
let mut m = *m; let mut m = *m;
m.sequential(&Unique); m.sequential(&Unique);
Access(c, m, fa) RecordAccess(m, fa)
} else { } else {
unreachable!() unreachable!()
}; };
} }
(Simple(Unique), Access(c, m, fa)) => { (Simple(Unique), RecordAccess(m, fa)) => {
let mut copy = Access(*c, *m, fa.clone()); let mut copy = RecordAccess(*m, fa.clone());
make_subtree_shared(&mut copy); make_subtree_shared(&mut copy);
// correct the mark of the top-level access // correct the mark of the top-level access
*self = if let Access(c, _, fa) = copy { *self = if let RecordAccess(_, fa) = copy {
let mut m = *m; let mut m = *m;
m.sequential(&Unique); m.sequential(&Unique);
Access(c, m, fa) RecordAccess(m, fa)
} else { } else {
unreachable!() unreachable!()
}; };
} }
(Simple(m1 @ Seen), Access(c1, m2, fa)) => { (Simple(m1 @ Seen), RecordAccess(m2, fa)) => {
let mut m = *m1; let mut m = *m1;
m.sequential(m2); m.sequential(m2);
*self = Access(*c1, m, fa.clone()) *self = RecordAccess(m, fa.clone())
} }
(Access(c1, m1, fa), Simple(m2 @ Seen)) => { (RecordAccess(m1, fa), Simple(m2 @ Seen)) => {
let mut m = *m1; let mut m = *m1;
m.sequential(m2); m.sequential(m2);
*self = Access(*c1, m, fa.clone()); *self = RecordAccess(m, fa.clone());
} }
// Apply Update
(ApplyUpdate(_, _), ApplyUpdate(_, _)) | (ApplyUpdate(_, _), ApplyAccess(_, _)) => {
*self = Simple(Shared);
}
(ApplyUpdate(_, _), Simple(Unique)) | (Simple(Unique), ApplyUpdate(_, _)) => {
*self = Simple(Shared);
}
(ApplyAccess(m1, fields1), ApplyUpdate(overwritten, fields2)) => {
*self = correct_overwritten_apply(*m1, fields1, Seen, fields2, overwritten);
}
(Simple(Seen), ApplyUpdate(overwritten, fa)) => {
*self = ApplyUpdate(overwritten.clone(), fa.clone());
}
(ApplyUpdate(overwritten, fa), Simple(Seen)) => {
*self = ApplyUpdate(overwritten.clone(), fa.clone());
}
// Apply Access
(ApplyAccess(m1, fa1), ApplyAccess(m2, fa2)) => {
let mut fa = fa1.clone();
fa.sequential(fa2);
let mut m = *m1;
m.sequential(m2);
*self = ApplyAccess(m, fa);
}
(ApplyAccess(m, fa1), Simple(Unique)) => {
let mut copy = ApplyAccess(*m, fa1.clone());
make_subtree_shared(&mut copy);
// correct the mark of the top-level access
*self = if let ApplyAccess(_, fa) = copy {
let mut m = *m;
m.sequential(&Unique);
ApplyAccess(m, fa)
} else {
unreachable!()
};
}
(Simple(Unique), ApplyAccess(m, fa)) => {
let mut copy = ApplyAccess(*m, fa.clone());
make_subtree_shared(&mut copy);
// correct the mark of the top-level access
*self = if let ApplyAccess(_, fa) = copy {
let mut m = *m;
m.sequential(&Unique);
ApplyAccess(m, fa)
} else {
unreachable!()
};
}
(Simple(m1 @ Seen), ApplyAccess(m2, fa)) => {
let mut m = *m1;
m.sequential(m2);
*self = ApplyAccess(m, fa.clone())
}
(ApplyAccess(m1, fa), Simple(m2 @ Seen)) => {
let mut m = *m1;
m.sequential(m2);
*self = ApplyAccess(m, fa.clone());
}
// Things cannot change type
(ApplyAccess(_, _), RecordAccess(_, _))
| (ApplyAccess(_, _), RecordUpdate(_, _))
| (ApplyUpdate(_, _), RecordAccess(_, _))
| (ApplyUpdate(_, _), RecordUpdate(_, _))
| (RecordAccess(_, _), ApplyAccess(_, _))
| (RecordUpdate(_, _), ApplyAccess(_, _))
| (RecordAccess(_, _), ApplyUpdate(_, _))
| (RecordUpdate(_, _), ApplyUpdate(_, _)) => {
unreachable!("applies cannot turn into records or vice versa!")
}
// Simple
(Simple(s1), Simple(s2)) => { (Simple(s1), Simple(s2)) => {
let mut s = *s1; let mut s = *s1;
s.sequential(s2); s.sequential(s2);
@ -199,58 +337,120 @@ impl Composable for Usage {
(Simple(Shared), _) | (_, Simple(Shared)) => { (Simple(Shared), _) | (_, Simple(Shared)) => {
*self = Simple(Shared); *self = Simple(Shared);
} }
// Record update
(Update(c1, w1, fa1), Update(c2, w2, fa2)) => { (RecordUpdate(w1, fa1), RecordUpdate(w2, fa2)) => {
debug_assert_eq!(c1, c2);
let mut fa = fa1.clone(); let mut fa = fa1.clone();
fa.parallel(fa2); fa.parallel(fa2);
let w = w1.clone().intersection(w2.clone()); let w = w1.clone().intersection(w2.clone());
*self = Update(*c1, w, fa); *self = RecordUpdate(w, fa);
} }
(Update(_, _, _), Simple(Unique)) | (Update(_, _, _), Simple(Seen)) => { (RecordUpdate(_, _), Simple(Unique)) | (RecordUpdate(_, _), Simple(Seen)) => {
//*self = Update(*c1, w.clone(), fa.clone()); //*self = RecordUpdate( w.clone(), fa.clone());
} }
(Simple(Unique), Update(c1, w, fa)) | (Simple(Seen), Update(c1, w, fa)) => { (Simple(Unique), RecordUpdate(w, fa)) | (Simple(Seen), RecordUpdate(w, fa)) => {
*self = Update(*c1, w.clone(), fa.clone()); *self = RecordUpdate(w.clone(), fa.clone());
} }
(Update(c1, w, fa1), Access(c2, _, fa2)) => { (RecordUpdate(w, fa1), RecordAccess(_, fa2)) => {
debug_assert_eq!(c1, c2);
let mut fa = fa1.clone(); let mut fa = fa1.clone();
fa.parallel(&fa2.clone()); fa.parallel(&fa2.clone());
*self = Update(*c1, w.clone(), fa); *self = RecordUpdate(w.clone(), fa);
} }
(Access(c1, _, fa1), Update(c2, w, fa2)) => { (RecordAccess(_, fa1), RecordUpdate(w, fa2)) => {
debug_assert_eq!(c1, c2);
let mut fa = fa1.clone(); let mut fa = fa1.clone();
fa.parallel(&fa2.clone()); fa.parallel(&fa2.clone());
*self = Update(*c1, w.clone(), fa); *self = RecordUpdate(w.clone(), fa);
} }
(Access(c1, m1, fa1), Access(c2, m2, fa2)) => { // Record Access
debug_assert_eq!(c1, c2); (RecordAccess(m1, fa1), RecordAccess(m2, fa2)) => {
let mut m = *m1; let mut m = *m1;
m.parallel(m2); m.parallel(m2);
let mut fa = fa1.clone(); let mut fa = fa1.clone();
fa.parallel(fa2); fa.parallel(fa2);
*self = Access(*c1, m, fa) *self = RecordAccess(m, fa)
} }
(Access(c, m, fa), Simple(Unique)) => { (RecordAccess(m, fa), Simple(Unique)) => {
let mut m = *m; let mut m = *m;
m.parallel(&Unique); m.parallel(&Unique);
*self = Access(*c, m, fa.clone()); *self = RecordAccess(m, fa.clone());
} }
(Access(_, _, _), Simple(Seen)) => { (RecordAccess(_, _), Simple(Seen)) => {
// *self = Access(*c1, *m, fa.clone()); // *self = RecordAccess( *m, fa.clone());
} }
(Simple(m1 @ Unique), Access(c1, m2, fa)) | (Simple(m1 @ Seen), Access(c1, m2, fa)) => { (Simple(m1 @ Unique), RecordAccess(m2, fa))
| (Simple(m1 @ Seen), RecordAccess(m2, fa)) => {
let mut m = *m1; let mut m = *m1;
m.sequential(m2); m.sequential(m2);
*self = Access(*c1, m, fa.clone()); *self = RecordAccess(m, fa.clone());
}
// Apply Update
(ApplyUpdate(w1, fa1), ApplyUpdate(w2, fa2)) => {
let mut fa = fa1.clone();
fa.parallel(fa2);
let w = w1.clone().intersection(w2.clone());
*self = ApplyUpdate(w, fa);
}
(ApplyUpdate(_, _), Simple(Unique)) | (ApplyUpdate(_, _), Simple(Seen)) => {
//*self = ApplyUpdate( w.clone(), fa.clone());
}
(Simple(Unique), ApplyUpdate(w, fa)) | (Simple(Seen), ApplyUpdate(w, fa)) => {
*self = ApplyUpdate(w.clone(), fa.clone());
}
(ApplyUpdate(w, fa1), ApplyAccess(_, fa2)) => {
let mut fa = fa1.clone();
fa.parallel(&fa2.clone());
*self = ApplyUpdate(w.clone(), fa);
}
(ApplyAccess(_, fa1), ApplyUpdate(w, fa2)) => {
let mut fa = fa1.clone();
fa.parallel(&fa2.clone());
*self = ApplyUpdate(w.clone(), fa);
}
// Apply Access
(ApplyAccess(m1, fa1), ApplyAccess(m2, fa2)) => {
let mut m = *m1;
m.parallel(m2);
let mut fa = fa1.clone();
fa.parallel(fa2);
*self = ApplyAccess(m, fa)
}
(ApplyAccess(m, fa), Simple(Unique)) => {
let mut m = *m;
m.parallel(&Unique);
*self = ApplyAccess(m, fa.clone());
}
(ApplyAccess(_, _), Simple(Seen)) => {
// *self = ApplyAccess( *m, fa.clone());
}
(Simple(m1 @ Unique), ApplyAccess(m2, fa))
| (Simple(m1 @ Seen), ApplyAccess(m2, fa)) => {
let mut m = *m1;
m.sequential(m2);
*self = ApplyAccess(m, fa.clone());
}
// Things cannot change type
(ApplyAccess(_, _), RecordAccess(_, _))
| (ApplyAccess(_, _), RecordUpdate(_, _))
| (ApplyUpdate(_, _), RecordAccess(_, _))
| (ApplyUpdate(_, _), RecordUpdate(_, _))
| (RecordAccess(_, _), ApplyAccess(_, _))
| (RecordUpdate(_, _), ApplyAccess(_, _))
| (RecordAccess(_, _), ApplyUpdate(_, _))
| (RecordUpdate(_, _), ApplyUpdate(_, _)) => {
unreachable!("applies cannot turn into records or vice versa!")
} }
} }
} }
@ -293,7 +493,6 @@ impl Composable for Mark {
} }
fn correct_overwritten( fn correct_overwritten(
c: Container,
mut mark1: Mark, mut mark1: Mark,
fa1: &FieldAccess, fa1: &FieldAccess,
mark2: Mark, mark2: Mark,
@ -314,7 +513,34 @@ fn correct_overwritten(
} }
} }
Update(c, overwritten.clone(), fa1) RecordUpdate(overwritten.clone(), fa1)
}
fn correct_overwritten_apply(
mut mark1: Mark,
fa1: &[Usage],
mark2: Mark,
fa2: &[Usage],
overwritten: &ImSet<usize>,
) -> Usage {
use Usage::*;
let mut fa1 = fa1.to_owned();
// TODO fix this cloning
// tricky because Composable is defined on Vec, not &[]
let fa2 = fa2.to_owned();
mark1.sequential(&mark2);
fa1.sequential(&fa2);
// fields that are accessed, but not overwritten in the update, must be shared!
for (index, usage) in fa1.iter_mut().enumerate() {
if !overwritten.contains(&index) {
make_subtree_shared(usage);
}
}
ApplyUpdate(overwritten.clone(), fa1)
} }
fn make_subtree_shared(usage: &mut Usage) { fn make_subtree_shared(usage: &mut Usage) {
@ -328,12 +554,12 @@ fn make_subtree_shared(usage: &mut Usage) {
*usage = Simple(Shared); *usage = Simple(Shared);
} }
Update(_, _, fa) => { RecordUpdate(_, fa) => {
for nested in fa.fields.iter_mut() { for nested in fa.fields.iter_mut() {
make_subtree_shared(nested); make_subtree_shared(nested);
} }
} }
Access(_, m, fa) => { RecordAccess(m, fa) => {
for nested in fa.fields.iter_mut() { for nested in fa.fields.iter_mut() {
make_subtree_shared(nested); make_subtree_shared(nested);
} }
@ -342,28 +568,61 @@ fn make_subtree_shared(usage: &mut Usage) {
_ => Shared, _ => Shared,
}; };
} }
ApplyUpdate(_, fa) => {
for nested in fa.iter_mut() {
make_subtree_shared(nested);
}
}
ApplyAccess(m, fa) => {
for nested in fa.iter_mut() {
make_subtree_shared(nested);
}
*m = match &m {
Seen => Seen,
_ => Shared,
};
}
} }
} }
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct VarUsage { pub struct VarUsage {
usage: ImMap<Symbol, Usage>, pub usage: ImMap<Symbol, Usage>,
} pub closure_usage_signatures: ImMap<Symbol, Vec<Usage>>,
impl IntoIterator for VarUsage {
type Item = (Symbol, Usage);
type IntoIter = im_rc::hashmap::ConsumingIter<(Symbol, Usage)>;
fn into_iter(self) -> Self::IntoIter {
self.usage.into_iter()
}
} }
impl VarUsage { impl VarUsage {
pub fn default() -> VarUsage { pub fn default() -> VarUsage {
let empty: ImMap<Symbol, Usage> = ImMap::default(); let mut closure_signatures = ImMap::default();
VarUsage { usage: empty } closure_signatures.insert(
Symbol::NUM_ADD,
vec![Usage::Simple(Mark::Seen), Usage::Simple(Mark::Seen)],
);
closure_signatures.insert(
Symbol::LIST_GET,
vec![
Usage::ApplyAccess(Mark::Seen, vec![Usage::Simple(Mark::Unique)]),
Usage::Simple(Mark::Seen),
],
);
closure_signatures.insert(Symbol::LIST_IS_EMPTY, vec![Usage::Simple(Mark::Seen)]);
closure_signatures.insert(
Symbol::LIST_SET,
vec![
Usage::ApplyUpdate(ImSet::default(), vec![Usage::Simple(Mark::Seen)]),
Usage::Simple(Mark::Seen),
Usage::Simple(Mark::Unique),
],
);
VarUsage {
usage: ImMap::default(),
closure_usage_signatures: closure_signatures,
}
} }
pub fn register_with(&mut self, symbol: Symbol, rc: &Usage) { pub fn register_with(&mut self, symbol: Symbol, rc: &Usage) {
@ -389,6 +648,11 @@ impl VarUsage {
self.register_with(symbol, &Simple(Mark::Unique)); self.register_with(symbol, &Simple(Mark::Unique));
} }
pub fn register_seen(&mut self, symbol: Symbol) {
use self::Usage::*;
self.register_with(symbol, &Simple(Mark::Seen));
}
pub fn unregister(&mut self, symbol: Symbol) { pub fn unregister(&mut self, symbol: Symbol) {
self.usage.remove(&symbol); self.usage.remove(&symbol);
} }
@ -450,7 +714,7 @@ impl Usage {
for field in access_chain.into_iter().rev() { for field in access_chain.into_iter().rev() {
let mut fa = FieldAccess::default(); let mut fa = FieldAccess::default();
fa.fields.insert(field, accum); fa.fields.insert(field, accum);
accum = Usage::Access(Container::Record, Mark::Seen, fa); accum = Usage::RecordAccess(Mark::Seen, fa);
} }
accum accum
@ -619,17 +883,34 @@ pub fn annotate_usage(expr: &Expr, usage: &mut VarUsage) {
annotate_usage(&loc_expr.value, usage); annotate_usage(&loc_expr.value, usage);
} }
Call(fun, loc_args, _) => { Call(fun, loc_args, _) => {
annotate_usage(&fun.1.value, usage);
if let Var(symbol) = fun.1.value { if let Var(symbol) = fun.1.value {
// call by name // call by name
special_case_builtins(usage, symbol, loc_args);
} else {
// unknown call
annotate_usage(&fun.1.value, usage);
for (_, arg) in loc_args { // fetch the signature
let opt_signature = match usage.closure_usage_signatures.get(&symbol) {
Some(v) => Some(v.clone()),
None => None,
};
if let Some(signature) = opt_signature {
// we know the usage signature of this function
for ((_, arg), annotated) in loc_args.iter().zip(signature.iter()) {
if let Var(arg_symbol) = arg.value {
usage.register_with(arg_symbol, &annotated);
} else {
annotate_usage(&arg.value, usage); annotate_usage(&arg.value, usage);
} }
} }
return;
}
}
// unknown call
for (_, arg) in loc_args {
annotate_usage(&arg.value, usage);
}
} }
Closure(_, _, _, _, body) => { Closure(_, _, _, _, body) => {
annotate_usage(&body.0.value, usage); annotate_usage(&body.0.value, usage);
@ -657,7 +938,7 @@ pub fn annotate_usage(expr: &Expr, usage: &mut VarUsage) {
usage.register_with( usage.register_with(
*symbol, *symbol,
&Usage::Update(Container::Record, labels, FieldAccess::default()), &Usage::RecordUpdate(labels, FieldAccess::default()),
); );
} }
Expr::Access { Expr::Access {
@ -693,77 +974,3 @@ fn get_access_chain<'a>(expr: &'a Expr, chain: &mut Vec<Lowercase>) -> Option<&'
_ => None, _ => None,
} }
} }
fn special_case_builtins(
usage: &mut VarUsage,
symbol: Symbol,
loc_args: &[(Variable, Located<Expr>)],
) {
use Expr::Var;
use Mark::*;
use Usage::*;
match symbol {
Symbol::LIST_GET => {
debug_assert!(loc_args.len() == 2);
let loc_list = &loc_args[0].1;
let loc_index = &loc_args[1].1;
if let Var(list_var) = loc_list.value {
usage.register_with(
list_var,
&Access(Container::List, Seen, FieldAccess::list_access()),
);
} else {
annotate_usage(&loc_list.value, usage);
}
annotate_usage(&loc_index.value, usage);
}
Symbol::LIST_SET => {
debug_assert_eq!(loc_args.len(), 3);
let loc_list = &loc_args[0].1;
let loc_index = &loc_args[1].1;
let loc_value = &loc_args[2].1;
if let Var(list_var) = loc_list.value {
usage.register_with(
list_var,
&Update(
Container::List,
ImSet::default(),
FieldAccess::list_update(),
),
);
} else {
annotate_usage(&loc_list.value, usage);
}
annotate_usage(&loc_index.value, usage);
annotate_usage(&loc_value.value, usage);
}
Symbol::LIST_IS_EMPTY | Symbol::LIST_LEN => {
debug_assert!(loc_args.len() == 1);
let loc_list = &loc_args[0].1;
if let Var(list_var) = loc_list.value {
usage.register_with(
list_var,
&Access(Container::List, Seen, FieldAccess::list_seen()),
);
} else {
annotate_usage(&loc_list.value, usage);
}
}
_ => {
usage.register_unique(symbol);
for (_, arg) in loc_args {
annotate_usage(&arg.value, usage);
}
}
}
}
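
To summarize the new calling convention that replaces special_case_builtins above: each known builtin carries a per-argument usage signature, and a call site with a known signature registers each argument variable with the corresponding usage. A condensed, self-contained sketch of the idea, using a much simpler Usage than the real RecordAccess/ApplyAccess machinery:

use std::collections::HashMap;

// Simplified stand-in for the sharing analysis' Usage; the real one also
// tracks field and element accesses.
#[derive(Debug, Clone, PartialEq)]
enum Usage {
    Seen,
    Unique,
    Shared,
}

// Sequential composition: a mere Seen never forces sharing, but two competing
// "real" uses (Unique/Shared) collapse to Shared.
fn combine(old: &Usage, new: &Usage) -> Usage {
    match (old, new) {
        (Usage::Seen, Usage::Seen) => Usage::Seen,
        (Usage::Seen, other) | (other, Usage::Seen) => other.clone(),
        _ => Usage::Shared,
    }
}

fn register(usage: &mut HashMap<&'static str, Usage>, var: &'static str, new: Usage) {
    let old = usage.get(var).cloned().unwrap_or(Usage::Seen);
    usage.insert(var, combine(&old, &new));
}

fn main() {
    // Per-argument signature for a List.get-like builtin: the call itself only
    // inspects the list and the index.
    let mut signatures: HashMap<&'static str, Vec<Usage>> = HashMap::new();
    signatures.insert("List.get", vec![Usage::Seen, Usage::Seen]);

    let mut usage: HashMap<&'static str, Usage> = HashMap::new();

    // `List.get xs i`: the signature is known, so annotate each argument from it.
    if let Some(signature) = signatures.get("List.get") {
        for (arg, annotated) in ["xs", "i"].iter().zip(signature.iter()) {
            register(&mut usage, *arg, annotated.clone());
        }
    }

    // Later, `xs` is used uniquely once; combined with the Seen from the call
    // it stays Unique...
    register(&mut usage, "xs", Usage::Unique);
    // ...but a second unique use makes it Shared.
    register(&mut usage, "xs", Usage::Unique);

    println!("{:?}", usage); // xs => Shared, i => Seen
}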

View file

@ -18,10 +18,9 @@ mod test_usage_analysis {
use roc_collections::all::{ImMap, ImSet}; use roc_collections::all::{ImMap, ImSet};
use roc_module::ident::Lowercase; use roc_module::ident::Lowercase;
use roc_module::symbol::{Interns, Symbol}; use roc_module::symbol::{Interns, Symbol};
use roc_uniq::sharing::{self, Container, FieldAccess, Mark, Usage, VarUsage}; use roc_uniq::sharing::{FieldAccess, Mark, Usage, VarUsage};
use std::collections::HashMap; use std::collections::HashMap;
use Container::*;
use Mark::*; use Mark::*;
use Usage::*; use Usage::*;
@ -37,7 +36,7 @@ mod test_usage_analysis {
} }
match usage { match usage {
Usage::Access(_, _, fields) => { Usage::RecordAccess(_, fields) => {
let mut actual: HashMap<Lowercase, Usage> = HashMap::default(); let mut actual: HashMap<Lowercase, Usage> = HashMap::default();
for (k, v) in fields.into_iter() { for (k, v) in fields.into_iter() {
actual.insert(k, v); actual.insert(k, v);
@ -66,7 +65,7 @@ mod test_usage_analysis {
} }
match usage { match usage {
Usage::Access(_, _, fields) => { Usage::RecordAccess(_, fields) => {
let mut actual: HashMap<Lowercase, Usage> = HashMap::default(); let mut actual: HashMap<Lowercase, Usage> = HashMap::default();
for (k, v) in fields.into_iter() { for (k, v) in fields.into_iter() {
actual.insert(k, v); actual.insert(k, v);
@ -135,14 +134,14 @@ mod test_usage_analysis {
field_access_seq( field_access_seq(
vec![vec!["foo", "bar"], vec!["foo"]], vec![vec!["foo", "bar"], vec!["foo"]],
hashmap![ hashmap![
"foo" => Access(Record, Unique, field_access(hashmap![ "bar" => Simple(Shared) ])) "foo" => RecordAccess( Unique, field_access(hashmap![ "bar" => Simple(Shared) ]))
], ],
); );
field_access_seq( field_access_seq(
vec![vec!["foo"], vec!["foo", "bar"]], vec![vec!["foo"], vec!["foo", "bar"]],
hashmap![ hashmap![
"foo" => Access(Record, Unique, field_access(hashmap![ "bar" => Simple(Shared) ])) "foo" => RecordAccess( Unique, field_access(hashmap![ "bar" => Simple(Shared) ]))
], ],
); );
} }
@ -151,13 +150,13 @@ mod test_usage_analysis {
field_access_par( field_access_par(
vec![vec!["foo", "bar"], vec!["foo"]], vec![vec!["foo", "bar"], vec!["foo"]],
hashmap![ hashmap![
"foo" => Access(Record, Unique, field_access(hashmap![ "bar" => Simple(Unique) ])) "foo" => RecordAccess( Unique, field_access(hashmap![ "bar" => Simple(Unique) ]))
], ],
); );
field_access_par( field_access_par(
vec![vec!["foo"], vec!["foo", "bar"]], vec![vec!["foo"], vec!["foo", "bar"]],
hashmap![ hashmap![
"foo" => Access(Record, Unique, field_access(hashmap![ "bar" => Simple(Unique) ])) "foo" => RecordAccess( Unique, field_access(hashmap![ "bar" => Simple(Unique) ]))
], ],
); );
} }
@ -167,13 +166,13 @@ mod test_usage_analysis {
field_access_seq( field_access_seq(
vec![vec!["foo", "bar", "baz"], vec!["foo", "bar"]], vec![vec!["foo", "bar", "baz"], vec!["foo", "bar"]],
hashmap![ hashmap![
"foo" => Access(Record, Seen, field_access(hashmap![ "bar" => Access(Record, Unique, field_access(hashmap![ "baz" => Simple(Shared) ]))])) "foo" => RecordAccess( Seen, field_access(hashmap![ "bar" => RecordAccess( Unique, field_access(hashmap![ "baz" => Simple(Shared) ]))]))
], ],
); );
field_access_seq( field_access_seq(
vec![vec!["foo", "bar"], vec!["foo", "bar", "baz"]], vec![vec!["foo", "bar"], vec!["foo", "bar", "baz"]],
hashmap![ hashmap![
"foo" => Access(Record, Seen, field_access(hashmap![ "bar" => Access(Record, Unique, field_access(hashmap![ "baz" => Simple(Shared) ]))])) "foo" => RecordAccess( Seen, field_access(hashmap![ "bar" => RecordAccess( Unique, field_access(hashmap![ "baz" => Simple(Shared) ]))]))
], ],
); );
} }
@ -235,11 +234,7 @@ mod test_usage_analysis {
usage.register_with( usage.register_with(
interns.symbol(home, "rec".into()), interns.symbol(home, "rec".into()),
&Access( &RecordAccess(Seen, field_access(hashmap![ "foo" => Simple(Unique) ])),
Record,
Seen,
field_access(hashmap![ "foo" => Simple(Unique) ]),
),
); );
usage usage
@ -263,8 +258,7 @@ mod test_usage_analysis {
let overwritten = hashset!["foo".into()].into(); let overwritten = hashset!["foo".into()].into();
usage.register_with( usage.register_with(
interns.symbol(home, "rec".into()), interns.symbol(home, "rec".into()),
&Update( &RecordUpdate(
Record,
overwritten, overwritten,
field_access(hashmap![ "foo" => Simple(Unique) ]), field_access(hashmap![ "foo" => Simple(Unique) ]),
), ),
@ -313,11 +307,7 @@ mod test_usage_analysis {
usage.register_with( usage.register_with(
interns.symbol(home, "rec".into()), interns.symbol(home, "rec".into()),
&Access( &RecordAccess(Unique, field_access(hashmap![ "foo" => Simple(Shared) ])),
Record,
Unique,
field_access(hashmap![ "foo" => Simple(Shared) ]),
),
); );
usage usage
@ -351,10 +341,7 @@ mod test_usage_analysis {
]); ]);
usage.register_unique(interns.symbol(home, "p".into())); usage.register_unique(interns.symbol(home, "p".into()));
usage.register_with( usage.register_with(interns.symbol(home, "r".into()), &RecordAccess(Unique, fa));
interns.symbol(home, "r".into()),
&Access(Record, Unique, fa),
);
usage usage
}, },
@ -379,16 +366,13 @@ mod test_usage_analysis {
let fa = field_access(hashmap![ let fa = field_access(hashmap![
"foo" => "foo" =>
Access(Record, Seen, field_access(hashmap![ RecordAccess( Seen, field_access(hashmap![
"bar" => Simple(Shared), "bar" => Simple(Shared),
"baz" => Simple(Shared), "baz" => Simple(Shared),
])) ]))
]); ]);
usage.register_with( usage.register_with(interns.symbol(home, "r".into()), &RecordAccess(Unique, fa));
interns.symbol(home, "r".into()),
&Access(Record, Unique, fa),
);
usage usage
}, },
@ -421,14 +405,14 @@ mod test_usage_analysis {
"x" => Simple(Shared), "x" => Simple(Shared),
]); ]);
usage.register_with(r, &Update(Record, overwritten, fa)); usage.register_with(r, &RecordUpdate(overwritten, fa));
let fa = field_access(hashmap![ let fa = field_access(hashmap![
"x" => Simple(Unique), "x" => Simple(Unique),
"y" => Simple(Unique), "y" => Simple(Unique),
]); ]);
usage.register_with(s, &Access(Record, Seen, fa)); usage.register_with(s, &RecordAccess(Seen, fa));
usage usage
}, },
); );
@ -460,14 +444,14 @@ mod test_usage_analysis {
"x" => Simple(Unique), "x" => Simple(Unique),
]); ]);
usage.register_with(r, &Update(Record, overwritten, fa)); usage.register_with(r, &RecordUpdate(overwritten, fa));
let fa = field_access(hashmap![ let fa = field_access(hashmap![
"x" => Simple(Unique), "x" => Simple(Unique),
"y" => Simple(Unique), "y" => Simple(Unique),
]); ]);
usage.register_with(s, &Access(Record, Seen, fa)); usage.register_with(s, &RecordAccess(Seen, fa));
usage usage
}, },
@ -496,7 +480,7 @@ mod test_usage_analysis {
"y" => Simple(Unique), "y" => Simple(Unique),
]); ]);
usage.register_with(r, &Access(Record, Seen, fa)); usage.register_with(r, &RecordAccess(Seen, fa));
usage usage
}, },
@ -525,7 +509,7 @@ mod test_usage_analysis {
let overwritten = hashset!["y".into()].into(); let overwritten = hashset!["y".into()].into();
usage.register_with(r, &Update(Record, overwritten, fa)); usage.register_with(r, &RecordUpdate(overwritten, fa));
usage usage
}, },
@ -546,8 +530,7 @@ mod test_usage_analysis {
let home = test_home(); let home = test_home();
let mut usage = VarUsage::default(); let mut usage = VarUsage::default();
let access = Access( let access = RecordAccess(
Record,
Seen, Seen,
field_access(hashmap![ field_access(hashmap![
"x" => Simple(Unique), "x" => Simple(Unique),
@ -599,16 +582,8 @@ mod test_usage_analysis {
let mut usage = VarUsage::default(); let mut usage = VarUsage::default();
let home = test_home(); let home = test_home();
let access_r = Access( let access_r = RecordAccess(Seen, field_access(hashmap![ "y" => Simple(Unique) ]));
Record, let access_s = RecordAccess(Seen, field_access(hashmap![ "x" => Simple(Shared) ]));
Seen,
field_access(hashmap![ "y" => Simple(Unique) ]),
);
let access_s = Access(
Record,
Seen,
field_access(hashmap![ "x" => Simple(Shared) ]),
);
let r = interns.symbol(home, "r".into()); let r = interns.symbol(home, "r".into());
let s = interns.symbol(home, "s".into()); let s = interns.symbol(home, "s".into());
@ -640,16 +615,8 @@ mod test_usage_analysis {
let mut usage = VarUsage::default(); let mut usage = VarUsage::default();
let home = test_home(); let home = test_home();
let access_r = Access( let access_r = RecordAccess(Seen, field_access(hashmap![ "x" => Simple(Unique) ]));
Record, let access_s = RecordAccess(Seen, field_access(hashmap![ "x" => Simple(Shared) ]));
Seen,
field_access(hashmap![ "x" => Simple(Unique) ]),
);
let access_s = Access(
Record,
Seen,
field_access(hashmap![ "x" => Simple(Shared) ]),
);
let r = interns.symbol(home, "r".into()); let r = interns.symbol(home, "r".into());
let s = interns.symbol(home, "s".into()); let s = interns.symbol(home, "s".into());
@ -681,14 +648,10 @@ mod test_usage_analysis {
let home = test_home(); let home = test_home();
let overwritten = hashset!["y".into()].into(); let overwritten = hashset!["y".into()].into();
let access_r = Update( let access_r =
Record, RecordUpdate(overwritten, field_access(hashmap![ "x" => Simple(Shared) ]));
overwritten,
field_access(hashmap![ "x" => Simple(Shared) ]),
);
let access_s = Access( let access_s = RecordAccess(
Record,
Unique, Unique,
field_access(hashmap![ field_access(hashmap![
"x" => Simple(Shared), "x" => Simple(Shared),
@ -722,18 +685,14 @@ mod test_usage_analysis {
let mut usage = VarUsage::default(); let mut usage = VarUsage::default();
let home = test_home(); let home = test_home();
let access = Access( let access = ApplyAccess(Unique, vec![Simple(Shared)]);
List,
Unique,
field_access(hashmap![
sharing::LIST_ELEM => Simple(Shared),
]),
);
let r = interns.symbol(home, "r".into()); let r = interns.symbol(home, "r".into());
usage.register_with(r, &access); usage.register_with(r, &access);
usage.register_shared(Symbol::LIST_GET);
usage usage
}, },
); );
@ -758,22 +717,19 @@ mod test_usage_analysis {
let mut usage = VarUsage::default(); let mut usage = VarUsage::default();
let home = test_home(); let home = test_home();
let access = Update( let access = ApplyUpdate(ImSet::default(), vec![Simple(Shared)]);
List,
ImSet::default(),
field_access(hashmap![
sharing::LIST_ELEM => Simple(Shared),
]),
);
let r = interns.symbol(home, "list".into()); let r = interns.symbol(home, "list".into());
let v = interns.symbol(home, "v".into()); let v = interns.symbol(home, "v".into());
usage.register_with(r, &access); usage.register_with(r, &access);
usage.register_unique(v); usage.register_seen(v);
usage.register_unique(Symbol::NUM_ADD); usage.register_unique(Symbol::NUM_ADD);
usage.register_unique(Symbol::LIST_SET);
usage.register_unique(Symbol::LIST_GET);
usage usage
}, },
); );
@ -796,18 +752,15 @@ mod test_usage_analysis {
let mut usage = VarUsage::default(); let mut usage = VarUsage::default();
let home = test_home(); let home = test_home();
let access = Update( let access = ApplyUpdate(ImSet::default(), vec![Simple(Seen)]);
List,
ImSet::default(),
field_access(hashmap![
sharing::LIST_ELEM => Simple(Seen),
]),
);
let r = interns.symbol(home, "list".into()); let r = interns.symbol(home, "list".into());
usage.register_with(r, &access); usage.register_with(r, &access);
usage.register_unique(Symbol::LIST_SET);
usage.register_unique(Symbol::LIST_IS_EMPTY);
usage usage
}, },
); );