Merge branch 'trunk' of github.com:rtfeldman/roc into format-precedence-conflict

Chad Stearns, 2020-04-13 22:30:34 -04:00
commit 3f7d1cf33a
108 changed files with 20538 additions and 4064 deletions

View file

@ -10,7 +10,10 @@ jobs:
- uses: actions/checkout@v1
- name: Install LLVM
-run: sudo ./ci/install-llvm.sh 8
+run: sudo ./ci/install-llvm.sh 10
+- name: Enable LLD
+run: sudo ./ci/enable-lld.sh
- uses: actions-rs/toolchain@v1
name: Install Rust Toolchain

View file

@ -9,16 +9,22 @@ To see which version of LLVM you need, take a look at `Cargo.toml`, in particula
For Ubuntu, I used the `Automatic installation script` at [apt.llvm.org](https://apt.llvm.org) - but there are plenty of alternative options at http://releases.llvm.org/download.html
-You may run into an error like this:
+## Use LLD for the linker
+Using [`lld` for Rust's linker](https://github.com/rust-lang/rust/issues/39915#issuecomment-538049306)
+makes build times a lot faster, and I highly recommend it.
+Create `~/.cargo/config` and add this to it:
```
-Updating git repository `https://github.com/TheDan64/inkwell`
-error: failed to load source for a dependency on `inkwell`
-Caused by:
-Unable to update https://github.com/TheDan64/inkwell?branch=llvm8-0#d0f5c1e1
-Caused by:
-revspec 'd0f5c1e198853bc06d8427fbafb7b068032d1d1a' not found; class=Reference (4); code=NotFound (-3)
+[build]
+# Link with lld, per https://github.com/rust-lang/rust/issues/39915#issuecomment-538049306
+# Use target-cpu=native, per https://deterministic.space/high-performance-rust.html
+rustflags = ["-C", "link-arg=-fuse-ld=lld", "-C", "target-cpu=native"]
```
-This seems to be caused by cargo being out of date (even if it's freshly installed), and can be fixed with `cargo update`.
+Then install `lld` version 9 (e.g. with `$ sudo apt-get install lld-9`)
+and make sure there's an `ld.lld` executable on your `PATH` which
+is symlinked to `lld-9`.
+That's it! Enjoy the faster builds.

Cargo.lock (generated, 734 changes)

File diff suppressed because it is too large.

View file

@ -10,6 +10,7 @@ members = [
"compiler/types",
"compiler/uniq",
"compiler/builtins",
+"compiler/builtins/bitcode",
"compiler/constrain",
"compiler/unify",
"compiler/solve",
@ -19,5 +20,12 @@ members = [
"compiler/load",
"compiler/gen",
"vendor/ena",
-"vendor/pathfinding"
+"vendor/pathfinding",
+"vendor/pretty",
+"cli"
]
+# Optimizations based on https://deterministic.space/high-performance-rust.html
+[profile.release]
+lto = "fat"
+codegen-units = 1

ci/enable-lld.sh (new executable file, 6 lines)
View file

@ -0,0 +1,6 @@
#!/bin/bash
mkdir -p $HOME/.cargo
echo -e "[build]\nrustflags = [\"-C\", \"link-arg=-fuse-ld=lld\", \"-C\", \"target-cpu=native\"]" > $HOME/.cargo/config
ln -s /usr/bin/lld-8 /usr/local/bin/ld.lld

View file

@ -11,7 +11,7 @@
set -eux
# read optional command line argument
-LLVM_VERSION=9
+LLVM_VERSION=10
if [ "$#" -eq 1 ]; then
LLVM_VERSION=$1
fi
@ -26,9 +26,9 @@ if [[ $EUID -ne 0 ]]; then
fi
declare -A LLVM_VERSION_PATTERNS
-LLVM_VERSION_PATTERNS[8]="-8"
LLVM_VERSION_PATTERNS[9]="-9"
-LLVM_VERSION_PATTERNS[10]=""
+LLVM_VERSION_PATTERNS[10]="-10"
+LLVM_VERSION_PATTERNS[11]=""
if [ ! ${LLVM_VERSION_PATTERNS[$LLVM_VERSION]+_} ]; then
echo "This script does not support LLVM version $LLVM_VERSION"
@ -47,6 +47,8 @@ case "$DIST_VERSION" in
Ubuntu_18.04 ) REPO_NAME="deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic$LLVM_VERSION_STRING main" ;;
Ubuntu_18.10 ) REPO_NAME="deb http://apt.llvm.org/cosmic/ llvm-toolchain-cosmic$LLVM_VERSION_STRING main" ;;
Ubuntu_19.04 ) REPO_NAME="deb http://apt.llvm.org/disco/ llvm-toolchain-disco$LLVM_VERSION_STRING main" ;;
+Ubuntu_19.10 ) REPO_NAME="deb http://apt.llvm.org/eoan/ llvm-toolchain-eoan$LLVM_VERSION_STRING main" ;;
+Ubuntu_20.04 ) REPO_NAME="deb http://apt.llvm.org/focal/ llvm-toolchain-focal$LLVM_VERSION_STRING main" ;;
* )
echo "Distribution '$DISTRO' in version '$VERSION' is not supported by this script (${DIST_VERSION})."
exit 2

View file

@ -8,6 +8,51 @@ keywords = ["roc", "gui"]
edition = "2018"
description = "A CLI for Roc"
license = "Apache-2.0"
default-run = "roc"
[[bin]]
name = "roc"
path = "src/main.rs"
test = false
bench = false
[dependencies]
-roc = { path = "../", version = "0.1.0" }
+roc_collections = { path = "../compiler/collections" }
roc_can = { path = "../compiler/can" }
roc_parse = { path = "../compiler/parse" }
roc_region = { path = "../compiler/region" }
roc_module = { path = "../compiler/module" }
roc_problem = { path = "../compiler/problem" }
roc_types = { path = "../compiler/types" }
roc_builtins = { path = "../compiler/builtins" }
roc_constrain = { path = "../compiler/constrain" }
roc_uniq = { path = "../compiler/uniq" }
roc_unify = { path = "../compiler/unify" }
roc_solve = { path = "../compiler/solve" }
roc_mono = { path = "../compiler/mono" }
roc_gen = { path = "../compiler/gen", version = "0.1.0" }
roc_reporting = { path = "../compiler/reporting", version = "0.1.0" }
im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.2", features = ["collections"] }
inlinable_string = "0.1.0"
# NOTE: rtfeldman/inkwell is a fork of TheDan64/inkwell which does not change anything.
#
# The reason for this fork is that the way Inkwell is designed, you have to use
# a particular branch (e.g. "llvm8-0") in Cargo.toml. That would be fine, except that
# breaking changes get pushed directly to that branch, which breaks our build
# without warning.
#
# We tried referencing a specific rev on TheDan64/inkwell directly (instead of branch),
# but although that worked locally, it did not work on GitHub Actions. (After a few
# hours of investigation, gave up trying to figure out why.) So this is the workaround:
# having an immutable tag on the rtfeldman/inkwell fork which points to
# a particular "release" of Inkwell.
#
# When we want to update Inkwell, we can sync up rtfeldman/inkwell to the latest
# commit of TheDan64/inkwell, push a new tag which points to the latest commit,
# change the tag value in this Cargo.toml to point to that tag, and `cargo update`.
# This way, GitHub Actions works and nobody's builds get broken.
inkwell = { git = "https://github.com/rtfeldman/inkwell", tag = "llvm10-0.release1" }
target-lexicon = "0.10"

cli/src/helpers.rs (new file, 396 lines)
View file

@ -0,0 +1,396 @@
use bumpalo::Bump;
use roc_builtins::unique::uniq_stdlib;
use roc_can::constraint::Constraint;
use roc_can::env::Env;
use roc_can::expected::Expected;
use roc_can::expr::{canonicalize_expr, Expr, Output};
use roc_can::operator;
use roc_can::scope::Scope;
use roc_collections::all::{ImMap, ImSet, MutMap, SendMap, SendSet};
use roc_constrain::expr::constrain_expr;
use roc_constrain::module::{constrain_imported_values, load_builtin_aliases, Import};
use roc_module::ident::Ident;
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, Symbol};
use roc_parse::ast::{self, Attempting};
use roc_parse::blankspace::space0_before;
use roc_parse::parser::{loc, Fail, Parser, State};
use roc_problem::can::Problem;
use roc_region::all::{Located, Region};
use roc_solve::solve;
use roc_types::subs::{Content, Subs, VarStore, Variable};
use roc_types::types::Type;
use std::hash::Hash;
pub fn test_home() -> ModuleId {
ModuleIds::default().get_or_insert(&"Test".into())
}
pub fn infer_expr(
subs: Subs,
problems: &mut Vec<solve::TypeError>,
constraint: &Constraint,
expr_var: Variable,
) -> (Content, Subs) {
let env = solve::Env {
aliases: MutMap::default(),
vars_by_symbol: SendMap::default(),
};
let (solved, _) = solve::run(&env, problems, subs, constraint);
let content = solved.inner().get_without_compacting(expr_var).content;
(content, solved.into_inner())
}
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, Fail> {
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
}
pub fn parse_loc_with<'a>(arena: &'a Bump, input: &'a str) -> Result<Located<ast::Expr<'a>>, Fail> {
let state = State::new(&input, Attempting::Module);
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
let answer = parser.parse(&arena, state);
answer
.map(|(loc_expr, _)| loc_expr)
.map_err(|(fail, _)| fail)
}
pub fn can_expr(expr_str: &str) -> CanExprOut {
can_expr_with(&Bump::new(), test_home(), expr_str)
}
pub fn uniq_expr(
expr_str: &str,
) -> (
Located<Expr>,
Output,
Vec<Problem>,
Subs,
Variable,
Constraint,
ModuleId,
Interns,
) {
let declared_idents: &ImMap<Ident, (Symbol, Region)> = &ImMap::default();
uniq_expr_with(&Bump::new(), expr_str, declared_idents)
}
pub fn uniq_expr_with(
arena: &Bump,
expr_str: &str,
declared_idents: &ImMap<Ident, (Symbol, Region)>,
) -> (
Located<Expr>,
Output,
Vec<Problem>,
Subs,
Variable,
Constraint,
ModuleId,
Interns,
) {
let home = test_home();
let CanExprOut {
loc_expr,
output,
problems,
var_store: old_var_store,
var,
interns,
..
} = can_expr_with(arena, home, expr_str);
// double check
let var_store = VarStore::new(old_var_store.fresh());
let expected2 = Expected::NoExpectation(Type::Variable(var));
let constraint = roc_constrain::uniq::constrain_declaration(
home,
&var_store,
Region::zero(),
&loc_expr,
declared_idents,
expected2,
);
let stdlib = uniq_stdlib();
let types = stdlib.types;
let imports: Vec<_> = types
.iter()
.map(|(symbol, (solved_type, region))| Import {
loc_symbol: Located::at(*region, *symbol),
solved_type,
})
.collect();
// load builtin values
// TODO what to do with those rigids?
let (_introduced_rigids, constraint) =
constrain_imported_values(imports, constraint, &var_store);
// load builtin types
let mut constraint = load_builtin_aliases(&stdlib.aliases, constraint, &var_store);
constraint.instantiate_aliases(&var_store);
let subs2 = Subs::new(var_store.into());
(
loc_expr, output, problems, subs2, var, constraint, home, interns,
)
}
pub struct CanExprOut {
pub loc_expr: Located<Expr>,
pub output: Output,
pub problems: Vec<Problem>,
pub home: ModuleId,
pub interns: Interns,
pub var_store: VarStore,
pub var: Variable,
pub constraint: Constraint,
}
pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut {
let loc_expr = parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
panic!(
"can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}",
expr_str, e
)
});
let var_store = VarStore::default();
let var = var_store.fresh();
let expected = Expected::NoExpectation(Type::Variable(var));
let module_ids = ModuleIds::default();
// Desugar operators (convert them to Apply calls, taking into account
// operator precedence and associativity rules), before doing other canonicalization.
//
// If we did this *during* canonicalization, then each time we
// visited a BinOp node we'd recursively try to apply this to each of its nested
// operators, and then again on *their* nested operators, ultimately applying the
// rules multiple times unnecessarily.
let loc_expr = operator::desugar_expr(arena, &loc_expr);
let mut scope = Scope::new(home);
let dep_idents = IdentIds::exposed_builtins(0);
let mut env = Env::new(home, dep_idents, &module_ids, IdentIds::default());
let (loc_expr, output) = canonicalize_expr(
&mut env,
&var_store,
&mut scope,
Region::zero(),
&loc_expr.value,
);
let constraint = constrain_expr(
&roc_constrain::expr::Env {
rigids: ImMap::default(),
home,
},
loc_expr.region,
&loc_expr.value,
expected,
);
let types = roc_builtins::std::types();
let imports: Vec<_> = types
.iter()
.map(|(symbol, (solved_type, region))| Import {
loc_symbol: Located::at(*region, *symbol),
solved_type,
})
.collect();
//load builtin values
let (_introduced_rigids, constraint) =
constrain_imported_values(imports, constraint, &var_store);
// TODO determine what to do with those rigids
// for var in introduced_rigids {
// output.ftv.insert(var, format!("internal_{:?}", var).into());
// }
//load builtin types
let mut constraint =
load_builtin_aliases(&roc_builtins::std::aliases(), constraint, &var_store);
constraint.instantiate_aliases(&var_store);
let mut all_ident_ids = MutMap::default();
// When pretty printing types, we may need the exposed builtins,
// so include them in the Interns we'll ultimately return.
for (module_id, ident_ids) in IdentIds::exposed_builtins(0) {
all_ident_ids.insert(module_id, ident_ids);
}
all_ident_ids.insert(home, env.ident_ids);
let interns = Interns {
module_ids: env.module_ids.clone(),
all_ident_ids,
};
CanExprOut {
loc_expr,
output,
problems: env.problems,
home: env.home,
var_store,
interns,
var,
constraint,
}
}
pub fn mut_map_from_pairs<K, V, I>(pairs: I) -> MutMap<K, V>
where
I: IntoIterator<Item = (K, V)>,
K: Hash + Eq,
{
let mut answer = MutMap::default();
for (key, value) in pairs {
answer.insert(key, value);
}
answer
}
pub fn im_map_from_pairs<K, V, I>(pairs: I) -> ImMap<K, V>
where
I: IntoIterator<Item = (K, V)>,
K: Hash + Eq + Clone,
V: Clone,
{
let mut answer = ImMap::default();
for (key, value) in pairs {
answer.insert(key, value);
}
answer
}
pub fn send_set_from<V, I>(elems: I) -> SendSet<V>
where
I: IntoIterator<Item = V>,
V: Hash + Eq + Clone,
{
let mut answer = SendSet::default();
for elem in elems {
answer.insert(elem);
}
answer
}
// Check constraints
//
// Keep track of the used (in types or expectations) variables, and the declared variables (in
// flex_vars or rigid_vars fields of LetConstraint. These roc_collections should match: no duplicates
// and no variables that are used but not declared are allowed.
//
// There is one exception: the initial variable (that stores the type of the whole expression) is
// never declared, but is used.
pub fn assert_correct_variable_usage(constraint: &Constraint) {
// variables declared in constraint (flex_vars or rigid_vars)
// and variables actually used in constraints
let (declared, used) = variable_usage(constraint);
let used: ImSet<Variable> = used.into();
let mut decl: ImSet<Variable> = declared.rigid_vars.clone().into();
for var in declared.flex_vars.clone() {
decl.insert(var);
}
let diff = used.clone().relative_complement(decl);
// NOTE: this checks whether we're using variables that are not declared. For recursive type
// definitions, their rigid types are declared twice, which is correct!
if !diff.is_empty() {
println!("VARIABLE USAGE PROBLEM");
println!("used: {:?}", &used);
println!("rigids: {:?}", &declared.rigid_vars);
println!("flexs: {:?}", &declared.flex_vars);
println!("difference: {:?}", &diff);
panic!("variable usage problem (see stdout for details)");
}
}
#[derive(Default)]
pub struct SeenVariables {
pub rigid_vars: Vec<Variable>,
pub flex_vars: Vec<Variable>,
}
pub fn variable_usage(con: &Constraint) -> (SeenVariables, Vec<Variable>) {
let mut declared = SeenVariables::default();
let mut used = ImSet::default();
variable_usage_help(con, &mut declared, &mut used);
used.remove(unsafe { &Variable::unsafe_test_debug_variable(1) });
let mut used_vec: Vec<Variable> = used.into_iter().collect();
used_vec.sort();
declared.rigid_vars.sort();
declared.flex_vars.sort();
(declared, used_vec)
}
fn variable_usage_help(con: &Constraint, declared: &mut SeenVariables, used: &mut ImSet<Variable>) {
use Constraint::*;
match con {
True | SaveTheEnvironment => (),
Eq(tipe, expectation, _, _) => {
for v in tipe.variables() {
used.insert(v);
}
for v in expectation.get_type_ref().variables() {
used.insert(v);
}
}
Lookup(_, expectation, _) => {
for v in expectation.get_type_ref().variables() {
used.insert(v);
}
}
Pattern(_, _, tipe, pexpectation) => {
for v in tipe.variables() {
used.insert(v);
}
for v in pexpectation.get_type_ref().variables() {
used.insert(v);
}
}
Let(letcon) => {
declared.rigid_vars.extend(letcon.rigid_vars.clone());
declared.flex_vars.extend(letcon.flex_vars.clone());
variable_usage_help(&letcon.defs_constraint, declared, used);
variable_usage_help(&letcon.ret_constraint, declared, used);
}
And(constraints) => {
for sub in constraints {
variable_usage_help(sub, declared, used);
}
}
}
}

View file

@ -1,27 +1,97 @@
-extern crate roc;
+extern crate roc_gen;
extern crate roc_reporting;
-use roc::eval::Evaluated::*;
-use roc::eval::{call, eval, Evaluated};
-use roc::expr::Expr;
-use roc::parse;
-use roc::region::{Located, Region};
+use crate::helpers::{infer_expr, uniq_expr_with};
+use bumpalo::Bump;
+use inkwell::context::Context;
+use inkwell::module::Linkage;
+use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc_collections::all::ImMap;
use roc_gen::llvm::build::{
build_proc, build_proc_header, get_call_conventions, module_from_builtins,
};
use roc_gen::llvm::convert::basic_type_from_layout;
use roc_mono::expr::{Expr, Procs};
use roc_mono::layout::Layout;
use std::time::SystemTime;
use inkwell::targets::{
CodeModel, FileType, InitializationConfig, RelocMode, Target, TargetTriple,
};
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use target_lexicon::{Architecture, OperatingSystem, Triple, Vendor};
-fn main() -> std::io::Result<()> {
+pub mod helpers;
let argv = std::env::args().into_iter().collect::<Vec<String>>();
fn main() -> io::Result<()> {
let now = SystemTime::now();
let argv = std::env::args().collect::<Vec<String>>();
match argv.get(1) {
Some(filename) => {
-let mut file = File::open(filename)?;
+let mut path = Path::new(filename).canonicalize().unwrap();
if !path.is_absolute() {
path = std::env::current_dir()?.join(path).canonicalize().unwrap();
}
// Step 1: build the .o file for the app
let mut file = File::open(path.clone())?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
-let expr = parse::parse_string(contents.as_str()).unwrap();
-process_task(eval(expr))
+let dest_filename = path.with_extension("o");
+gen(
Path::new(filename).to_path_buf(),
contents.as_str(),
Triple::host(),
&dest_filename,
);
let end_time = now.elapsed().unwrap();
println!(
"Finished compilation and code gen in {} ms\n",
end_time.as_millis()
);
let cwd = dest_filename.parent().unwrap();
let lib_path = dest_filename.with_file_name("libroc_app.a");
// Step 2: turn the .o file into a .a static library
Command::new("ar") // TODO on Windows, use `link`
.args(&[
"rcs",
lib_path.to_str().unwrap(),
dest_filename.to_str().unwrap(),
])
.spawn()
.expect("`ar` failed to run");
// Step 3: have rustc compile the host and link in the .a file
Command::new("rustc")
.args(&["-L", ".", "host.rs", "-o", "app"])
.current_dir(cwd)
.spawn()
.expect("rustc failed to run");
// Step 4: Run the compiled app
Command::new(cwd.join("app")).spawn().unwrap_or_else(|err| {
panic!(
"{} failed to run: {:?}",
cwd.join("app").to_str().unwrap(),
err
)
});
Ok(())
}
None => {
println!("Usage: roc FILENAME.roc");
@ -31,109 +101,241 @@ fn main() -> std::io::Result<()> {
}
}
-fn process_task(evaluated: Evaluated) -> std::io::Result<()> {
-match evaluated {
-EvalError(region, problem) => {
-println!(
-"\n\u{001B}[4mruntime error\u{001B}[24m\n\n{} at {}\n",
-format!("{}", problem),
-format!("line {}, column {}", region.start_line, region.start_col)
+fn gen(filename: PathBuf, src: &str, target: Triple, dest_filename: &Path) {
+use roc_reporting::report::{can_problem, RocDocAllocator, DEFAULT_PALETTE};
+use roc_reporting::type_error::type_problem;
+// Build the expr
+let arena = Bump::new();
let (loc_expr, _output, can_problems, subs, var, constraint, home, interns) =
uniq_expr_with(&arena, src, &ImMap::default());
let mut type_problems = Vec::new();
let (content, mut subs) = infer_expr(subs, &mut type_problems, &constraint, var);
let src_lines: Vec<&str> = src.split('\n').collect();
let palette = DEFAULT_PALETTE;
// Report parsing and canonicalization problems
let alloc = RocDocAllocator::new(&src_lines, home, &interns);
for problem in can_problems.into_iter() {
let report = can_problem(&alloc, filename.clone(), problem);
let mut buf = String::new();
report.render_color_terminal(&mut buf, &alloc, &palette);
println!("\n{}\n", buf);
}
for problem in type_problems.into_iter() {
let report = type_problem(&alloc, filename.clone(), problem);
let mut buf = String::new();
report.render_color_terminal(&mut buf, &alloc, &palette);
println!("\n{}\n", buf);
}
// Generate the binary
let context = Context::create();
let module = module_from_builtins(&context, "app");
let builder = context.create_builder();
let fpm = PassManager::create(&module);
roc_gen::llvm::build::add_passes(&fpm);
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let layout = Layout::from_content(&arena, content, &subs, ptr_bytes).unwrap_or_else(|err| {
panic!(
"Code gen error in test: could not convert to layout. Err was {:?} and Subs were {:?}",
err, subs
)
});
let main_fn_type =
basic_type_from_layout(&arena, &context, &layout, ptr_bytes).fn_type(&[], false);
let main_fn_name = "$Test.main";
// Compile and add all the Procs before adding main
let mut env = roc_gen::llvm::build::Env {
arena: &arena,
builder: &builder,
context: &context,
interns,
module: arena.alloc(module),
ptr_bytes,
};
let mut procs = Procs::default();
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and get the low-level Expr from the canonical Expr
let mut mono_problems = std::vec::Vec::new();
let main_body = Expr::new(
&arena,
&mut subs,
&mut mono_problems,
loc_expr.value,
&mut procs,
home,
&mut ident_ids,
ptr_bytes,
);
-Ok(())
+// Put this module's ident_ids back in the interns, so we can use them in env.
env.interns.all_ident_ids.insert(home, ident_ids);
let mut headers = Vec::with_capacity(procs.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
for (symbol, opt_proc) in procs.as_map().into_iter() {
if let Some(proc) = opt_proc {
let (fn_val, arg_basic_types) = build_proc_header(&env, symbol, &proc);
headers.push((proc, fn_val, arg_basic_types));
}
-ApplyVariant(name, Some(mut vals)) => {
-match name.as_str() {
-"Echo" => {
-// Extract the string from the Echo variant.
-let string_to_be_displayed = match vals.pop() {
-Some(Str(payload)) => payload,
-Some(EvalError(region, err)) => {
+}
+// Build each proc using its header info.
+for (proc, fn_val, arg_basic_types) in headers {
+// NOTE: This is here to be uncommented in case verification fails.
+// (This approach means we don't have to defensively clone name here.)
+//
+// println!("\n\nBuilding and then verifying function {}\n\n", name);
+build_proc(&env, proc, &procs, fn_val, arg_basic_types);
+if fn_val.verify(true) {
+fpm.run_on(&fn_val);
+} else {
+// NOTE: If this fails, uncomment the above println to debug.
panic!(
-"RUNTIME ERROR in Echo: {} at {}",
-format!("{}", err),
-format!("line {}, column {}", region.start_line, region.start_col)
+"Non-main function failed LLVM verification. Uncomment the above println to debug!"
);
}
-Some(val) => {
-panic!("TYPE MISMATCH in Echo: {}", format!("{}", val));
+}
}
-None => {
-panic!("TYPE MISMATCH in Echo: None");
+// Add main to the module.
let cc = get_call_conventions(target.default_calling_convention().unwrap());
let main_fn = env.module.add_function(main_fn_name, main_fn_type, None);
main_fn.set_call_conventions(cc);
main_fn.set_linkage(Linkage::External);
// Add main's body
let basic_block = context.append_basic_block(main_fn, "entry");
builder.position_at_end(basic_block);
let ret = roc_gen::llvm::build::build_expr(
&env,
&ImMap::default(),
main_fn,
&main_body,
&Procs::default(),
);
builder.build_return(Some(&ret));
// Uncomment this to see the module's un-optimized LLVM instruction output:
// env.module.print_to_stderr();
if main_fn.verify(true) {
fpm.run_on(&main_fn);
} else {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Verify the module
if let Err(errors) = env.module.verify() {
panic!("😱 LLVM errors when defining module: {:?}", errors);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
// Emit the .o file
// NOTE: arch_str is *not* the same as the beginning of the magic target triple
// string! For example, if it's "x86-64" here, the magic target triple string
// will begin with "x86_64" (with an underscore) instead.
let arch_str = match target.architecture {
Architecture::X86_64 => {
Target::initialize_x86(&InitializationConfig::default());
"x86-64"
}
Architecture::Arm(_) => {
Target::initialize_arm(&InitializationConfig::default());
"arm"
}
Architecture::Wasm32 => {
Target::initialize_webassembly(&InitializationConfig::default());
"wasm32"
}
_ => panic!(
"TODO gracefully handle unsupported target architecture: {:?}",
target.architecture
),
};
-// Print the string to the console, since that's what Echo does!
-println!("{}", string_to_be_displayed);
-// Continue with the callback.
-let callback = vals.pop().unwrap();
+let opt = OptimizationLevel::Default;
+let reloc = RelocMode::Default;
+let model = CodeModel::Default;
+// Best guide I've found on how to determine these magic strings:
+//
// https://stackoverflow.com/questions/15036909/clang-how-to-list-supported-target-architectures
-process_task(call(
-Region {
-start_line: 0,
-start_col: 0,
-end_line: 0,
-end_col: 0,
-},
-callback,
-vec![with_zero_loc(Expr::EmptyRecord)],
-))
-}
-"Read" => {
-// Read a line from from stdin, since that's what Read does!
-let mut input = String::new();
-io::stdin().read_line(&mut input)?;
-// Continue with the callback.
-let callback = vals.pop().unwrap();
-process_task(call(
-Region {
-start_line: 0,
-start_col: 0,
-end_line: 0,
-end_col: 0,
-},
-callback,
-vec![with_zero_loc(Expr::Str(input.trim().to_string()))],
-))
-}
-"Success" => {
-// We finished all our tasks. Great! No need to print anything.
-Ok(())
-}
-_ => {
+let target_triple_str = match target {
+Triple {
+architecture: Architecture::X86_64,
+vendor: Vendor::Unknown,
+operating_system: OperatingSystem::Linux,
+..
+} => "x86_64-unknown-linux-gnu",
+Triple {
+architecture: Architecture::X86_64,
+vendor: Vendor::Pc,
+operating_system: OperatingSystem::Linux,
+..
+} => "x86_64-pc-linux-gnu",
+Triple {
+architecture: Architecture::X86_64,
+vendor: Vendor::Unknown,
+operating_system: OperatingSystem::Darwin,
+..
+} => "x86_64-unknown-darwin10",
+Triple {
+architecture: Architecture::X86_64,
+vendor: Vendor::Apple,
+operating_system: OperatingSystem::Darwin,
+..
+} => "x86_64-apple-darwin10",
+_ => panic!("TODO gracefully handle unsupported target: {:?}", target),
+};
+let target_machine = Target::from_name(arch_str)
+.unwrap()
+.create_target_machine(
+&TargetTriple::create(target_triple_str),
+arch_str,
+"+avx2", // TODO this string was used uncritically from an example, and should be reexamined
+opt,
+reloc,
+model,
// We don't recognize this variant, so display it and exit.
display_val(ApplyVariant(name, Some(vals)));
Ok(())
}
}
}
output => {
// We don't recognize this value, so display it and exit.
display_val(output);
Ok(())
}
}
}
fn with_zero_loc<T>(val: T) -> Located<T> {
Located::new(
val,
Region {
start_line: 0,
start_col: 0,
end_line: 0,
end_col: 0,
},
)
-}
-fn display_val(evaluated: Evaluated) {
-println!("\n\u{001B}[4mroc out\u{001B}[24m\n\n{}\n", evaluated);
+.unwrap();
+target_machine
+.write_to_file(&env.module, FileType::Object, &dest_filename)
+.expect("Writing .o file failed");
+println!("\nSuccess! 🎉\n\n\t{}\n", dest_filename.display());
}

View file

@ -0,0 +1,9 @@
[package]
name = "roc_builtins_bitcode"
version = "0.1.0"
authors = ["Richard Feldman <richard.t.feldman@gmail.com>"]
repository = "https://github.com/rtfeldman/roc"
readme = "README.md"
edition = "2018"
description = "Generate LLVM bitcode for Roc builtins"
license = "Apache-2.0"

View file

@ -0,0 +1,56 @@
# Bitcode for Builtins
Roc's builtins are implemented in the compiler using LLVM only.
When their implementations are simple enough (e.g. addition), they
can be implemented directly in Inkwell.
When their implementations are complex enough, it's nicer to
implement them in a higher-level language like Rust, compile the
result to LLVM bitcode, and import that bitcode into the compiler.
Here is the process for doing that.
## Building the bitcode
The source we'll use to generate the bitcode is in `src/lib.rs` in this directory.
To generate the bitcode, `cd` into `compiler/builtins/bitcode/` and run:
```bash
$ cargo rustc --release --lib -- --emit=llvm-bc
```
Then look in the root `roc` source directory under `target/release/deps/` for a file
with a name like `roc_builtins_bitcode-8da0901c58a73ebf.bc` - except
probably with a different hash before the `.bc`. If there's more than one
`*.bc` file in that directory, delete the whole `deps/` directory and re-run
the `cargo rustc` command above to regenerate it.
> If you want to take a look at the human-readable LLVM IR rather than the
> bitcode, run this instead and look for a `.ll` file instead of a `.bc` file:
>
> ```bash
> $ cargo rustc --release --lib -- --emit=llvm-ir
> ```
## Importing the bitcode
The bitcode is a bunch of bytes that aren't particularly human-readable.
Since Roc is designed to be distributed as a single binary, these bytes
need to be included in the raw source somewhere.
The `llvm/src/build.rs` file statically imports these raw bytes
using the [`include_bytes!` macro](https://doc.rust-lang.org/std/macro.include_bytes.html),
so we just need to move the `.bc` file from the previous step to the correct
location.
The current `.bc` file is located at:
```
compiler/gen/src/llvm/builtins.bc
```
...so you want to overwrite it with the new `.bc` file in `target/deps/`
Once that's done, `git status` should show that the `builtins.bc` file
has been changed. Commit that change and you're done!
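
As a rough illustration of what "statically imports these raw bytes" means in practice, here is a hedged Rust sketch. The constant name, the `llvm/builtins.bc` path, and the specific inkwell calls are assumptions made for this example, not the project's actual code (the real wiring lives in `llvm/src/build.rs`, e.g. behind `module_from_builtins`):

```rust
// Hedged sketch only: the constant name, the path, and the inkwell calls below
// are assumptions for illustration; the real import lives in llvm/src/build.rs.
use inkwell::context::Context;
use inkwell::memory_buffer::MemoryBuffer;
use inkwell::module::Module;

// Embed the prebuilt bitcode bytes directly into the compiler binary.
static BUILTINS_BC: &[u8] = include_bytes!("llvm/builtins.bc");

fn load_builtins(ctx: &Context) -> Module<'_> {
    // Wrap the embedded bytes in an LLVM memory buffer...
    let buf = MemoryBuffer::create_from_memory_range(BUILTINS_BC, "builtins");

    // ...and parse them back into an LLVM module that code gen can link against.
    // (Exact inkwell call assumed; roc's module_from_builtins plays this role.)
    Module::parse_bitcode_from_buffer(&buf, ctx)
        .expect("builtins.bc should be valid LLVM bitcode")
}

fn main() {
    let ctx = Context::create();
    let module = load_builtins(&ctx);

    // Builtins compiled from src/lib.rs (like i64_to_f64_ below) show up as
    // ordinary functions that generated code can call.
    assert!(module.get_function("i64_to_f64_").is_some());
}
```

The point is only that the bitcode rides along inside the compiler executable, so distributing Roc as a single binary still works.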

View file

@ -0,0 +1,12 @@
// NOTE: Editing this file on its own does nothing! The procedure for
// incorporating changes here is in this crate's README.
#![crate_type = "lib"]
#![no_std]
/// TODO replace this with a normal Inkwell build_cast call - this was just
/// used as a proof of concept for getting bitcode importing working!
#[no_mangle]
pub fn i64_to_f64_(num: i64) -> f64 {
num as f64
}

View file

@ -48,7 +48,7 @@ interface Float
## See #Float.highest and #Float.lowest for the highest and
## lowest values that can be held in a #Float.
##
-## Like #Int, it's possible for #Float operations to overflow and underflow
+## Like #Int, it's possible for #Float operations to overflow.
## if they exceed the bounds of #Float.highest and #Float.lowest. When this happens:
##
## * In a development build, you'll get an assertion failure.
@ -151,12 +151,19 @@ div = \numerator, denominator ->
##
## >>> Float.pi
## >>> |> Float.mod 2.0
-#mod : Float, Float -> Result Float DivByZero
-## Return the reciprocal of the #Float.
-#recip : Float -> Result Float [ DivByZero ]*
-#recip = \float ->
-# 1.0 / float
+mod : Float a, Float a -> Result Float DivByZero
+tryMod : Float a, Float a -> Result (Float a) [ DivByZero ]*
+## Return the reciprocal of a #Float - that is, divides `1.0` by the given number.
+##
+## Crashes if given `0.0`, because division by zero is undefined in mathematics.
+##
+## For a version that does not crash, use #tryRecip
+recip : Float a -> Result (Float a) [ DivByZero ]*
+tryRecip : Float a -> Result (Float a) [ DivByZero ]*
## Return an approximation of the absolute value of the square root of the #Float.
##
@ -169,32 +176,39 @@ div = \numerator, denominator ->
## >>> Float.sqrt 0.0
##
## >>> Float.sqrt -4.0
-#sqrt : Float -> Result Float InvalidSqrt
+sqrt : Float a -> Result (Float a) [ InvalidSqrt ]
## Constants
## An approximation of e, specifically 2.718281828459045.
-#e : Float
-e = 2.718281828459045
+e : Float *
## An approximation of pi, specifically 3.141592653589793.
-#pi : Float
-pi = 3.141592653589793
+pi : Float *
+## Sort ascending - that is, with the lowest first, and the highest last.
+##
+## List.sort Float.asc [ 3.0, 6.0, 0.0 ]
+##
+asc : Float a, Float a -> [ Eq, Lt, Gt ]
+## Sort descending - that is, with the highest first, and the lowest last.
+##
+## List.sort Float.desc [ 3.0, 6.0, 0.0 ]
+##
+desc : Float a, Float a -> [ Eq, Lt, Gt ]
## Limits
## The highest supported #Float value you can have, which is approximately 1.8 × 10^308.
##
## If you go higher than this, your running Roc code will crash - so be careful not to!
-#highest : Float
-highest : Num.Num Float.FloatingPoint
-highest = 1.0
+highest : Float *
## The lowest supported #Float value you can have, which is approximately -1.8 × 10^308.
##
## If you go lower than this, your running Roc code will crash - so be careful not to!
-#lowest : Float
-lowest = 1.0
+lowest : Float *
## The highest integer that can be represented as a #Float without losing precision.
## It is equal to 2^53, which is approximately 9 × 10^15.
@ -206,8 +220,7 @@ lowest = 1.0
## >>> Float.highestInt + 100 # Increasing may lose precision
##
## >>> Float.highestInt - 100 # Decreasing is fine - but watch out for lowestLosslessInt!
-#highestInt : Float
-highestInt = 1.0
+highestInt : Float *
## The lowest integer that can be represented as a #Float without losing precision.
## It is equal to -2^53, which is approximately -9 × 10^15.
@ -219,5 +232,4 @@ highestInt = 1.0
## >>> Float.lowestIntVal - 100 # Decreasing may lose precision
##
## >>> Float.lowestIntVal + 100 # Increasing is fine - but watch out for highestInt!
-#lowestInt : Float
-lowestInt = 1.0
+lowestInt : Float *
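
A side note on the `try*` pattern introduced above (`tryMod`, `tryRecip`): the doc comments describe one flavor that crashes on `0.0` and one that reports `DivByZero` through a `Result`. A minimal Rust analogue of that shape, purely for illustration (the function names and error type below are invented, not Roc or compiler code):

```rust
// Illustrative only: a Rust analogue of the recip / tryRecip split described above.
#[derive(Debug, PartialEq)]
enum FloatErr {
    DivByZero,
}

/// Crashing flavor: divides 1.0 by the input and panics on 0.0,
/// mirroring the doc comment "Crashes if given 0.0".
fn recip(x: f64) -> f64 {
    if x == 0.0 {
        panic!("division by zero is undefined in mathematics");
    }
    1.0 / x
}

/// Non-crashing flavor: reports DivByZero through a Result instead.
fn try_recip(x: f64) -> Result<f64, FloatErr> {
    if x == 0.0 {
        Err(FloatErr::DivByZero)
    } else {
        Ok(1.0 / x)
    }
}

fn main() {
    assert_eq!(recip(4.0), 0.25);
    assert_eq!(try_recip(0.0), Err(FloatErr::DivByZero));
    assert_eq!(try_recip(2.0), Ok(0.5));
}
```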

View file

@ -4,9 +4,72 @@ interface Int
## Types
-# Integer := Integer
+## A fixed-size integer - that is, a number with no fractional component.
##
## Integers come in two flavors: signed and unsigned. Signed integers can be
## negative ("signed" refers to how they can incorporate a minus sign),
## whereas unsigned integers cannot be negative.
##
## Since integers have a fixed size, the size you choose determines both the
## range of numbers it can represent, and also how much memory it takes up.
##
## #U8 is an example of an integer. It is an unsigned #Int that takes up 8 bits
## (aka 1 byte) in memory. The `U` is for Unsigned and the 8 is for 8 bits.
## Because it has 8 bits to work with, it can store 256 numbers (2^8),
## and because it is unsigned, its lowest value is 0. This means the 256 numbers
## it can store range from 0 to 255.
##
## #I8 is a signed integer that takes up 8 bits. The `I` is for Integer, since
## integers in mathematics are signed by default. Because it has 8 bits just
## like #U8, it can store 256 numbers (still 2^8), but because it is signed,
## the range is different. Its 256 numbers range from -128 to 127.
##
## Here are some other examples:
##
## * #U16 is like #U8, except it takes up 16 bits (2 bytes) in memory. It can store 65,536 numbers (2^16), ranging from 0 to 65,535.
## * #I16 is like #U16, except it is signed. It can still store the same 65,536 numbers (2^16), ranging from -32,768 to 32,767.
##
## This pattern continues up to #U128 and #I128.
##
## ## Performance notes
##
## In general, using smaller numeric sizes means your program will use less memory.
## However, if a mathematical operation results in an answer that is too big
## or too small to fit in the size available for that answer (which is typically
## the same size as the inputs), then you'll get an overflow error.
##
## As such, minimizing memory usage without causing overflows involves choosing
## number sizes based on your knowledge of what numbers you expect your program
## to encounter at runtime.
##
## Minimizing memory usage does not imply maximum runtime speed!
## CPUs are typically fastest at performing integer operations on integers that
## are the same size as that CPU's native machine word size. That means a 64-bit
## CPU is typically fastest at executing instructions on #U64 and #I64 values,
## whereas a 32-bit CPU is typically fastest on #U32 and #I32 values.
##
## Putting these factors together, here are some reasonable guidelines for optimizing performance through integer size choice:
##
## * Start by deciding if this integer should allow negative numbers, and choose signed or unsigned accordingly.
## * Next, think about the range of numbers you expect this number to hold. Choose the smallest size you will never expect to overflow, no matter the inputs your program receives. (Validating inputs for size, and presenting the user with an error if they are too big, can help guard against overflow.)
## * Finally, if a particular operation is too slow at runtime, and you know the native machine word size on which it will be running (most often either 64-bit or 32-bit), try switching to an integer of that size and see if it makes a meaningful difference. (The difference is typically extremely small.)
Int size : Num (@Int size)
-## A 64-bit signed integer. All number literals without decimal points are #Int values.
+## A signed 8-bit integer, ranging from -128 to 127
I8 : Int @I8
U8 : Int @U8
U16 : Int @U16
I16 : Int @I16
U32 : Int @U32
I32 : Int @I32
I64 : Int @I64
U64 : Int @U64
I128 : Int @I128
U128 : Int @U128
ILen : Int @ILen
ULen : Int @ULen
## A 64-bit signed integer. All number literals without decimal points are compatible with #Int values.
##
## >>> 1
##
@ -17,22 +80,72 @@ interface Int
##
## >>> 1_000_000
##
-## See #Int.highest and #Int.lowest for the highest and
-## lowest values that can be held in an #Int.
+## Integers come in two flavors: *signed* and *unsigned*.
+##
## * *Unsigned* integers can never be negative. The lowest value they can hold is zero.
## * *Signed* integers can be negative.
##
## Integers also come in different sizes. Choosing a size depends on your performance
## needs and the range of numbers you need to represent. At a high level, the
## general trade-offs are:
##
## * Larger integer sizes can represent a wider range of numbers. If you absolutely need to represent numbers in a certain range, make sure to pick an integer size that can hold them!
## * Smaller integer sizes take up less memory. This savings rarely matters in variables and function arguments, but the sizes of integers that you use in data structures can add up. This can also affect whether those data structures fit in [cache lines](https://en.wikipedia.org/wiki/CPU_cache#Cache_performance), which can be a performance bottleneck.
## * CPUs typically work fastest on their native [word size](https://en.wikipedia.org/wiki/Word_(computer_architecture)). For example, 64-bit CPUs tend to work fastest on 64-bit integers. Especially if your performance profiling shows that you are CPU bound rather than memory bound, consider #ILen or #ULen.
##
## Here are the different fixed size integer types:
##
## | Range | Type | Size |
## |--------------------------------------------------------|-------|----------|
## | ` -128` | #I8 | 1 Byte |
## | ` 127` | | |
## |--------------------------------------------------------|-------|----------|
## | ` 0` | #U8 | 1 Byte |
## | ` 255` | | |
## |--------------------------------------------------------|-------|----------|
## | ` -32_768` | #I16 | 2 Bytes |
## | ` 32_767` | | |
## |--------------------------------------------------------|-------|----------|
## | ` 0` | #U16 | 2 Bytes |
## | ` 65_535` | | |
## |--------------------------------------------------------|-------|----------|
## | ` -2_147_483_648` | #I32 | 4 Bytes |
## | ` 2_147_483_647` | | |
## |--------------------------------------------------------|-------|----------|
## | ` 0` | #U32 | 4 Bytes |
## | ` (over 4 billion) 4_294_967_295` | | |
## |--------------------------------------------------------|-------|----------|
## | ` -9_223_372_036_854_775_808` | #I64 | 8 Bytes |
## | ` 9_223_372_036_854_775_807` | | |
## |--------------------------------------------------------|-------|----------|
## | ` 0` | #U64 | 8 Bytes |
## | ` (over 18 quintillion) 18_446_744_073_709_551_615` | | |
## |--------------------------------------------------------|-------|----------|
## | `-170_141_183_460_469_231_731_687_303_715_884_105_728` | #I128 | 16 Bytes |
## | ` 170_141_183_460_469_231_731_687_303_715_884_105_727` | | |
## |--------------------------------------------------------|-------|----------|
## | ` (over 340 undecillion) 0` | #U128 | 16 Bytes |
## | ` 340_282_366_920_938_463_463_374_607_431_768_211_455` | | |
##
## There are also two variable-size integer types: #Iword and #Uword.
## Their sizes are determined by the machine word size for the system you're
## compiling for. For example, on a 64-bit system, #Iword is the same as #I64,
## and #Uword is the same as #U64.
##
## If any operation would result in an #Int that is either too big
-## or too small to fit in that range (e.g. running `Int.highest + 1`),
+## or too small to fit in that range (e.g. calling `Int.highest32 + 1`),
## then the operation will *overflow* or *underflow*, respectively.
##
## When this happens:
##
## * In a development build, you'll get an assertion failure.
-## * In a release build, you'll get [wrapping overflow](https://en.wikipedia.org/wiki/Integer_overflow#Saturated_arithmetic), which is almost always a mathematically incorrect outcome for the requested operation.
+## * In a release build, you'll get [wrapping overflow](https://en.wikipedia.org/wiki/Integer_overflow), which is almost always a mathematically incorrect outcome for the requested operation. (If you actually want wrapping, because you're writing something like a hash function, use functions like #Int.addWrapping.)
##
## As such, it's very important to design your code not to exceed these bounds!
## If you need to do math outside these bounds, consider using
## a different representation other than #Int. The reason #Int has these
## bounds is for performance reasons.
-#Int : Num Integer
+# Int size : Num [ @Int size ]
## Arithmetic
@ -90,21 +203,37 @@ interface Int
#bitwiseNot : Int -> Int
## Sort ascending - that is, with the lowest first, and the highest last.
##
## List.sort Int.asc [ 3, 6, 0 ]
##
asc : Int a, Int a -> [ Eq, Lt, Gt ]
## Sort descending - that is, with the highest first, and the lowest last.
##
## List.sort Int.desc [ 3, 6, 0 ]
##
desc : Int a, Int a -> [ Eq, Lt, Gt ]
## TODO should we offer hash32 etc even if someday it has to do a hash64 and truncate?
##
## CAUTION: This function may give different answers in future releases of Roc,
## so be aware that if you rely on the exact answer this gives today, your
## code may break in a future Roc release.
hash64 : a -> U64
## Limits
-## The highest number that can be stored in an #Int without overflowing its
-## available memory (64 bits total) and crashing.
+## The highest number that can be stored in an #I32 without overflowing its
+## available memory and crashing.
##
-## Note that this is smaller than the positive version of #Int.lowest,
-## which means if you call #Num.abs on #Int.lowest, it will crash!
-#highest : Int
-highest = 0x7fff_ffff_ffff_ffff
+## Note that this is smaller than the positive version of #Int.lowestI32
+## which means if you call #Num.abs on #Int.lowestI32, it will overflow and crash!
+highestI32 : I32
-## The lowest number that can be stored in an #Int without overflowing its
-## available memory (64 bits total) and crashing.
+## The lowest number that can be stored in an #I32 without overflowing its
+## available memory and crashing.
##
## Note that the positive version of this number is larger than
-## #Int.highest, which means if you call #Num.abs on #Int.lowest,
-## it will crash!
-#lowest : Int
-lowest = -0x8000_0000_0000_0000
+## #Int.highestI32, which means if you call #Num.abs on #Int.lowestI32, it will overflow and crash!
+lowest : I32
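
The overflow rules described in the Int docs above (assertion failure in development builds, wrapping in release builds, explicit `addWrapping`-style functions when wrapping is actually wanted) have a direct analogue in Rust's standard integer methods, sketched here for illustration only:

```rust
// Illustration of the overflow behavior described in the Int docs above,
// using Rust's standard integer operations (not Roc or compiler code).
fn main() {
    // Wrapping: the "release build" behavior the docs warn about, and what an
    // explicit opt-in function in the addWrapping style would do on purpose.
    assert_eq!(i32::MAX.wrapping_add(1), i32::MIN);
    assert_eq!(255_u8.wrapping_add(1), 0);

    // Checked: detect overflow instead of silently wrapping.
    assert_eq!(i32::MAX.checked_add(1), None);
    assert_eq!(2_147_483_647_i32.checked_add(1), None); // the same #I32 bound as in the table above

    // Underflow at the low end of the range behaves the same way.
    assert_eq!(i32::MIN.checked_sub(1), None);
}
```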

View file

@ -27,6 +27,11 @@ interface List
## > applies to lists that take up 8 machine words in memory or fewer, so
## > for example on a 64-bit system, a list of 8 #Int values will be
## > stored as a flat array instead of as an RRBT.
##
## One #List can store up to 2,147,483,648 elements (just over 2 billion). If you need to store more
## elements than that, you can split them into smaller lists and operate
## on those instead of on one large #List. This often runs faster in practice,
## even for strings much smaller than 2 gigabytes.
List elem : @List elem
## Initialize
@ -54,7 +59,8 @@ fromResult : Result elem * -> List elem
reverse : List elem -> List elem
-sort : List elem, Sorter elem -> List elem
+sort : List elem, (elem, elem -> [ Eq, Lt, Gt ]) -> List elem
+sortBy : List elem, (elem -> field), (field, field -> [ Eq, Lt, Gt ]) -> List elem
## Convert each element in the list to something new, by calling a conversion
## function on each of them. Then return a new list of the converted values.
@ -191,6 +197,13 @@ walkBackwards : List elem, { start : state, step : (state, elem -> state) } -> s
## Check
## Returns the length of the list - the number of elements it contains.
##
## One #List can store up to 2,147,483,648 elements (just over 2 billion), which
## is exactly equal to the highest valid #I32 value. This means the #U32 this function
## returns can always be safely converted to an #I32 without losing any data.
len : List * -> U32
isEmpty : List * -> Bool
contains : List elem, elem -> Bool
@ -198,3 +211,4 @@ contains : List elem, elem -> Bool
all : List elem, (elem -> Bool) -> Bool
any : List elem, (elem -> Bool) -> Bool
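
For comparison, the comparator-based `sort` and key-extracting `sortBy` signatures above map closely onto Rust's `sort_by` and `sort_by_key`; a small illustrative sketch (standard Rust, not Roc code):

```rust
// Rough Rust analogue of the comparator-based sort / sortBy shown above.
// std::cmp::Ordering plays the role of the [ Eq, Lt, Gt ] tag union.
fn main() {
    let mut floats = vec![3.0_f64, 6.0, 0.0];
    // Like `List.sort Float.asc [ 3.0, 6.0, 0.0 ]`: pass a two-argument comparator.
    floats.sort_by(|a, b| a.partial_cmp(b).expect("no NaN in this example"));
    assert_eq!(floats, vec![0.0, 3.0, 6.0]);

    let mut words = vec!["roc", "a", "list"];
    // Like `sortBy`: extract a key from each element, then compare the keys.
    words.sort_by_key(|w| w.len());
    assert_eq!(words, vec!["a", "roc", "list"]);
}
```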

View file

@ -1,4 +1,4 @@
-api Num provides Num, DivByZero..., negate, abs, add, sub, mul, isOdd, isEven, isPositive, isNegative, isZero
+api Num provides Num, DivByZero..., neg, abs, add, sub, mul, isOdd, isEven, isPositive, isNegative, isZero
## Types
@ -25,17 +25,21 @@ Num range : @Num range
## Return a negative number when given a positive one, and vice versa.
##
-## Some languages have a unary `-` operator (for example, `-(a + b)`), but Roc does not. If you want to negate a number, calling this function is the way to do it!
+## >>> Num.neg 5
##
-## > Num.neg 5
+## >>> Num.neg -2.5
##
-## > Num.neg -2.5
+## >>> Num.neg 0
##
-## > Num.neg 0
+## >>> Num.neg 0.0
##
-## > Num.neg 0.0
+## This is safe to use with any #Float, but it can cause overflow when used with certain #Int values.
##
-## This will crash when given #Int.lowestValue, because doing so will result in a number higher than #Int.highestValue.
+## For example, calling #Num.neg on the lowest value of a signed integer (such as #Int.lowestI64 or #Int.lowestI32) will cause overflow.
+## This is because, for any given size of signed integer (32-bit, 64-bit, etc.) its negated lowest value turns out to be 1 higher than
+## the highest value it can represent. (For this reason, calling #Num.abs on the lowest signed value will also cause overflow.)
+##
+## Additionally, calling #Num.neg on any unsigned integer (such as any #U64 or #U32 value) other than 0 will cause overflow.
##
## (It will never crash when given a #Float, however, because of how floating point numbers represent positive and negative numbers.)
neg : Num range -> Num range
@ -44,14 +48,23 @@ neg : Num range -> Num range
##
## * For a positive number, returns the same number.
## * For a negative number, returns the same number except positive.
+## * For zero, returns zero.
##
-## > Num.abs 4
+## >>> Num.abs 4
##
-## > Num.abs -2.5
+## >>> Num.abs -2.5
##
-## > Num.abs 0
+## >>> Num.abs 0
##
-## > Num.abs 0.0
+## >>> Num.abs 0.0
+##
+## This is safe to use with any #Float, but it can cause overflow when used with certain #Int values.
+##
+## For example, calling #Num.abs on the lowest value of a signed integer (such as #Int.lowestI64 or #Int.lowestI32) will cause overflow.
+## This is because, for any given size of signed integer (32-bit, 64-bit, etc.) its negated lowest value turns out to be 1 higher than
+## the highest value it can represent. (For this reason, calling #Num.neg on the lowest signed value will also cause overflow.)
+##
+## Calling this on an unsigned integer (like #U32 or #U64) never does anything.
abs : Num range -> Num range
## Check
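
The `Num.neg` / `Num.abs` caveats above are the usual two's-complement asymmetry. A short Rust illustration using checked arithmetic (standard library only, nothing Roc-specific):

```rust
// Illustration of why neg/abs overflow on the lowest signed value,
// as described in the Num docs above (standard Rust, not Roc code).
fn main() {
    // For any signed width, -MIN is one larger than MAX can hold...
    assert_eq!(i32::MIN.checked_neg(), None);
    // ...and abs(MIN) hits exactly the same wall.
    assert_eq!(i32::MIN.checked_abs(), None);
    // Every other value negates and takes abs just fine.
    assert_eq!((-5_i32).checked_abs(), Some(5));

    // Negating a nonzero unsigned value also has nowhere to go.
    assert_eq!(5_u32.checked_neg(), None);
    assert_eq!(0_u32.checked_neg(), Some(0));
}
```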

View file

@ -2,7 +2,59 @@ api Str provides Str, isEmpty, join
## Types
-Str := Str
+## A [Unicode](https://unicode.org) text value.
##
## Dealing with text is a deep topic, so by design, Roc's `Str` module sticks
## to the basics. For more advanced use cases like working with raw [code points](https://unicode.org/glossary/#code_point),
## see the [roc/unicode](roc/unicode) package, and for locale-specific text
## functions (including capitalization, as capitalization rules vary by locale)
## see the [roc/locale](roc/locale) package.
##
## ### Unicode
##
## Unicode can represent text values which span multiple languages, symbols, and emoji.
## Here are some valid Roc strings:
##
## * "Roc"
## * "鹏"
## * "🐦"
##
## Every Unicode string is a sequence of [grapheme clusters](https://unicode.org/glossary/#grapheme_cluster).
## A grapheme cluster corresponds to what a person reading a string might call
## a "character", but because the term "character" is used to mean many different
## concepts across different programming languages, we intentionally avoid it in Roc.
## Instead, we use the term "clusters" as a shorthand for "grapheme clusters."
##
## You can get the number of grapheme clusters in a string by calling `Str.countClusters` on it:
##
## >>> Str.countClusters "Roc"
##
## >>> Str.countClusters "音乐"
##
## >>> Str.countClusters "👍"
##
## > The `countClusters` function traverses the entire string to calculate its answer,
## > so it's much better for performance to use `Str.isEmpty` instead of
## > calling `Str.countClusters` and checking whether the count was `0`.
##
## ### Escape characters
##
## ### String interpolation
##
## ### Encoding
##
## Roc strings are not coupled to any particular
## [encoding](https://en.wikipedia.org/wiki/Character_encoding). As it happens,
## they are currently encoded in UTF-8, but this module is intentionally designed
## not to rely on that implementation detail so that a future release of Roc can
## potentially change it without breaking existing Roc applications.
##
## This module has functions that can convert a #Str to a #List of raw [code unit](https://unicode.org/glossary/#code_unit)
## integers (not to be confused with the [code points](https://unicode.org/glossary/#code_point)
## mentioned earlier) in a particular encoding. If you need encoding-specific functions,
## you should take a look at the [roc/unicode](roc/unicode) package.
## It has many more tools than this module does!
Str : [ @Str ]
## Convert
@ -15,10 +67,22 @@ Str := Str
## but it's recommended to pass much smaller numbers instead.
##
## Passing a negative number for decimal places is equivalent to passing 0.
-decimal : Int, Float -> Str
+decimal : Float *, ULen -> Str
## Convert an #Int to a string.
-int : Float -> Str
+int : Int * -> Str
## Split a string around a separator.
##
## >>> Str.splitClusters "1,2,3" ","
##
## Passing `""` for the separator is not useful; it returns the original string
## wrapped in a list.
##
## >>> Str.splitClusters "1,2,3" ""
##
## To split a string into its grapheme clusters, use #Str.clusters
split : Str, Str -> List Str
## Check
@ -52,4 +116,196 @@ padStart : Str, Int, Str -> Str
padEnd : Str, Int, Str -> Str
## Grapheme Clusters
## Split a string into its grapheme clusters.
##
## >>> Str.clusters "1,2,3"
##
## >>> Str.clusters "👍👍👍"
##
clusters : Str -> List Str
reverseClusters : Str -> Str
foldClusters : Str, { start: state, step: (state, Str -> state) } -> state
## Returns #True if the string begins with a capital letter, and #False otherwise.
##
## >>> Str.isCapitalized "hi"
##
## >>> Str.isCapitalized "Hi"
##
## >>> Str.isCapitalized " Hi"
##
## >>> Str.isCapitalized "Česká"
##
## >>> Str.isCapitalized "Э"
##
## >>> Str.isCapitalized "東京"
##
## >>> Str.isCapitalized "🐦"
##
## >>> Str.isCapitalized ""
##
## Since the rules for how to capitalize an uncapitalized string vary by locale,
## see the [roc/locale](roc/locale) package for functions which do that.
isCapitalized : Str -> Bool
## ## Code Units
##
## Besides grapheme clusters, another way to break down strings is into
## raw code unit integers.
##
## Code units are no substitute for grapheme clusters!
## These functions exist to support advanced use cases like those found in
## [roc/unicode](roc/unicode), and using code units when grapheme clusters would
## be more appropriate can very easily lead to bugs.
##
## For example, `Str.countClusters "👩‍👩‍👦‍👦"` returns `1`,
## whereas `Str.toUtf8 "👩‍👩‍👦‍👦"` returns a list with a length of 25,
## `Str.toUtf16 "👩‍👩‍👦‍👦"` returns a list with a length of 11,
## and `Str.toUtf32 "👩‍👩‍👦‍👦"` returns a list with a length of 7.
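Those counts can be reproduced with plain Rust iterators over the same string (a verification sketch, not Roc code):

```
fn main() {
    // 👩 ZWJ 👩 ZWJ 👦 ZWJ 👦 — the family emoji from the example above.
    let family = "👩\u{200D}👩\u{200D}👦\u{200D}👦";

    // One grapheme cluster, but...
    assert_eq!(family.bytes().count(), 25);        // UTF-8 code units
    assert_eq!(family.encode_utf16().count(), 11); // UTF-16 code units
    assert_eq!(family.chars().count(), 7);         // UTF-32 code units (scalar values)
}
```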
## Return a #List of the string's #U8 UTF-8 [code units](https://unicode.org/glossary/#code_unit).
## (To split the string into a #List of smaller #Str values instead of #U8 values,
## see #Str.split and #Str.clusters.)
##
## >>> Str.toUtf8 "👩‍👩‍👦‍👦"
##
## >>> Str.toUtf8 "Roc"
##
## >>> Str.toUtf8 "鹏"
##
## >>> Str.toUtf8 "🐦"
##
## For a more flexible function that walks through each of these #U8 code units
## without creating a #List, see #Str.foldUtf8 and #Str.foldRevUtf8.
toUtf8 : Str -> List U8
## Return a #List of the string's #U16 UTF-16 [code units](https://unicode.org/glossary/#code_unit).
## (To split the string into a #List of smaller #Str values instead of #U16 values,
## see #Str.split and #Str.clusters.)
##
## >>> Str.toUtf16 "👩‍👩‍👦‍👦"
##
## >>> Str.toUtf16 "Roc"
##
## >>> Str.toUtf16 "鹏"
##
## >>> Str.toUtf16 "🐦"
##
## For a more flexible function that walks through each of these #U16 code units
## without creating a #List, see #Str.foldUtf16 and #Str.foldRevUtf16.
toUtf16 : Str -> List U16
## Return a #List of the string's #U32 UTF-32 [code units](https://unicode.org/glossary/#code_unit).
## (To split the string into a #List of smaller #Str values instead of #U32 values,
## see #Str.split and #Str.clusters.)
##
## >>> Str.toUtf32 "👩‍👩‍👦‍👦"
##
## >>> Str.toUtf32 "Roc"
##
## >>> Str.toUtf32 "鹏"
##
## >>> Str.toUtf32 "🐦"
##
## For a more flexible function that walks through each of these #U32 code units
## without creating a #List, see #Str.foldUtf32 and #Str.foldRevUtf32.
toUtf32 : Str -> List U32
## Walk through the string's #U8 UTF-8 [code units](https://unicode.org/glossary/#code_unit)
## to build up a state.
## (If you want a `step` function which receives a #Str instead of a #U8, see #Str.foldClusters.)
##
## Here are the #U8 values that will be passed to `step` when this function is
## called on various strings:
##
## * `"👩‍👩‍👦‍👦"` passes 240, 159, 145, 169, 226, 128, 141, 240, 159, 145, 169, 226, 128, 141, 240, 159, 145, 166, 226, 128, 141, 240, 159, 145, 166
## * `"Roc"` passes 82, 111, 99
## * `"鹏"` passes 233, 185, 143
## * `"🐦"` passes 240, 159, 144, 166
##
## To convert a #Str into a plain `List U8` of UTF-8 code units, see #Str.toUtf8.
foldUtf8 : Str, { start: state, step: (state, U8 -> state) } -> state
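In Rust terms this is an ordinary left fold over the byte iterator; a small sketch that reproduces two of the listings above:

```
fn main() {
    // foldUtf8 with start = Vec::new() and step = push collects the code units in order.
    let units: Vec<u8> = "Roc".bytes().fold(Vec::new(), |mut acc, unit| {
        acc.push(unit);
        acc
    });
    assert_eq!(units, vec![82, 111, 99]);

    // "鹏" is a single code point encoded as three UTF-8 code units.
    assert_eq!("鹏".bytes().collect::<Vec<u8>>(), vec![233, 185, 143]);
}
```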
## Walk through the string's #U16 UTF-16 [code units](https://unicode.org/glossary/#code_unit)
## to build up a state.
## (If you want a `step` function which receives a #Str instead of a #U16, see #Str.foldClusters.)
##
## Here are the #U16 values that will be passed to `step` when this function is
## called on various strings:
##
## * `"👩‍👩‍👦‍👦"` passes 55357, 56425, 8205, 55357, 56425, 8205, 55357, 56422, 8205, 55357, 56422
## * `"Roc"` passes 82, 111, 99
## * `"鹏"` passes 40527
## * `"🐦"` passes 55357, 56358
##
## To convert a #Str into a plain `List U16` of UTF-16 code units, see #Str.toUtf16.
foldUtf16 : Str, { start: state, step: (state, U16 -> state) } -> state
## Walk through the string's #U32 UTF-32 [code units](https://unicode.org/glossary/#code_unit)
## to build up a state.
## (If you want a `step` function which receives a #Str instead of a #U32, see #Str.foldClusters.)
##
## Here are the #U32 values that will be passed to `step` when this function is
## called on various strings:
##
## * `"👩‍👩‍👦‍👦"` passes 128105, 8205, 128105, 8205, 128102, 8205, 128102
## * `"Roc"` passes 82, 111, 99
## * `"鹏"` passes 40527
## * `"🐦"` passes 128038
##
## To convert a #Str into a plain `List U32` of UTF-32 code units, see #Str.toUtf32.
foldUtf32 : Str, { start: state, step: (state, U32 -> state) } -> state
## Walk backwards through the string's #U8 UTF-8 [code units](https://unicode.org/glossary/#code_unit)
## to build up a state.
## (If you want a `step` function which receives a #Str instead of a #U8, see #Str.foldClusters.)
##
## Here are the #U8 values that will be passed to `step` when this function is
## called on various strings:
##
## * `"👩‍👩‍👦‍👦"` passes 166, 145, 159, 240, 141, 128, 226, 166, 145, 159, 240, 141, 128, 226, 169, 145, 159, 240, 141, 128, 226, 169, 145, 159, 240
## * `"Roc"` passes 99, 111, 82
## * `"鹏"` passes 143, 185, 233
## * `"🐦"` passes 166, 144, 159, 240
##
## To convert a #Str into a plain `List U8` of UTF-8 code units, see #Str.toUtf8.
foldRevUtf8 : Str, { start: state, step: (state, U8 -> state) } -> state
## Walk backwards through the string's #U16 UTF-16 [code units](https://unicode.org/glossary/#code_unit)
## to build up a state.
## (If you want a `step` function which receives a #Str instead of a #U16, see #Str.foldClusters.)
##
## Here are the #U16 values that will be passed to `step` when this function is
## called on various strings:
##
## * `"👩‍👩‍👦‍👦"` passes 56422, 55357, 8205, 56422, 55357, 8205, 56425, 55357, 8205, 56425, 55357
## * `"Roc"` passes 99, 111, 82
## * `"鹏"` passes 40527
## * `"🐦"` passes 56358, 55357
##
## To convert a #Str into a plain `List U16` of UTF-16 code units, see #Str.toUtf16.
foldRevUtf16 : Str, { start: state, step: (state, U16 -> state) } -> state
## Walk backwards through the string's #U32 UTF-32 [code units](https://unicode.org/glossary/#code_unit)
## to build up a state.
## (If you want a `step` function which receives a #Str instead of a #U32, see #Str.foldClusters.)
##
## Here are the #U32 values that will be passed to `step` when this function is
## called on various strings:
##
## * `"👩‍👩‍👦‍👦"` passes 128102, 8205, 128102, 8205, 128105, 8205, 128105
## * `"Roc"` passes 99, 111, 82
## * `"鹏"` passes 40527
## * `"🐦"` passes 128038
##
## To convert a #Str into a plain `List U32` of UTF-32 code units, see #Str.toUtf32.
foldRevUtf32 : Str, { start: state, step: (state, U32 -> state) } -> state
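The reverse variants walk the same code units back to front; in Rust that is just a `.rev()` before the fold (sketch only):

```
fn main() {
    // foldRevUtf8 visits the UTF-8 code units in reverse order.
    let reversed: Vec<u8> = "🐦".bytes().rev().fold(Vec::new(), |mut acc, unit| {
        acc.push(unit);
        acc
    });
    // Matches the listing above: "🐦" passes 166, 144, 159, 240.
    assert_eq!(reversed, vec![166, 144, 159, 240]);
}
```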

View file

@ -367,7 +367,7 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
// isEmpty : List * -> Bool // isEmpty : List * -> Bool
add_type( add_type(
Symbol::LIST_ISEMPTY, Symbol::LIST_IS_EMPTY,
SolvedType::Func( SolvedType::Func(
vec![SolvedType::Apply( vec![SolvedType::Apply(
Symbol::LIST_LIST, Symbol::LIST_LIST,
@ -408,6 +408,15 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
), ),
); );
// concat : List elem, List elem -> List elem
add_type(
Symbol::LIST_CONCAT,
SolvedType::Func(
vec![list_type(flex(TVAR1)), list_type(flex(TVAR1))],
Box::new(list_type(flex(TVAR1))),
),
);
// map : List before, (before -> after) -> List after // map : List before, (before -> after) -> List after
add_type( add_type(
Symbol::LIST_MAP, Symbol::LIST_MAP,
@ -442,9 +451,9 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
), ),
); );
// length : List a -> Int // len : List * -> Int
add_type( add_type(
Symbol::LIST_LENGTH, Symbol::LIST_LEN,
SolvedType::Func(vec![list_type(flex(TVAR1))], Box::new(int_type())), SolvedType::Func(vec![list_type(flex(TVAR1))], Box::new(int_type())),
); );

View file

@ -394,16 +394,16 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
// Bool module // Bool module
// isEq or (==) : Attr u1 Bool, Attr u2 Bool -> Attr u3 Bool // isEq or (==) : a, a -> Attr u Bool
add_type( add_type(
Symbol::BOOL_EQ, Symbol::BOOL_EQ,
unique_function(vec![bool_type(UVAR1), bool_type(UVAR2)], bool_type(UVAR3)), unique_function(vec![flex(TVAR1), flex(TVAR1)], bool_type(UVAR3)),
); );
// isNeq or (!=) : Attr u1 Bool, Attr u2 Bool -> Attr u3 Bool // isNeq or (!=) : a, a -> Attr u Bool
add_type( add_type(
Symbol::BOOL_NEQ, Symbol::BOOL_NEQ,
unique_function(vec![bool_type(UVAR1), bool_type(UVAR2)], bool_type(UVAR3)), unique_function(vec![flex(TVAR1), flex(TVAR1)], bool_type(UVAR3)),
); );
// and or (&&) : Attr u1 Bool, Attr u2 Bool -> Attr u3 Bool // and or (&&) : Attr u1 Bool, Attr u2 Bool -> Attr u3 Bool
@ -434,13 +434,13 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
// isEmpty : Attr u (List *) -> Attr v Bool // isEmpty : Attr u (List *) -> Attr v Bool
add_type( add_type(
Symbol::LIST_ISEMPTY, Symbol::LIST_IS_EMPTY,
unique_function(vec![list_type(UVAR1, TVAR1)], bool_type(UVAR2)), unique_function(vec![list_type(UVAR1, TVAR1)], bool_type(UVAR2)),
); );
// length : List a -> Int // len : List * -> Int
add_type( add_type(
Symbol::LIST_LENGTH, Symbol::LIST_LEN,
unique_function(vec![list_type(UVAR1, TVAR1)], int_type(UVAR2)), unique_function(vec![list_type(UVAR1, TVAR1)], int_type(UVAR2)),
); );

View file

@ -3,11 +3,11 @@ use roc_collections::all::{ImMap, ImSet, SendMap};
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::{VarStore, Variable}; use roc_types::subs::{VarStore, Variable};
use roc_types::types::{Alias, PatternCategory, Type}; use roc_types::types::{Alias, Category, PatternCategory, Type};
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum Constraint { pub enum Constraint {
Eq(Type, Expected<Type>, Region), Eq(Type, Expected<Type>, Category, Region),
Lookup(Symbol, Expected<Type>, Region), Lookup(Symbol, Expected<Type>, Region),
Pattern(Region, PatternCategory, Type, PExpected<Type>), Pattern(Region, PatternCategory, Type, PExpected<Type>),
True, // Used for things that always unify, e.g. blanks and runtime errors True, // Used for things that always unify, e.g. blanks and runtime errors
@ -32,7 +32,7 @@ impl Constraint {
match self { match self {
True | SaveTheEnvironment => {} True | SaveTheEnvironment => {}
Eq(typ, expected, _) => { Eq(typ, expected, _, _) => {
expected expected
.get_type_mut_ref() .get_type_mut_ref()
.instantiate_aliases(aliases, var_store, introduced); .instantiate_aliases(aliases, var_store, introduced);

View file

@ -22,14 +22,21 @@ use std::collections::HashMap;
use std::fmt::Debug; use std::fmt::Debug;
use ven_graph::{strongly_connected_components, topological_sort_into_groups}; use ven_graph::{strongly_connected_components, topological_sort_into_groups};
#[allow(clippy::type_complexity)]
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct Def { pub struct Def {
pub loc_pattern: Located<Pattern>, pub loc_pattern: Located<Pattern>,
pub loc_expr: Located<Expr>, pub loc_expr: Located<Expr>,
pub expr_var: Variable, pub expr_var: Variable,
pub pattern_vars: SendMap<Symbol, Variable>, pub pattern_vars: SendMap<Symbol, Variable>,
pub annotation: Option<(Type, IntroducedVariables, SendMap<Symbol, Alias>)>, pub annotation: Option<Annotation>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct Annotation {
pub signature: Type,
pub introduced_variables: IntroducedVariables,
pub aliases: SendMap<Symbol, Alias>,
pub region: Region,
} }
#[derive(Debug)] #[derive(Debug)]
@ -71,6 +78,8 @@ enum PendingDef<'a> {
vars: Vec<Located<Lowercase>>, vars: Vec<Located<Lowercase>>,
ann: &'a Located<ast::TypeAnnotation<'a>>, ann: &'a Located<ast::TypeAnnotation<'a>>,
}, },
ShadowedAlias,
} }
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
@ -118,7 +127,7 @@ pub fn canonicalize_defs<'a>(
// This way, whenever any expr is doing lookups, it knows everything that's in scope - // This way, whenever any expr is doing lookups, it knows everything that's in scope -
// even defs that appear after it in the source. // even defs that appear after it in the source.
// //
// This naturally handles recursion too, because a given exper which refers // This naturally handles recursion too, because a given expr which refers
// to itself won't be processed until after its def has been added to scope. // to itself won't be processed until after its def has been added to scope.
use roc_parse::ast::Def::*; use roc_parse::ast::Def::*;
@ -662,19 +671,24 @@ fn pattern_to_vars_by_symbol(
vars_by_symbol.insert(symbol.clone(), expr_var); vars_by_symbol.insert(symbol.clone(), expr_var);
} }
AppliedTag(_, _, arguments) => { AppliedTag { arguments, .. } => {
for (var, nested) in arguments { for (var, nested) in arguments {
pattern_to_vars_by_symbol(vars_by_symbol, &nested.value, *var); pattern_to_vars_by_symbol(vars_by_symbol, &nested.value, *var);
} }
} }
RecordDestructure(_, destructs) => { RecordDestructure { destructs, .. } => {
for destruct in destructs { for destruct in destructs {
vars_by_symbol.insert(destruct.value.symbol.clone(), destruct.value.var); vars_by_symbol.insert(destruct.value.symbol.clone(), destruct.value.var);
} }
} }
IntLiteral(_) | FloatLiteral(_) | StrLiteral(_) | Underscore | UnsupportedPattern(_) => {} NumLiteral(_, _)
| IntLiteral(_)
| FloatLiteral(_)
| StrLiteral(_)
| Underscore
| UnsupportedPattern(_) => {}
Shadowed(_, _) => {} Shadowed(_, _) => {}
} }
@ -787,11 +801,12 @@ fn canonicalize_pending_def<'a>(
value: loc_can_expr.value.clone(), value: loc_can_expr.value.clone(),
}, },
pattern_vars: im::HashMap::clone(&vars_by_symbol), pattern_vars: im::HashMap::clone(&vars_by_symbol),
annotation: Some(( annotation: Some(Annotation {
typ.clone(), signature: typ.clone(),
output.introduced_variables.clone(), introduced_variables: output.introduced_variables.clone(),
ann.aliases.clone(), aliases: ann.aliases.clone(),
)), region: loc_ann.region,
}),
}, },
); );
} }
@ -848,6 +863,11 @@ fn canonicalize_pending_def<'a>(
.introduced_variables .introduced_variables
.union(&can_ann.introduced_variables); .union(&can_ann.introduced_variables);
} }
ShadowedAlias => {
// Since this alias was shadowed, it gets ignored and has no
// effect on the output.
}
TypedBody(loc_pattern, loc_can_pattern, loc_ann, loc_expr) => { TypedBody(loc_pattern, loc_can_pattern, loc_ann, loc_expr) => {
let ann = let ann =
canonicalize_annotation(env, scope, &loc_ann.value, loc_ann.region, var_store); canonicalize_annotation(env, scope, &loc_ann.value, loc_ann.region, var_store);
@ -880,6 +900,8 @@ fn canonicalize_pending_def<'a>(
let (mut loc_can_expr, can_output) = let (mut loc_can_expr, can_output) =
canonicalize_expr(env, var_store, scope, loc_expr.region, &loc_expr.value); canonicalize_expr(env, var_store, scope, loc_expr.region, &loc_expr.value);
output.references = output.references.union(can_output.references.clone());
// reset the tailcallable_symbol // reset the tailcallable_symbol
env.tailcallable_symbol = outer_identifier; env.tailcallable_symbol = outer_identifier;
@ -982,11 +1004,12 @@ fn canonicalize_pending_def<'a>(
value: loc_can_expr.value.clone(), value: loc_can_expr.value.clone(),
}, },
pattern_vars: im::HashMap::clone(&vars_by_symbol), pattern_vars: im::HashMap::clone(&vars_by_symbol),
annotation: Some(( annotation: Some(Annotation {
typ.clone(), signature: typ.clone(),
output.introduced_variables.clone(), introduced_variables: output.introduced_variables.clone(),
ann.aliases.clone(), aliases: ann.aliases.clone(),
)), region: loc_ann.region,
}),
}, },
); );
} }
@ -1285,6 +1308,7 @@ fn to_pending_def<'a>(
Alias { name, vars, ann } => { Alias { name, vars, ann } => {
let region = Region::span_across(&name.region, &ann.region); let region = Region::span_across(&name.region, &ann.region);
match scope.introduce( match scope.introduce(
name.value.into(), name.value.into(),
&env.exposed_ident_ids, &env.exposed_ident_ids,
@ -1321,7 +1345,14 @@ fn to_pending_def<'a>(
} }
} }
Err(_err) => panic!("TODO gracefully handle shadowing of type alias"), Err((original_region, loc_shadowed_symbol)) => {
env.problem(Problem::ShadowingInAnnotation {
original_region,
shadow: loc_shadowed_symbol,
});
PendingDef::ShadowedAlias
}
} }
} }

View file

@ -37,6 +37,13 @@ impl<T> PExpected<T> {
PExpected::ForReason(_, val, _) => val, PExpected::ForReason(_, val, _) => val,
} }
} }
pub fn replace<U>(self, new: U) -> PExpected<U> {
match self {
PExpected::NoExpectation(_val) => PExpected::NoExpectation(new),
PExpected::ForReason(reason, _val, region) => PExpected::ForReason(reason, new, region),
}
}
} }
impl<T> Expected<T> { impl<T> Expected<T> {
@ -63,4 +70,14 @@ impl<T> Expected<T> {
Expected::FromAnnotation(_, _, _, val) => val, Expected::FromAnnotation(_, _, _, val) => val,
} }
} }
pub fn replace<U>(self, new: U) -> Expected<U> {
match self {
Expected::NoExpectation(_val) => Expected::NoExpectation(new),
Expected::ForReason(reason, _val, region) => Expected::ForReason(reason, new, region),
Expected::FromAnnotation(pattern, size, source, _val) => {
Expected::FromAnnotation(pattern, size, source, new)
}
}
}
} }

View file

@ -3,7 +3,7 @@ use crate::def::{can_defs_with_return, Def};
use crate::env::Env; use crate::env::Env;
use crate::num::{ use crate::num::{
finish_parsing_base, finish_parsing_float, finish_parsing_int, float_expr_from_result, finish_parsing_base, finish_parsing_float, finish_parsing_int, float_expr_from_result,
int_expr_from_result, int_expr_from_result, num_expr_from_result,
}; };
use crate::pattern::{canonicalize_pattern, Pattern}; use crate::pattern::{canonicalize_pattern, Pattern};
use crate::procedure::References; use crate::procedure::References;
@ -14,7 +14,7 @@ use roc_module::symbol::Symbol;
use roc_parse::ast; use roc_parse::ast;
use roc_parse::operator::CalledVia; use roc_parse::operator::CalledVia;
use roc_parse::pattern::PatternType::*; use roc_parse::pattern::PatternType::*;
use roc_problem::can::{Problem, RuntimeError}; use roc_problem::can::{PrecedenceProblem, Problem, RuntimeError};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::{VarStore, Variable}; use roc_types::subs::{VarStore, Variable};
use roc_types::types::Alias; use roc_types::types::Alias;
@ -30,9 +30,28 @@ pub struct Output {
pub aliases: SendMap<Symbol, Alias>, pub aliases: SendMap<Symbol, Alias>,
} }
impl Output {
pub fn union(&mut self, other: Self) {
self.references.union_mut(other.references);
if let (None, Some(later)) = (self.tail_call, other.tail_call) {
self.tail_call = Some(later);
}
self.introduced_variables.union(&other.introduced_variables);
self.aliases.extend(other.aliases);
}
}
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub enum Expr { pub enum Expr {
// Literals // Literals
// Num stores the `a` variable in `Num a`. Not the same as the variable
// stored in Int and Float below, which is strictly for better error messages
Num(Variable, i64),
// Int and Float store a variable to generate better error messages
Int(Variable, i64), Int(Variable, i64),
Float(Variable, f64), Float(Variable, f64),
Str(Box<str>), Str(Box<str>),
@ -48,8 +67,9 @@ pub enum Expr {
When { When {
cond_var: Variable, cond_var: Variable,
expr_var: Variable, expr_var: Variable,
region: Region,
loc_cond: Box<Located<Expr>>, loc_cond: Box<Located<Expr>>,
branches: Vec<(Located<Pattern>, Located<Expr>)>, branches: Vec<WhenBranch>,
}, },
If { If {
cond_var: Variable, cond_var: Variable,
@ -79,13 +99,17 @@ pub enum Expr {
), ),
// Product Types // Product Types
Record(Variable, SendMap<Lowercase, Field>), Record {
record_var: Variable,
fields: SendMap<Lowercase, Field>,
},
/// Empty record constant /// Empty record constant
EmptyRecord, EmptyRecord,
/// Look up exactly one field on a record, e.g. (expr).foo. /// Look up exactly one field on a record, e.g. (expr).foo.
Access { Access {
record_var: Variable,
ext_var: Variable, ext_var: Variable,
field_var: Variable, field_var: Variable,
loc_expr: Box<Located<Expr>>, loc_expr: Box<Located<Expr>>,
@ -93,6 +117,7 @@ pub enum Expr {
}, },
/// field accessor as a function, e.g. (.foo) expr /// field accessor as a function, e.g. (.foo) expr
Accessor { Accessor {
record_var: Variable,
ext_var: Variable, ext_var: Variable,
field_var: Variable, field_var: Variable,
field: Lowercase, field: Lowercase,
@ -134,6 +159,13 @@ pub enum Recursive {
NotRecursive, NotRecursive,
} }
#[derive(Clone, Debug, PartialEq)]
pub struct WhenBranch {
pub patterns: Vec<Located<Pattern>>,
pub value: Located<Expr>,
pub guard: Option<Located<Expr>>,
}
pub fn canonicalize_expr<'a>( pub fn canonicalize_expr<'a>(
env: &mut Env<'a>, env: &mut Env<'a>,
var_store: &VarStore, var_store: &VarStore,
@ -144,8 +176,8 @@ pub fn canonicalize_expr<'a>(
use Expr::*; use Expr::*;
let (expr, output) = match expr { let (expr, output) = match expr {
ast::Expr::Int(string) => { ast::Expr::Num(string) => {
let answer = int_expr_from_result(var_store, finish_parsing_int(*string), env); let answer = num_expr_from_result(var_store, finish_parsing_int(*string), env);
(answer, Output::default()) (answer, Output::default())
} }
@ -189,7 +221,13 @@ pub fn canonicalize_expr<'a>(
} else { } else {
let (can_fields, output) = canonicalize_fields(env, var_store, scope, fields); let (can_fields, output) = canonicalize_fields(env, var_store, scope, fields);
(Record(var_store.fresh(), can_fields), output) (
Record {
record_var: var_store.fresh(),
fields: can_fields,
},
output,
)
} }
} }
ast::Expr::Str(string) => (Str((*string).into()), Output::default()), ast::Expr::Str(string) => (Str((*string).into()), Output::default()),
@ -394,20 +432,19 @@ pub fn canonicalize_expr<'a>(
loc_body_expr.region, loc_body_expr.region,
&loc_body_expr.value, &loc_body_expr.value,
); );
// Now that we've collected all the references, check to see if any of the args we defined // Now that we've collected all the references, check to see if any of the args we defined
// went unreferenced. If any did, report them as unused arguments. // went unreferenced. If any did, report them as unused arguments.
for (symbol, region) in scope.symbols() { for (sub_symbol, region) in scope.symbols() {
if !original_scope.contains_symbol(*symbol) { if !original_scope.contains_symbol(*sub_symbol) {
if !output.references.has_lookup(*symbol) { if !output.references.has_lookup(*sub_symbol) {
// The body never referenced this argument we declared. It's an unused argument! // The body never referenced this argument we declared. It's an unused argument!
env.problem(Problem::UnusedArgument(*symbol, *region)); env.problem(Problem::UnusedArgument(symbol, *sub_symbol, *region));
} }
// We shouldn't ultimately count arguments as referenced locals. Otherwise, // We shouldn't ultimately count arguments as referenced locals. Otherwise,
// we end up with weird conclusions like the expression (\x -> x + 1) // we end up with weird conclusions like the expression (\x -> x + 1)
// references the (nonexistent) local variable x! // references the (nonexistent) local variable x!
output.references.lookups.remove(symbol); output.references.lookups.remove(sub_symbol);
} }
} }
@ -428,7 +465,7 @@ pub fn canonicalize_expr<'a>(
// Infer the condition expression's type. // Infer the condition expression's type.
let cond_var = var_store.fresh(); let cond_var = var_store.fresh();
let (can_cond, mut output) = let (can_cond, mut output) =
canonicalize_expr(env, var_store, scope, region, &loc_cond.value); canonicalize_expr(env, var_store, scope, loc_cond.region, &loc_cond.value);
// the condition can never be a tail-call // the condition can never be a tail-call
output.tail_call = None; output.tail_call = None;
@ -436,19 +473,12 @@ pub fn canonicalize_expr<'a>(
let mut can_branches = Vec::with_capacity(branches.len()); let mut can_branches = Vec::with_capacity(branches.len());
for branch in branches { for branch in branches {
let (can_when_pattern, loc_can_expr, branch_references) = canonicalize_when_branch( let (can_when_branch, branch_references) =
env, canonicalize_when_branch(env, var_store, scope, region, *branch, &mut output);
var_store,
scope,
region,
branch.patterns.first().unwrap(),
&branch.value,
&mut output,
);
output.references = output.references.union(branch_references); output.references = output.references.union(branch_references);
can_branches.push((can_when_pattern, loc_can_expr)); can_branches.push(can_when_branch);
} }
// A "when" with no branches is a runtime error, but it will mess things up // A "when" with no branches is a runtime error, but it will mess things up
@ -462,6 +492,7 @@ pub fn canonicalize_expr<'a>(
let expr = When { let expr = When {
expr_var: var_store.fresh(), expr_var: var_store.fresh(),
cond_var, cond_var,
region,
loc_cond: Box::new(can_cond), loc_cond: Box::new(can_cond),
branches: can_branches, branches: can_branches,
}; };
@ -473,6 +504,7 @@ pub fn canonicalize_expr<'a>(
( (
Access { Access {
record_var: var_store.fresh(),
field_var: var_store.fresh(), field_var: var_store.fresh(),
ext_var: var_store.fresh(), ext_var: var_store.fresh(),
loc_expr: Box::new(loc_expr), loc_expr: Box::new(loc_expr),
@ -481,20 +513,15 @@ pub fn canonicalize_expr<'a>(
output, output,
) )
} }
ast::Expr::AccessorFunction(field) => { ast::Expr::AccessorFunction(field) => (
let ext_var = var_store.fresh();
let field_var = var_store.fresh();
let field_name: Lowercase = (*field).into();
(
Accessor { Accessor {
field: field_name, record_var: var_store.fresh(),
ext_var, ext_var: var_store.fresh(),
field_var, field_var: var_store.fresh(),
field: (*field).into(),
}, },
Output::default(), Output::default(),
) ),
}
ast::Expr::GlobalTag(tag) => { ast::Expr::GlobalTag(tag) => {
let variant_var = var_store.fresh(); let variant_var = var_store.fresh();
let ext_var = var_store.fresh(); let ext_var = var_store.fresh();
@ -557,14 +584,32 @@ pub fn canonicalize_expr<'a>(
) )
} }
ast::Expr::MalformedIdent(_) ast::Expr::PrecedenceConflict(whole_region, binop1, binop2, _expr) => {
| ast::Expr::MalformedClosure use roc_problem::can::RuntimeError::*;
| ast::Expr::PrecedenceConflict(_, _, _) => {
panic!( let problem = PrecedenceProblem::BothNonAssociative(
"TODO restore the rest of canonicalize()'s branches {:?} {:?}", *whole_region,
&expr, binop1.clone(),
local_successors(&References::new(), &env.closures) binop2.clone(),
); );
env.problem(Problem::PrecedenceProblem(problem.clone()));
(
RuntimeError(InvalidPrecedence(problem, region)),
Output::default(),
)
}
ast::Expr::MalformedClosure => {
use roc_problem::can::RuntimeError::*;
(RuntimeError(MalformedClosure(region)), Output::default())
}
ast::Expr::MalformedIdent(name) => {
use roc_problem::can::RuntimeError::*;
(
RuntimeError(MalformedIdentifier((*name).into(), region)),
Output::default(),
)
} }
ast::Expr::Nested(sub_expr) => { ast::Expr::Nested(sub_expr) => {
let (answer, output) = canonicalize_expr(env, var_store, scope, region, sub_expr); let (answer, output) = canonicalize_expr(env, var_store, scope, region, sub_expr);
@ -644,34 +689,45 @@ pub fn canonicalize_expr<'a>(
fn canonicalize_when_branch<'a>( fn canonicalize_when_branch<'a>(
env: &mut Env<'a>, env: &mut Env<'a>,
var_store: &VarStore, var_store: &VarStore,
scope: &Scope, scope: &mut Scope,
region: Region, _region: Region,
loc_pattern: &Located<ast::Pattern<'a>>, branch: &'a ast::WhenBranch<'a>,
loc_expr: &'a Located<ast::Expr<'a>>,
output: &mut Output, output: &mut Output,
) -> (Located<Pattern>, Located<Expr>, References) { ) -> (WhenBranch, References) {
// Each case branch gets a new scope for canonicalization. let mut patterns = Vec::with_capacity(branch.patterns.len());
// Shadow `scope` to make sure we don't accidentally use the original one for the
// rest of this block, but keep the original around for later diffing.
let original_scope = scope; let original_scope = scope;
let mut scope = original_scope.clone(); let mut scope = original_scope.clone();
let loc_can_pattern = canonicalize_pattern( // TODO report symbols not bound in all patterns
for loc_pattern in &branch.patterns {
patterns.push(canonicalize_pattern(
env, env,
var_store, var_store,
&mut scope, &mut scope,
WhenBranch, WhenBranch,
&loc_pattern.value, &loc_pattern.value,
loc_pattern.region, loc_pattern.region,
));
}
let (value, mut branch_output) = canonicalize_expr(
env,
var_store,
&mut scope,
branch.value.region,
&branch.value.value,
); );
let (can_expr, branch_output) = let guard = match &branch.guard {
canonicalize_expr(env, var_store, &mut scope, region, &loc_expr.value); None => None,
Some(loc_expr) => {
let (can_guard, guard_branch_output) =
canonicalize_expr(env, var_store, &mut scope, loc_expr.region, &loc_expr.value);
// If we already recorded a tail call then keep it, else use this branch's tail call branch_output.union(guard_branch_output);
match output.tail_call { Some(can_guard)
Some(_) => {} }
None => output.tail_call = branch_output.tail_call,
}; };
// Now that we've collected all the references for this branch, check to see if // Now that we've collected all the references for this branch, check to see if
@ -687,7 +743,17 @@ fn canonicalize_when_branch<'a>(
} }
} }
(loc_can_pattern, can_expr, branch_output.references) let references = branch_output.references.clone();
output.union(branch_output);
(
WhenBranch {
patterns,
value,
guard,
},
references,
)
} }
pub fn local_successors<'a>( pub fn local_successors<'a>(

View file

@ -6,12 +6,34 @@ use roc_problem::can::RuntimeError::*;
use roc_types::subs::VarStore; use roc_types::subs::VarStore;
use std::i64; use std::i64;
#[inline(always)]
pub fn num_expr_from_result(
var_store: &VarStore,
result: Result<i64, &str>,
env: &mut Env,
) -> Expr {
match result {
Ok(int) => Expr::Num(var_store.fresh(), int),
Err(raw) => {
// (Num *) compiles to Int if it doesn't
// get specialized to something else first,
// so use int's overflow bounds here.
let runtime_error = IntOutsideRange(raw.into());
env.problem(Problem::RuntimeError(runtime_error.clone()));
Expr::RuntimeError(runtime_error)
}
}
}
#[inline(always)] #[inline(always)]
pub fn int_expr_from_result( pub fn int_expr_from_result(
var_store: &VarStore, var_store: &VarStore,
result: Result<i64, &str>, result: Result<i64, &str>,
env: &mut Env, env: &mut Env,
) -> Expr { ) -> Expr {
// Int stores a variable to generate better error messages
match result { match result {
Ok(int) => Expr::Int(var_store.fresh(), int), Ok(int) => Expr::Int(var_store.fresh(), int),
Err(raw) => { Err(raw) => {
@ -30,6 +52,7 @@ pub fn float_expr_from_result(
result: Result<f64, &str>, result: Result<f64, &str>,
env: &mut Env, env: &mut Env,
) -> Expr { ) -> Expr {
// Float stores a variable to generate better error messages
match result { match result {
Ok(float) => Expr::Float(var_store.fresh(), float), Ok(float) => Expr::Float(var_store.fresh(), float),
Err(raw) => { Err(raw) => {

View file

@ -62,8 +62,8 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
match &loc_expr.value { match &loc_expr.value {
Float(_) Float(_)
| Nested(Float(_)) | Nested(Float(_))
| Int(_) | Num(_)
| Nested(Int(_)) | Nested(Num(_))
| NonBase10Int { .. } | NonBase10Int { .. }
| Nested(NonBase10Int { .. }) | Nested(NonBase10Int { .. })
| Str(_) | Str(_)
@ -78,8 +78,8 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
| Nested(MalformedIdent(_)) | Nested(MalformedIdent(_))
| MalformedClosure | MalformedClosure
| Nested(MalformedClosure) | Nested(MalformedClosure)
| PrecedenceConflict(_, _, _) | PrecedenceConflict(_, _, _, _)
| Nested(PrecedenceConflict(_, _, _)) | Nested(PrecedenceConflict(_, _, _, _))
| GlobalTag(_) | GlobalTag(_)
| Nested(GlobalTag(_)) | Nested(GlobalTag(_))
| PrivateTag(_) | PrivateTag(_)
@ -179,13 +179,19 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
}) })
} }
let desugared_guard = if let Some(guard) = &branch.guard {
Some(desugar_expr(arena, guard).clone())
} else {
None
};
desugared_branches.push(&*arena.alloc(WhenBranch { desugared_branches.push(&*arena.alloc(WhenBranch {
patterns: alternatives, patterns: alternatives,
value: Located { value: Located {
region: desugared.region, region: desugared.region,
value: Nested(&desugared.value), value: Nested(&desugared.value),
}, },
guard: None, guard: desugared_guard,
})); }));
} }
@ -412,8 +418,9 @@ fn desugar_bin_op<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'_>>) -> &'a L
); );
let region = broken_expr.region; let region = broken_expr.region;
let value = Expr::PrecedenceConflict( let value = Expr::PrecedenceConflict(
bad_op, loc_expr.region,
stack_op, stack_op,
bad_op,
arena.alloc(broken_expr), arena.alloc(broken_expr),
); );

View file

@ -14,11 +14,21 @@ use roc_types::subs::{VarStore, Variable};
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub enum Pattern { pub enum Pattern {
Identifier(Symbol), Identifier(Symbol),
AppliedTag(Variable, TagName, Vec<(Variable, Located<Pattern>)>), AppliedTag {
whole_var: Variable,
ext_var: Variable,
tag_name: TagName,
arguments: Vec<(Variable, Located<Pattern>)>,
},
RecordDestructure {
whole_var: Variable,
ext_var: Variable,
destructs: Vec<Located<RecordDestruct>>,
},
IntLiteral(i64), IntLiteral(i64),
NumLiteral(Variable, i64),
FloatLiteral(f64), FloatLiteral(f64),
StrLiteral(Box<str>), StrLiteral(Box<str>),
RecordDestructure(Variable, Vec<Located<RecordDestruct>>),
Underscore, Underscore,
// Runtime Exceptions // Runtime Exceptions
@ -50,18 +60,23 @@ pub fn symbols_from_pattern_help(pattern: &Pattern, symbols: &mut Vec<Symbol>) {
symbols.push(symbol.clone()); symbols.push(symbol.clone());
} }
AppliedTag(_, _, arguments) => { AppliedTag { arguments, .. } => {
for (_, nested) in arguments { for (_, nested) in arguments {
symbols_from_pattern_help(&nested.value, symbols); symbols_from_pattern_help(&nested.value, symbols);
} }
} }
RecordDestructure(_, destructs) => { RecordDestructure { destructs, .. } => {
for destruct in destructs { for destruct in destructs {
symbols.push(destruct.value.symbol.clone()); symbols.push(destruct.value.symbol.clone());
} }
} }
IntLiteral(_) | FloatLiteral(_) | StrLiteral(_) | Underscore | UnsupportedPattern(_) => {} NumLiteral(_, _)
| IntLiteral(_)
| FloatLiteral(_)
| StrLiteral(_)
| Underscore
| UnsupportedPattern(_) => {}
Shadowed(_, _) => {} Shadowed(_, _) => {}
} }
@ -97,17 +112,23 @@ pub fn canonicalize_pattern<'a>(
}, },
GlobalTag(name) => { GlobalTag(name) => {
// Canonicalize the tag's name. // Canonicalize the tag's name.
Pattern::AppliedTag(var_store.fresh(), TagName::Global((*name).into()), vec![]) Pattern::AppliedTag {
whole_var: var_store.fresh(),
ext_var: var_store.fresh(),
tag_name: TagName::Global((*name).into()),
arguments: vec![],
}
} }
PrivateTag(name) => { PrivateTag(name) => {
let ident_id = env.ident_ids.get_or_insert(&(*name).into()); let ident_id = env.ident_ids.get_or_insert(&(*name).into());
// Canonicalize the tag's name. // Canonicalize the tag's name.
Pattern::AppliedTag( Pattern::AppliedTag {
var_store.fresh(), whole_var: var_store.fresh(),
TagName::Private(Symbol::new(env.home, ident_id)), ext_var: var_store.fresh(),
vec![], tag_name: TagName::Private(Symbol::new(env.home, ident_id)),
) arguments: vec![],
}
} }
Apply(tag, patterns) => { Apply(tag, patterns) => {
let tag_name = match tag.value { let tag_name = match tag.value {
@ -135,7 +156,12 @@ pub fn canonicalize_pattern<'a>(
)); ));
} }
Pattern::AppliedTag(var_store.fresh(), tag_name, can_patterns) Pattern::AppliedTag {
whole_var: var_store.fresh(),
ext_var: var_store.fresh(),
tag_name,
arguments: can_patterns,
}
} }
FloatLiteral(ref string) => match pattern_type { FloatLiteral(ref string) => match pattern_type {
@ -155,12 +181,12 @@ pub fn canonicalize_pattern<'a>(
ptype @ DefExpr | ptype @ TopLevelDef => unsupported_pattern(env, ptype, region), ptype @ DefExpr | ptype @ TopLevelDef => unsupported_pattern(env, ptype, region),
}, },
IntLiteral(string) => match pattern_type { NumLiteral(string) => match pattern_type {
WhenBranch => { WhenBranch => {
let int = finish_parsing_int(string) let int = finish_parsing_int(string)
.unwrap_or_else(|_| panic!("TODO handle malformed int pattern")); .unwrap_or_else(|_| panic!("TODO handle malformed int pattern"));
Pattern::IntLiteral(int) Pattern::NumLiteral(var_store.fresh(), int)
} }
ptype @ DefExpr | ptype @ TopLevelDef | ptype @ FunctionArg => { ptype @ DefExpr | ptype @ TopLevelDef | ptype @ FunctionArg => {
unsupported_pattern(env, ptype, region) unsupported_pattern(env, ptype, region)
@ -202,7 +228,8 @@ pub fn canonicalize_pattern<'a>(
} }
RecordDestructure(patterns) => { RecordDestructure(patterns) => {
let ext_var = var_store.fresh(); let ext_var = var_store.fresh();
let mut fields = Vec::with_capacity(patterns.len()); let whole_var = var_store.fresh();
let mut destructs = Vec::with_capacity(patterns.len());
let mut opt_erroneous = None; let mut opt_erroneous = None;
for loc_pattern in *patterns { for loc_pattern in *patterns {
@ -215,7 +242,7 @@ pub fn canonicalize_pattern<'a>(
region, region,
) { ) {
Ok(symbol) => { Ok(symbol) => {
fields.push(Located { destructs.push(Located {
region: loc_pattern.region, region: loc_pattern.region,
value: RecordDestruct { value: RecordDestruct {
var: var_store.fresh(), var: var_store.fresh(),
@ -240,13 +267,8 @@ pub fn canonicalize_pattern<'a>(
}; };
} }
RecordField(label, loc_guard) => { RecordField(label, loc_guard) => {
match scope.introduce( // a guard does not introduce the label into scope!
label.into(), let symbol = scope.ignore(label.into(), &mut env.ident_ids);
&env.exposed_ident_ids,
&mut env.ident_ids,
region,
) {
Ok(symbol) => {
let can_guard = canonicalize_pattern( let can_guard = canonicalize_pattern(
env, env,
var_store, var_store,
@ -256,7 +278,7 @@ pub fn canonicalize_pattern<'a>(
loc_guard.region, loc_guard.region,
); );
fields.push(Located { destructs.push(Located {
region: loc_pattern.region, region: loc_pattern.region,
value: RecordDestruct { value: RecordDestruct {
var: var_store.fresh(), var: var_store.fresh(),
@ -266,27 +288,17 @@ pub fn canonicalize_pattern<'a>(
}, },
}); });
} }
Err((original_region, shadow)) => {
env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
original_region,
shadow: shadow.clone(),
}));
// No matter what the other patterns
// are, we're definitely shadowed and will
// get a runtime exception as soon as we
// encounter the first bad pattern.
opt_erroneous = Some(Pattern::Shadowed(original_region, shadow));
}
};
}
_ => panic!("invalid pattern in record"), _ => panic!("invalid pattern in record"),
} }
} }
// If we encountered an erroneous pattern (e.g. one with shadowing), // If we encountered an erroneous pattern (e.g. one with shadowing),
// use the resulting RuntimeError. Otherwise, return a successful record destructure. // use the resulting RuntimeError. Otherwise, return a successful record destructure.
opt_erroneous.unwrap_or_else(|| Pattern::RecordDestructure(ext_var, fields)) opt_erroneous.unwrap_or_else(|| Pattern::RecordDestructure {
whole_var,
ext_var,
destructs,
})
} }
RecordField(_name, _loc_pattern) => { RecordField(_name, _loc_pattern) => {
unreachable!("should have been handled in RecordDestructure"); unreachable!("should have been handled in RecordDestructure");
@ -339,12 +351,15 @@ fn add_bindings_from_patterns(
Identifier(symbol) => { Identifier(symbol) => {
answer.push((*symbol, *region)); answer.push((*symbol, *region));
} }
AppliedTag(_, _, loc_args) => { AppliedTag {
arguments: loc_args,
..
} => {
for (_, loc_arg) in loc_args { for (_, loc_arg) in loc_args {
add_bindings_from_patterns(&loc_arg.region, &loc_arg.value, scope, answer); add_bindings_from_patterns(&loc_arg.region, &loc_arg.value, scope, answer);
} }
} }
RecordDestructure(_, destructs) => { RecordDestructure { destructs, .. } => {
for Located { for Located {
region, region,
value: RecordDestruct { symbol, .. }, value: RecordDestruct { symbol, .. },
@ -353,7 +368,8 @@ fn add_bindings_from_patterns(
answer.push((*symbol, *region)); answer.push((*symbol, *region));
} }
} }
IntLiteral(_) NumLiteral(_, _)
| IntLiteral(_)
| FloatLiteral(_) | FloatLiteral(_)
| StrLiteral(_) | StrLiteral(_)
| Underscore | Underscore

View file

@ -63,6 +63,11 @@ impl References {
self self
} }
pub fn union_mut(&mut self, other: References) {
self.lookups.extend(other.lookups);
self.calls.extend(other.calls);
}
pub fn has_lookup(&self, symbol: Symbol) -> bool { pub fn has_lookup(&self, symbol: Symbol) -> bool {
self.lookups.contains(&symbol) self.lookups.contains(&symbol)
} }

View file

@ -57,10 +57,13 @@ impl Scope {
pub fn lookup(&mut self, ident: &Ident, region: Region) -> Result<Symbol, RuntimeError> { pub fn lookup(&mut self, ident: &Ident, region: Region) -> Result<Symbol, RuntimeError> {
match self.idents.get(ident) { match self.idents.get(ident) {
Some((symbol, _)) => Ok(*symbol), Some((symbol, _)) => Ok(*symbol),
None => Err(RuntimeError::LookupNotInScope(Located { None => Err(RuntimeError::LookupNotInScope(
Located {
region, region,
value: ident.clone().into(), value: ident.clone().into(),
})), },
self.idents.keys().map(|v| v.as_ref().into()).collect(),
)),
} }
} }
@ -107,6 +110,14 @@ impl Scope {
} }
} }
/// Ignore an identifier.
///
/// Used for record guards like { x: Just _ }
pub fn ignore(&mut self, ident: Ident, all_ident_ids: &mut IdentIds) -> Symbol {
let ident_id = all_ident_ids.add(ident.into());
Symbol::new(self.home, ident_id)
}
/// Import a Symbol from another module into this module's top-level scope. /// Import a Symbol from another module into this module's top-level scope.
/// ///
/// Returns Err if this would shadow an existing ident, including the /// Returns Err if this would shadow an existing ident, including the

View file

@ -11,7 +11,7 @@ extern crate roc_region;
mod helpers; mod helpers;
#[cfg(test)] #[cfg(test)]
mod test_canonicalize { mod test_can {
use crate::helpers::{can_expr_with, test_home, CanExprOut}; use crate::helpers::{can_expr_with, test_home, CanExprOut};
use bumpalo::Bump; use bumpalo::Bump;
use roc_can::expr::Expr::{self, *}; use roc_can::expr::Expr::{self, *};
@ -40,6 +40,7 @@ mod test_canonicalize {
} }
} }
} }
fn assert_can_int(input: &str, expected: i64) { fn assert_can_int(input: &str, expected: i64) {
let arena = Bump::new(); let arena = Bump::new();
let actual_out = can_expr_with(&arena, test_home(), input); let actual_out = can_expr_with(&arena, test_home(), input);
@ -54,6 +55,20 @@ mod test_canonicalize {
} }
} }
fn assert_can_num(input: &str, expected: i64) {
let arena = Bump::new();
let actual_out = can_expr_with(&arena, test_home(), input);
match actual_out.loc_expr.value {
Expr::Num(_, actual) => {
assert_eq!(expected, actual);
}
actual => {
panic!("Expected a Num, but got: {:?}", actual);
}
}
}
// NUMBER LITERALS // NUMBER LITERALS
#[test] #[test]
@ -98,12 +113,12 @@ mod test_canonicalize {
#[test] #[test]
fn zero() { fn zero() {
assert_can_int("0", 0); assert_can_num("0", 0);
} }
#[test] #[test]
fn minus_zero() { fn minus_zero() {
assert_can_int("-0", 0); assert_can_num("-0", 0);
} }
#[test] #[test]
@ -551,6 +566,31 @@ mod test_canonicalize {
} }
} }
#[test]
fn unused_def_regression() {
let src = indoc!(
r#"
Booly : [ Yes, No, Maybe ]
y : Booly
y = No
# There was a bug where annotating a def meant that its
# references no longer got reported.
#
# https://github.com/rtfeldman/roc/issues/298
x : List Booly
x = [ y ]
x
"#
);
let arena = Bump::new();
let CanExprOut { problems, .. } = can_expr_with(&arena, test_home(), src);
assert_eq!(problems, Vec::new());
}
//#[test] //#[test]
//fn closing_over_locals() { //fn closing_over_locals() {
// // "local" should be used, because the closure used it. // // "local" should be used, because the closure used it.

View file

@ -93,3 +93,44 @@ where
map map
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Index(usize);
impl Index {
pub const FIRST: Self = Index(0);
pub fn zero_based(i: usize) -> Self {
Index(i)
}
pub fn to_zero_based(self) -> usize {
self.0
}
pub fn one_based(i: usize) -> Self {
Index(i - 1)
}
pub fn ordinal(self) -> std::string::String {
int_to_ordinal(self.0 + 1)
}
}
fn int_to_ordinal(number: usize) -> std::string::String {
// NOTE: one-based
let remainder10 = number % 10;
let remainder100 = number % 100;
let ending = match remainder100 {
11..=13 => "th",
_ => match remainder10 {
1 => "st",
2 => "nd",
3 => "rd",
_ => "th",
},
};
format!("{}{}", number, ending)
}
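A few concrete values for the helpers above (a sanity-check sketch; it assumes the `Index` type and `int_to_ordinal` defined above are in scope):

```
fn main() {
    // Index is zero-based internally; ordinal() reports it one-based.
    assert_eq!(Index::FIRST.ordinal(), "1st");
    assert_eq!(Index::zero_based(1).ordinal(), "2nd");
    assert_eq!(Index::zero_based(2).ordinal(), "3rd");
    assert_eq!(Index::one_based(21).ordinal(), "21st");

    // The 11/12/13 special case wins over the last-digit rule.
    assert_eq!(Index::zero_based(10).ordinal(), "11th");
    assert_eq!(Index::zero_based(12).ordinal(), "13th");
}
```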

View file

@ -5,6 +5,7 @@ use roc_collections::all::SendMap;
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_region::all::Region; use roc_region::all::Region;
use roc_types::subs::Variable; use roc_types::subs::Variable;
use roc_types::types::Category;
use roc_types::types::Reason; use roc_types::types::Reason;
use roc_types::types::Type::{self, *}; use roc_types::types::Type::{self, *};
@ -17,8 +18,8 @@ pub fn int_literal(num_var: Variable, expected: Expected<Type>, region: Region)
exists( exists(
vec![num_var], vec![num_var],
And(vec![ And(vec![
Eq(num_type.clone(), expected_literal, region), Eq(num_type.clone(), expected_literal, Category::Int, region),
Eq(num_type, expected, region), Eq(num_type, expected, Category::Int, region),
]), ]),
) )
} }
@ -32,8 +33,8 @@ pub fn float_literal(num_var: Variable, expected: Expected<Type>, region: Region
exists( exists(
vec![num_var], vec![num_var],
And(vec![ And(vec![
Eq(num_type.clone(), expected_literal, region), Eq(num_type.clone(), expected_literal, Category::Float, region),
Eq(num_type, expected, region), Eq(num_type, expected, Category::Float, region),
]), ]),
) )
} }

View file

@ -7,16 +7,16 @@ use roc_can::def::{Declaration, Def};
use roc_can::expected::Expected::{self, *}; use roc_can::expected::Expected::{self, *};
use roc_can::expected::PExpected; use roc_can::expected::PExpected;
use roc_can::expr::Expr::{self, *}; use roc_can::expr::Expr::{self, *};
use roc_can::expr::Field; use roc_can::expr::{Field, WhenBranch};
use roc_can::pattern::Pattern; use roc_can::pattern::Pattern;
use roc_collections::all::{ImMap, SendMap}; use roc_collections::all::{ImMap, Index, SendMap};
use roc_module::ident::{Lowercase, TagName}; use roc_module::ident::Lowercase;
use roc_module::symbol::{ModuleId, Symbol}; use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::Variable; use roc_types::subs::Variable;
use roc_types::types::AnnotationSource::{self, *}; use roc_types::types::AnnotationSource::{self, *};
use roc_types::types::Type::{self, *}; use roc_types::types::Type::{self, *};
use roc_types::types::{Alias, PReason, Reason}; use roc_types::types::{Alias, Category, PReason, Reason};
/// This is for constraining Defs /// This is for constraining Defs
#[derive(Default, Debug)] #[derive(Default, Debug)]
@ -80,9 +80,18 @@ pub fn constrain_expr(
) -> Constraint { ) -> Constraint {
match expr { match expr {
Int(var, _) => int_literal(*var, expected, region), Int(var, _) => int_literal(*var, expected, region),
Num(var, _) => exists(
vec![*var],
Eq(
Type::Apply(Symbol::NUM_NUM, vec![Type::Variable(*var)]),
expected,
Category::Num,
region,
),
),
Float(var, _) => float_literal(*var, expected, region), Float(var, _) => float_literal(*var, expected, region),
EmptyRecord => constrain_empty_record(region, expected), EmptyRecord => constrain_empty_record(region, expected),
Expr::Record(stored_var, fields) => { Expr::Record { record_var, fields } => {
if fields.is_empty() { if fields.is_empty() {
constrain_empty_record(region, expected) constrain_empty_record(region, expected)
} else { } else {
@ -113,13 +122,18 @@ pub fn constrain_expr(
// could all share? // could all share?
Box::new(Type::EmptyRec), Box::new(Type::EmptyRec),
); );
let record_con = Eq(record_type, expected.clone(), region); let record_con = Eq(record_type, expected.clone(), Category::Record, region);
constraints.push(record_con); constraints.push(record_con);
// variable to store in the AST // variable to store in the AST
let stored_con = Eq(Type::Variable(*stored_var), expected, region); let stored_con = Eq(
Type::Variable(*record_var),
expected,
Category::Storage,
region,
);
field_vars.push(*stored_var); field_vars.push(*record_var);
constraints.push(stored_con); constraints.push(stored_con);
exists(field_vars, And(constraints)) exists(field_vars, And(constraints))
@ -135,19 +149,29 @@ pub fn constrain_expr(
let mut vars = Vec::with_capacity(updates.len() + 2); let mut vars = Vec::with_capacity(updates.len() + 2);
let mut cons = Vec::with_capacity(updates.len() + 1); let mut cons = Vec::with_capacity(updates.len() + 1);
for (field_name, Field { var, loc_expr, .. }) in updates.clone() { for (field_name, Field { var, loc_expr, .. }) in updates.clone() {
let (var, tipe, con) = let (var, tipe, con) = constrain_field_update(
constrain_field_update(env, var, region, field_name.clone(), &loc_expr); env,
var,
loc_expr.region,
field_name.clone(),
&loc_expr,
);
fields.insert(field_name, tipe); fields.insert(field_name, tipe);
vars.push(var); vars.push(var);
cons.push(con); cons.push(con);
} }
let fields_type = Type::Record(fields.clone(), Box::new(Type::Variable(*ext_var))); let fields_type = Type::Record(fields, Box::new(Type::Variable(*ext_var)));
let record_type = Type::Variable(*record_var); let record_type = Type::Variable(*record_var);
// NOTE from elm compiler: fields_type is separate so that Error propagates better // NOTE from elm compiler: fields_type is separate so that Error propagates better
let fields_con = Eq(record_type.clone(), NoExpectation(fields_type), region); let fields_con = Eq(
let record_con = Eq(record_type.clone(), expected, region); record_type.clone(),
NoExpectation(fields_type),
Category::Record,
region,
);
let record_con = Eq(record_type.clone(), expected, Category::Record, region);
vars.push(*record_var); vars.push(*record_var);
vars.push(*ext_var); vars.push(*ext_var);
@ -155,20 +179,27 @@ pub fn constrain_expr(
let con = Lookup( let con = Lookup(
*symbol, *symbol,
ForReason( ForReason(
Reason::RecordUpdateKeys(*symbol, fields), Reason::RecordUpdateKeys(
*symbol,
updates
.iter()
.map(|(key, field)| (key.clone(), field.region))
.collect(),
),
record_type, record_type,
region, region,
), ),
region, region,
); );
cons.push(con); // ensure constraints are solved in this order, gives better errors
cons.push(fields_con); cons.insert(0, fields_con);
cons.push(record_con); cons.insert(1, con);
cons.insert(2, record_con);
exists(vars, And(cons)) exists(vars, And(cons))
} }
Str(_) | BlockStr(_) => Eq(str_type(), expected, region), Str(_) | BlockStr(_) => Eq(str_type(), expected, Category::Str, region),
List { List {
elem_var, elem_var,
loc_elems, loc_elems,
@ -177,22 +208,32 @@ pub fn constrain_expr(
if loc_elems.is_empty() { if loc_elems.is_empty() {
exists( exists(
vec![*elem_var], vec![*elem_var],
Eq(empty_list_type(*elem_var), expected, region), Eq(empty_list_type(*elem_var), expected, Category::List, region),
) )
} else { } else {
let list_elem_type = Type::Variable(*elem_var); let list_elem_type = Type::Variable(*elem_var);
let mut constraints = Vec::with_capacity(1 + loc_elems.len()); let mut constraints = Vec::with_capacity(1 + loc_elems.len());
for loc_elem in loc_elems { for (index, loc_elem) in loc_elems.iter().enumerate() {
let elem_expected = let elem_expected = ForReason(
ForReason(Reason::ElemInList, list_elem_type.clone(), region); Reason::ElemInList {
index: Index::zero_based(index),
},
list_elem_type.clone(),
loc_elem.region,
);
let constraint = let constraint =
constrain_expr(env, loc_elem.region, &loc_elem.value, elem_expected); constrain_expr(env, loc_elem.region, &loc_elem.value, elem_expected);
constraints.push(constraint); constraints.push(constraint);
} }
constraints.push(Eq(list_type(list_elem_type), expected, region)); constraints.push(Eq(
list_type(list_elem_type),
expected,
Category::List,
region,
));
exists(vec![*elem_var], And(constraints)) exists(vec![*elem_var], And(constraints))
} }
@ -201,11 +242,18 @@ pub fn constrain_expr(
let (fn_var, loc_fn, ret_var) = &**boxed; let (fn_var, loc_fn, ret_var) = &**boxed;
// The expression that evaluates to the function being called, e.g. `foo` in // The expression that evaluates to the function being called, e.g. `foo` in
// (foo) bar baz // (foo) bar baz
let opt_symbol = if let Var(symbol) = loc_fn.value {
Some(symbol)
} else {
None
};
let fn_type = Variable(*fn_var); let fn_type = Variable(*fn_var);
let fn_region = loc_fn.region; let fn_region = loc_fn.region;
let fn_expected = NoExpectation(fn_type.clone()); let fn_expected = NoExpectation(fn_type.clone());
// TODO look up the name and use NamedFnArg if possible.
let fn_reason = Reason::AnonymousFnCall { let fn_reason = Reason::FnCall {
name: opt_symbol,
arity: loc_args.len() as u8, arity: loc_args.len() as u8,
}; };
@ -226,9 +274,10 @@ pub fn constrain_expr(
for (index, (arg_var, loc_arg)) in loc_args.iter().enumerate() { for (index, (arg_var, loc_arg)) in loc_args.iter().enumerate() {
let region = loc_arg.region; let region = loc_arg.region;
let arg_type = Variable(*arg_var); let arg_type = Variable(*arg_var);
// TODO look up the name and use NamedFnArg if possible.
let reason = Reason::AnonymousFnArg { let reason = Reason::FnArg {
arg_index: index as u8, name: opt_symbol,
arg_index: Index::zero_based(index),
}; };
let expected_arg = ForReason(reason, arg_type.clone(), region); let expected_arg = ForReason(reason, arg_type.clone(), region);
let arg_con = constrain_expr(env, loc_arg.region, &loc_arg.value, expected_arg); let arg_con = constrain_expr(env, loc_arg.region, &loc_arg.value, expected_arg);
@ -244,13 +293,15 @@ pub fn constrain_expr(
region, region,
); );
let category = Category::CallResult(opt_symbol);
exists( exists(
vars, vars,
And(vec![ And(vec![
fn_con, fn_con,
Eq(fn_type, expected_fn_type, fn_region), Eq(fn_type, expected_fn_type, category.clone(), fn_region),
And(arg_cons), And(arg_cons),
Eq(ret_type, expected, region), Eq(ret_type, expected, category, region),
]), ]),
) )
} }
@ -305,9 +356,14 @@ pub fn constrain_expr(
ret_constraint, ret_constraint,
})), })),
// "the closure's type is equal to expected type" // "the closure's type is equal to expected type"
Eq(fn_type.clone(), expected, region), Eq(fn_type.clone(), expected, Category::Lambda, region),
// "fn_var is equal to the closure's type" - fn_var is used in code gen // "fn_var is equal to the closure's type" - fn_var is used in code gen
Eq(Type::Variable(*fn_var), NoExpectation(fn_type), region), Eq(
Type::Variable(*fn_var),
NoExpectation(fn_type),
Category::Storage,
region,
),
]), ]),
) )
} }
@ -318,25 +374,33 @@ pub fn constrain_expr(
branches, branches,
final_else, final_else,
} => { } => {
// TODO use Bool alias here, so we don't allocate this type every time let expect_bool = |region| {
let bool_type = Type::TagUnion( let bool_type = Type::Variable(Variable::BOOL);
vec![ Expected::ForReason(Reason::IfCondition, bool_type, region)
(TagName::Global("True".into()), vec![]), };
(TagName::Global("False".into()), vec![]), let mut branch_cons = Vec::with_capacity(2 * branches.len() + 3);
],
Box::new(Type::EmptyTagUnion), // TODO why does this cond var exist? is it for error messages?
let first_cond_region = branches[0].0.region;
let cond_var_is_bool_con = Eq(
Type::Variable(*cond_var),
expect_bool(first_cond_region),
Category::If,
first_cond_region,
); );
let expect_bool = Expected::ForReason(Reason::IfCondition, bool_type, region);
let mut branch_cons = Vec::with_capacity(2 * branches.len() + 2); branch_cons.push(cond_var_is_bool_con);
match expected { match expected {
FromAnnotation(name, arity, _, tipe) => { FromAnnotation(name, arity, _, tipe) => {
for (index, (loc_cond, loc_body)) in branches.iter().enumerate() { for (index, (loc_cond, loc_body)) in branches.iter().enumerate() {
let cond_con = Eq( let cond_con = constrain_expr(
Type::Variable(*cond_var), env,
expect_bool.clone(),
loc_cond.region, loc_cond.region,
&loc_cond.value,
expect_bool(loc_cond.region),
); );
let then_con = constrain_expr( let then_con = constrain_expr(
env, env,
loc_body.region, loc_body.region,
@ -344,7 +408,10 @@ pub fn constrain_expr(
FromAnnotation( FromAnnotation(
name.clone(), name.clone(),
arity, arity,
AnnotationSource::TypedIfBranch(index + 1), AnnotationSource::TypedIfBranch {
index: Index::zero_based(index),
num_branches: branches.len(),
},
tipe.clone(), tipe.clone(),
), ),
); );
@ -359,12 +426,20 @@ pub fn constrain_expr(
FromAnnotation( FromAnnotation(
name, name,
arity, arity,
AnnotationSource::TypedIfBranch(branches.len() + 1), AnnotationSource::TypedIfBranch {
index: Index::zero_based(branches.len()),
num_branches: branches.len(),
},
tipe.clone(), tipe.clone(),
), ),
); );
let ast_con = Eq(Type::Variable(*branch_var), NoExpectation(tipe), region); let ast_con = Eq(
Type::Variable(*branch_var),
NoExpectation(tipe),
Category::Storage,
region,
);
branch_cons.push(ast_con); branch_cons.push(ast_con);
branch_cons.push(else_con); branch_cons.push(else_con);
@ -373,19 +448,24 @@ pub fn constrain_expr(
} }
_ => { _ => {
for (index, (loc_cond, loc_body)) in branches.iter().enumerate() { for (index, (loc_cond, loc_body)) in branches.iter().enumerate() {
let cond_con = Eq( let cond_con = constrain_expr(
Type::Variable(*cond_var), env,
expect_bool.clone(),
loc_cond.region, loc_cond.region,
&loc_cond.value,
expect_bool(loc_cond.region),
); );
let then_con = constrain_expr( let then_con = constrain_expr(
env, env,
loc_body.region, loc_body.region,
&loc_body.value, &loc_body.value,
ForReason( ForReason(
Reason::IfBranch { index: index + 1 }, Reason::IfBranch {
index: Index::zero_based(index),
total_branches: branches.len(),
},
Type::Variable(*branch_var), Type::Variable(*branch_var),
region, loc_body.region,
), ),
); );
@ -398,14 +478,20 @@ pub fn constrain_expr(
&final_else.value, &final_else.value,
ForReason( ForReason(
Reason::IfBranch { Reason::IfBranch {
index: branches.len() + 1, index: Index::zero_based(branches.len()),
total_branches: branches.len() + 1,
}, },
Type::Variable(*branch_var), Type::Variable(*branch_var),
region, final_else.region,
), ),
); );
branch_cons.push(Eq(Type::Variable(*branch_var), expected, region)); branch_cons.push(Eq(
Type::Variable(*branch_var),
expected,
Category::Storage,
region,
));
branch_cons.push(else_con); branch_cons.push(else_con);
exists(vec![*cond_var, *branch_var], And(branch_cons)) exists(vec![*cond_var, *branch_var], And(branch_cons))
@ -417,6 +503,7 @@ pub fn constrain_expr(
expr_var, expr_var,
loc_cond, loc_cond,
branches, branches,
..
} => { } => {
// Infer the condition expression's type. // Infer the condition expression's type.
let cond_var = *cond_var; let cond_var = *cond_var;
@ -434,24 +521,35 @@ pub fn constrain_expr(
match &expected { match &expected {
FromAnnotation(name, arity, _, typ) => { FromAnnotation(name, arity, _, typ) => {
// record the type of the whole expression in the AST // record the type of the whole expression in the AST
let ast_con = Eq(Type::Variable(*expr_var), expected.clone(), region); let ast_con = Eq(
Type::Variable(*expr_var),
expected.clone(),
Category::Storage,
region,
);
constraints.push(ast_con); constraints.push(ast_con);
for (index, (loc_when_pattern, loc_expr)) in branches.iter().enumerate() { for (index, when_branch) in branches.iter().enumerate() {
let pattern_region =
Region::across_all(when_branch.patterns.iter().map(|v| &v.region));
let branch_con = constrain_when_branch( let branch_con = constrain_when_branch(
env, env,
region, when_branch.value.region,
&loc_when_pattern, when_branch,
loc_expr,
PExpected::ForReason( PExpected::ForReason(
PReason::WhenMatch { index }, PReason::WhenMatch {
index: Index::zero_based(index),
},
cond_type.clone(), cond_type.clone(),
region, pattern_region,
), ),
FromAnnotation( FromAnnotation(
name.clone(), name.clone(),
*arity, *arity,
TypedWhenBranch(index), TypedWhenBranch {
index: Index::zero_based(index),
},
typ.clone(), typ.clone(),
), ),
); );
@ -464,18 +562,27 @@ pub fn constrain_expr(
let branch_type = Variable(*expr_var); let branch_type = Variable(*expr_var);
let mut branch_cons = Vec::with_capacity(branches.len()); let mut branch_cons = Vec::with_capacity(branches.len());
for (index, (loc_when_pattern, loc_expr)) in branches.iter().enumerate() { for (index, when_branch) in branches.iter().enumerate() {
let pattern_region =
Region::across_all(when_branch.patterns.iter().map(|v| &v.region));
let branch_con = constrain_when_branch( let branch_con = constrain_when_branch(
env, env,
region, region,
&loc_when_pattern, when_branch,
loc_expr,
PExpected::ForReason( PExpected::ForReason(
PReason::WhenMatch { index }, PReason::WhenMatch {
index: Index::zero_based(index),
},
cond_type.clone(), cond_type.clone(),
region, pattern_region,
),
ForReason(
Reason::WhenBranch {
index: Index::zero_based(index),
},
branch_type.clone(),
when_branch.value.region,
), ),
ForReason(Reason::WhenBranch { index }, branch_type.clone(), region),
); );
branch_cons.push(branch_con); branch_cons.push(branch_con);
@ -488,7 +595,7 @@ pub fn constrain_expr(
And(branch_cons), And(branch_cons),
// The return type of each branch must equal // The return type of each branch must equal
// the return type of the entire when-expression. // the return type of the entire when-expression.
Eq(branch_type, expected, region), Eq(branch_type, expected, Category::When, region),
])); ]));
} }
} }
@ -501,6 +608,7 @@ pub fn constrain_expr(
exists(vec![cond_var, *expr_var], And(constraints)) exists(vec![cond_var, *expr_var], And(constraints))
} }
Access { Access {
record_var,
ext_var, ext_var,
field_var, field_var,
loc_expr, loc_expr,
@ -519,6 +627,15 @@ pub fn constrain_expr(
let record_type = Type::Record(rec_field_types, Box::new(ext_type)); let record_type = Type::Record(rec_field_types, Box::new(ext_type));
let record_expected = Expected::NoExpectation(record_type); let record_expected = Expected::NoExpectation(record_type);
let category = Category::Access(field.clone());
let record_con = Eq(
Type::Variable(*record_var),
record_expected.clone(),
category.clone(),
region,
);
let constraint = constrain_expr( let constraint = constrain_expr(
&Env { &Env {
home: env.home, home: env.home,
@ -530,12 +647,17 @@ pub fn constrain_expr(
); );
exists( exists(
vec![field_var, ext_var], vec![*record_var, field_var, ext_var],
And(vec![constraint, Eq(field_type, expected, region)]), And(vec![
constraint,
Eq(field_type, expected, category, region),
record_con,
]),
) )
} }
Accessor { Accessor {
field, field,
record_var,
ext_var, ext_var,
field_var, field_var,
} => { } => {
@ -549,13 +671,27 @@ pub fn constrain_expr(
field_types.insert(label, field_type.clone()); field_types.insert(label, field_type.clone());
let record_type = Type::Record(field_types, Box::new(ext_type)); let record_type = Type::Record(field_types, Box::new(ext_type));
let category = Category::Accessor(field.clone());
let record_expected = Expected::NoExpectation(record_type.clone());
let record_con = Eq(
Type::Variable(*record_var),
record_expected,
category.clone(),
region,
);
exists( exists(
vec![field_var, ext_var], vec![*record_var, field_var, ext_var],
And(vec![
Eq( Eq(
Type::Function(vec![record_type], Box::new(field_type)), Type::Function(vec![record_type], Box::new(field_type)),
expected, expected,
category,
region, region,
), ),
record_con,
]),
) )
} }
LetRec(defs, loc_ret, var, aliases) => { LetRec(defs, loc_ret, var, aliases) => {
@ -568,7 +704,12 @@ pub fn constrain_expr(
constrain_recursive_defs(env, defs, body_con), constrain_recursive_defs(env, defs, body_con),
// Record the type of the entire def-expression in the variable. // Record the type of the entire def-expression in the variable.
// Code gen will need that later! // Code gen will need that later!
Eq(Type::Variable(*var), expected, loc_ret.region), Eq(
Type::Variable(*var),
expected,
Category::Storage,
loc_ret.region,
),
]), ]),
) )
} }
@ -582,7 +723,12 @@ pub fn constrain_expr(
constrain_def(env, def, body_con), constrain_def(env, def, body_con),
// Record the type of the entire def-expression in the variable. // Record the type of the entire def-expression in the variable.
// Code gen will need that later! // Code gen will need that later!
Eq(Type::Variable(*var), expected, loc_ret.region), Eq(
Type::Variable(*var),
expected,
Category::Storage,
loc_ret.region,
),
]), ]),
) )
} }
@ -615,9 +761,15 @@ pub fn constrain_expr(
Box::new(Type::Variable(*ext_var)), Box::new(Type::Variable(*ext_var)),
), ),
expected.clone(), expected.clone(),
Category::TagApply(name.clone()),
region,
);
let ast_con = Eq(
Type::Variable(*variant_var),
expected,
Category::Storage,
region, region,
); );
let ast_con = Eq(Type::Variable(*variant_var), expected, region);
vars.push(*variant_var); vars.push(*variant_var);
vars.push(*ext_var); vars.push(*ext_var);
@ -634,12 +786,11 @@ pub fn constrain_expr(
fn constrain_when_branch( fn constrain_when_branch(
env: &Env, env: &Env,
region: Region, region: Region,
loc_pattern: &Located<Pattern>, when_branch: &WhenBranch,
loc_expr: &Located<Expr>,
pattern_expected: PExpected<Type>, pattern_expected: PExpected<Type>,
expr_expected: Expected<Type>, expr_expected: Expected<Type>,
) -> Constraint { ) -> Constraint {
let ret_constraint = constrain_expr(env, region, &loc_expr.value, expr_expected); let ret_constraint = constrain_expr(env, region, &when_branch.value.value, expr_expected);
let mut state = PatternState { let mut state = PatternState {
headers: SendMap::default(), headers: SendMap::default(),
@ -647,13 +798,47 @@ fn constrain_when_branch(
constraints: Vec::with_capacity(1), constraints: Vec::with_capacity(1),
}; };
// TODO ensure this is correct
// TODO investigate for error messages, is it better to unify all branches with a variable,
// then unify that variable with the expectation?
for loc_pattern in &when_branch.patterns {
constrain_pattern( constrain_pattern(
&loc_pattern.value, &loc_pattern.value,
loc_pattern.region, loc_pattern.region,
pattern_expected, pattern_expected.clone(),
&mut state, &mut state,
); );
}
if let Some(loc_guard) = &when_branch.guard {
let guard_constraint = constrain_expr(
env,
region,
&loc_guard.value,
Expected::ForReason(
Reason::WhenGuard,
Type::Variable(Variable::BOOL),
loc_guard.region,
),
);
// must introduce the headers from the pattern before constraining the guard
Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: state.vars,
def_types: state.headers,
def_aliases: SendMap::default(),
defs_constraint: Constraint::And(state.constraints),
ret_constraint: Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: Vec::new(),
def_types: SendMap::default(),
def_aliases: SendMap::default(),
defs_constraint: guard_constraint,
ret_constraint,
})),
}))
} else {
Constraint::Let(Box::new(LetConstraint { Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(), rigid_vars: Vec::new(),
flex_vars: state.vars, flex_vars: state.vars,
@ -662,6 +847,7 @@ fn constrain_when_branch(
defs_constraint: Constraint::And(state.constraints), defs_constraint: Constraint::And(state.constraints),
ret_constraint, ret_constraint,
})) }))
}
} }
fn constrain_field(env: &Env, field_var: Variable, loc_expr: &Located<Expr>) -> (Type, Constraint) { fn constrain_field(env: &Env, field_var: Variable, loc_expr: &Located<Expr>) -> (Type, Constraint) {
@ -674,7 +860,7 @@ fn constrain_field(env: &Env, field_var: Variable, loc_expr: &Located<Expr>) ->
#[inline(always)] #[inline(always)]
fn constrain_empty_record(region: Region, expected: Expected<Type>) -> Constraint { fn constrain_empty_record(region: Region, expected: Expected<Type>) -> Constraint {
Eq(EmptyRec, expected, region) Eq(EmptyRec, expected, Category::Record, region)
} }
/// Constrain top-level module declarations /// Constrain top-level module declarations
@ -755,16 +941,16 @@ fn constrain_def(env: &Env, def: &Def, body_con: Constraint) -> Constraint {
let mut new_rigids = Vec::new(); let mut new_rigids = Vec::new();
let expr_con = match &def.annotation { let expr_con = match &def.annotation {
Some((annotation, introduced_vars, ann_def_aliases)) => { Some(annotation) => {
def_aliases = ann_def_aliases.clone(); def_aliases = annotation.aliases.clone();
let arity = annotation.arity(); let arity = annotation.signature.arity();
let rigids = &env.rigids; let rigids = &env.rigids;
let mut ftv = rigids.clone(); let mut ftv = rigids.clone();
let annotation = instantiate_rigids( let signature = instantiate_rigids(
annotation, &annotation.signature,
&introduced_vars, &annotation.introduced_variables,
&mut new_rigids, &mut new_rigids,
&mut ftv, &mut ftv,
&def.loc_pattern, &def.loc_pattern,
@ -774,13 +960,16 @@ fn constrain_def(env: &Env, def: &Def, body_con: Constraint) -> Constraint {
let annotation_expected = FromAnnotation( let annotation_expected = FromAnnotation(
def.loc_pattern.clone(), def.loc_pattern.clone(),
arity, arity,
AnnotationSource::TypedBody, AnnotationSource::TypedBody {
annotation, region: annotation.region,
},
signature,
); );
pattern_state.constraints.push(Eq( pattern_state.constraints.push(Eq(
expr_type, expr_type,
annotation_expected.clone(), annotation_expected.clone(),
Category::Storage,
// TODO proper region // TODO proper region
Region::zero(), Region::zero(),
)); ));
@ -925,17 +1114,17 @@ pub fn rec_defs_help(
flex_info.def_types.extend(pattern_state.headers); flex_info.def_types.extend(pattern_state.headers);
} }
Some((annotation, introduced_vars, ann_def_aliases)) => { Some(annotation) => {
for (symbol, alias) in ann_def_aliases.clone() { for (symbol, alias) in annotation.aliases.clone() {
def_aliases.insert(symbol, alias); def_aliases.insert(symbol, alias);
} }
let arity = annotation.arity(); let arity = annotation.signature.arity();
let mut ftv = env.rigids.clone(); let mut ftv = env.rigids.clone();
let annotation = instantiate_rigids( let signature = instantiate_rigids(
annotation, &annotation.signature,
&introduced_vars, &annotation.introduced_variables,
&mut new_rigids, &mut new_rigids,
&mut ftv, &mut ftv,
&def.loc_pattern, &def.loc_pattern,
@ -945,8 +1134,10 @@ pub fn rec_defs_help(
let annotation_expected = FromAnnotation( let annotation_expected = FromAnnotation(
def.loc_pattern.clone(), def.loc_pattern.clone(),
arity, arity,
AnnotationSource::TypedBody, AnnotationSource::TypedBody {
annotation.clone(), region: annotation.region,
},
signature.clone(),
); );
let expr_con = constrain_expr( let expr_con = constrain_expr(
&Env { &Env {
@ -962,6 +1153,7 @@ pub fn rec_defs_help(
rigid_info.constraints.push(Eq( rigid_info.constraints.push(Eq(
expr_type, expr_type,
annotation_expected.clone(), annotation_expected.clone(),
Category::Storage,
def.loc_expr.region, def.loc_expr.region,
)); ));

View file

@ -3,12 +3,12 @@ use roc_can::constraint::Constraint;
use roc_can::expected::{Expected, PExpected}; use roc_can::expected::{Expected, PExpected};
use roc_can::pattern::Pattern::{self, *}; use roc_can::pattern::Pattern::{self, *};
use roc_can::pattern::RecordDestruct; use roc_can::pattern::RecordDestruct;
use roc_collections::all::SendMap; use roc_collections::all::{Index, SendMap};
use roc_module::ident::Lowercase; use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::Variable; use roc_types::subs::Variable;
use roc_types::types::{PatternCategory, Type}; use roc_types::types::{Category, PReason, PatternCategory, Type};
pub struct PatternState { pub struct PatternState {
pub headers: SendMap<Symbol, Located<Type>>, pub headers: SendMap<Symbol, Located<Type>>,
@ -53,11 +53,12 @@ fn headers_from_annotation_help(
Underscore Underscore
| Shadowed(_, _) | Shadowed(_, _)
| UnsupportedPattern(_) | UnsupportedPattern(_)
| NumLiteral(_, _)
| IntLiteral(_) | IntLiteral(_)
| FloatLiteral(_) | FloatLiteral(_)
| StrLiteral(_) => true, | StrLiteral(_) => true,
RecordDestructure(_, destructs) => match annotation.value.shallow_dealias() { RecordDestructure { destructs, .. } => match annotation.value.shallow_dealias() {
Type::Record(fields, _) => { Type::Record(fields, _) => {
for destruct in destructs { for destruct in destructs {
// NOTE ignores the .guard field. // NOTE ignores the .guard field.
@ -76,7 +77,11 @@ fn headers_from_annotation_help(
_ => false, _ => false,
}, },
AppliedTag(_, tag_name, arguments) => match annotation.value.shallow_dealias() { AppliedTag {
tag_name,
arguments,
..
} => match annotation.value.shallow_dealias() {
Type::TagUnion(tags, _) => { Type::TagUnion(tags, _) => {
if let Some((_, arg_types)) = tags.iter().find(|(name, _)| name == tag_name) { if let Some((_, arg_types)) = tags.iter().find(|(name, _)| name == tag_name) {
if !arguments.len() == arg_types.len() { if !arguments.len() == arg_types.len() {
@ -112,9 +117,10 @@ pub fn constrain_pattern(
state: &mut PatternState, state: &mut PatternState,
) { ) {
match pattern { match pattern {
Underscore | UnsupportedPattern(_) => { Underscore | UnsupportedPattern(_) | Shadowed(_, _) => {
// Neither the _ pattern nor erroneous ones add any constraints. // Neither the _ pattern nor erroneous ones add any constraints.
} }
Identifier(symbol) => { Identifier(symbol) => {
state.headers.insert( state.headers.insert(
symbol.clone(), symbol.clone(),
@ -124,10 +130,22 @@ pub fn constrain_pattern(
}, },
); );
} }
NumLiteral(var, _) => {
state.vars.push(*var);
state.constraints.push(Constraint::Pattern(
region,
PatternCategory::Num,
builtins::builtin_type(Symbol::NUM_NUM, vec![Type::Variable(*var)]),
expected,
));
}
IntLiteral(_) => { IntLiteral(_) => {
state.constraints.push(Constraint::Pattern( state.constraints.push(Constraint::Pattern(
region, region,
PatternCategory::Int, PatternCategory::Float,
builtins::builtin_type(Symbol::INT_INT, vec![]), builtins::builtin_type(Symbol::INT_INT, vec![]),
expected, expected,
)); ));
@ -151,7 +169,12 @@ pub fn constrain_pattern(
)); ));
} }
RecordDestructure(ext_var, patterns) => { RecordDestructure {
whole_var,
ext_var,
destructs,
} => {
state.vars.push(*whole_var);
state.vars.push(*ext_var); state.vars.push(*ext_var);
let ext_type = Type::Variable(*ext_var); let ext_type = Type::Variable(*ext_var);
@ -166,7 +189,7 @@ pub fn constrain_pattern(
guard, guard,
}, },
.. ..
} in patterns } in destructs
{ {
let pat_type = Type::Variable(*var); let pat_type = Type::Variable(*var);
let expected = PExpected::NoExpectation(pat_type.clone()); let expected = PExpected::NoExpectation(pat_type.clone());
@ -180,10 +203,15 @@ pub fn constrain_pattern(
field_types.insert(label.clone(), pat_type.clone()); field_types.insert(label.clone(), pat_type.clone());
if let Some((guard_var, loc_guard)) = guard { if let Some((guard_var, loc_guard)) = guard {
state.constraints.push(Constraint::Eq( state.constraints.push(Constraint::Pattern(
Type::Variable(*guard_var),
Expected::NoExpectation(pat_type.clone()),
region, region,
PatternCategory::PatternGuard,
Type::Variable(*guard_var),
PExpected::ForReason(
PReason::PatternGuard,
pat_type.clone(),
loc_guard.region,
),
)); ));
state.vars.push(*guard_var); state.vars.push(*guard_var);
@ -194,38 +222,69 @@ pub fn constrain_pattern(
} }
let record_type = Type::Record(field_types, Box::new(ext_type)); let record_type = Type::Record(field_types, Box::new(ext_type));
let record_con =
Constraint::Pattern(region, PatternCategory::Record, record_type, expected);
let whole_con = Constraint::Eq(
Type::Variable(*whole_var),
Expected::NoExpectation(record_type),
Category::Storage,
region,
);
let record_con = Constraint::Pattern(
region,
PatternCategory::Record,
Type::Variable(*whole_var),
expected,
);
state.constraints.push(whole_con);
state.constraints.push(record_con); state.constraints.push(record_con);
} }
AppliedTag(ext_var, tag_name, patterns) => { AppliedTag {
let mut argument_types = Vec::with_capacity(patterns.len()); whole_var,
for (pattern_var, loc_pattern) in patterns { ext_var,
tag_name,
arguments,
} => {
let mut argument_types = Vec::with_capacity(arguments.len());
for (index, (pattern_var, loc_pattern)) in arguments.iter().enumerate() {
state.vars.push(*pattern_var); state.vars.push(*pattern_var);
let pattern_type = Type::Variable(*pattern_var); let pattern_type = Type::Variable(*pattern_var);
argument_types.push(pattern_type.clone()); argument_types.push(pattern_type.clone());
let expected = PExpected::NoExpectation(pattern_type); let expected = PExpected::ForReason(
PReason::TagArg {
tag_name: tag_name.clone(),
index: Index::zero_based(index),
},
pattern_type,
region,
);
constrain_pattern(&loc_pattern.value, loc_pattern.region, expected, state); constrain_pattern(&loc_pattern.value, loc_pattern.region, expected, state);
} }
let whole_con = Constraint::Eq(
Type::Variable(*whole_var),
Expected::NoExpectation(Type::TagUnion(
vec![(tag_name.clone(), argument_types)],
Box::new(Type::Variable(*ext_var)),
)),
Category::Storage,
region,
);
let tag_con = Constraint::Pattern( let tag_con = Constraint::Pattern(
region, region,
PatternCategory::Ctor(tag_name.clone()), PatternCategory::Ctor(tag_name.clone()),
Type::TagUnion( Type::Variable(*whole_var),
vec![(tag_name.clone(), argument_types)],
Box::new(Type::Variable(*ext_var)),
),
expected, expected,
); );
state.vars.push(*whole_var);
state.vars.push(*ext_var); state.vars.push(*ext_var);
state.constraints.push(whole_con);
state.constraints.push(tag_con); state.constraints.push(tag_con);
} }
Shadowed(_, _) => {
panic!("TODO constrain Shadowed pattern");
}
} }
} }

View file

@ -4,17 +4,17 @@ use roc_can::constraint::Constraint::{self, *};
use roc_can::constraint::LetConstraint; use roc_can::constraint::LetConstraint;
use roc_can::def::{Declaration, Def}; use roc_can::def::{Declaration, Def};
use roc_can::expected::{Expected, PExpected}; use roc_can::expected::{Expected, PExpected};
use roc_can::expr::{Expr, Field}; use roc_can::expr::{Expr, Field, WhenBranch};
use roc_can::pattern::{Pattern, RecordDestruct}; use roc_can::pattern::{Pattern, RecordDestruct};
use roc_collections::all::{ImMap, ImSet, SendMap}; use roc_collections::all::{ImMap, ImSet, Index, SendMap};
use roc_module::ident::{Ident, Lowercase, TagName}; use roc_module::ident::{Ident, Lowercase};
use roc_module::symbol::{ModuleId, Symbol}; use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::boolean_algebra::{Atom, Bool}; use roc_types::boolean_algebra::{Atom, Bool};
use roc_types::subs::{VarStore, Variable}; use roc_types::subs::{VarStore, Variable};
use roc_types::types::AnnotationSource::{self, *}; use roc_types::types::AnnotationSource::{self, *};
use roc_types::types::Type::{self, *}; use roc_types::types::Type::{self, *};
use roc_types::types::{Alias, PReason, Reason}; use roc_types::types::{Alias, Category, PReason, Reason};
use roc_uniq::builtins::{attr_type, empty_list_type, list_type, str_type}; use roc_uniq::builtins::{attr_type, empty_list_type, list_type, str_type};
use roc_uniq::sharing::{self, Container, FieldAccess, Mark, Usage, VarUsage}; use roc_uniq::sharing::{self, Container, FieldAccess, Mark, Usage, VarUsage};
@ -143,6 +143,8 @@ fn constrain_pattern(
use roc_can::pattern::Pattern::*; use roc_can::pattern::Pattern::*;
use roc_types::types::PatternCategory; use roc_types::types::PatternCategory;
let region = pattern.region;
match &pattern.value { match &pattern.value {
Identifier(symbol) => { Identifier(symbol) => {
state.headers.insert( state.headers.insert(
@ -154,6 +156,14 @@ fn constrain_pattern(
); );
} }
NumLiteral(inner_var, _) => {
let (num_uvar, val_uvar, num_type, num_var) = unique_unbound_num(*inner_var, var_store);
state.constraints.push(exists(
vec![val_uvar, num_uvar, num_var, *inner_var],
Constraint::Pattern(pattern.region, PatternCategory::Num, num_type, expected),
));
}
IntLiteral(_) => { IntLiteral(_) => {
let (num_uvar, int_uvar, num_type) = unique_int(var_store); let (num_uvar, int_uvar, num_type) = unique_int(var_store);
state.constraints.push(exists( state.constraints.push(exists(
@ -182,10 +192,15 @@ fn constrain_pattern(
)); ));
} }
RecordDestructure(ext_var, patterns) => { RecordDestructure {
whole_var,
ext_var,
destructs,
} => {
// TODO if a subpattern doesn't bind any identifiers, it doesn't count for uniqueness // TODO if a subpattern doesn't bind any identifiers, it doesn't count for uniqueness
let mut pattern_uniq_vars = Vec::with_capacity(patterns.len()); let mut pattern_uniq_vars = Vec::with_capacity(destructs.len());
state.vars.push(*whole_var);
state.vars.push(*ext_var); state.vars.push(*ext_var);
let ext_type = Type::Variable(*ext_var); let ext_type = Type::Variable(*ext_var);
@ -199,7 +214,7 @@ fn constrain_pattern(
guard, guard,
}, },
.. ..
} in patterns } in destructs
{ {
let pat_uniq_var = var_store.fresh(); let pat_uniq_var = var_store.fresh();
pattern_uniq_vars.push(pat_uniq_var); pattern_uniq_vars.push(pat_uniq_var);
@ -217,10 +232,11 @@ fn constrain_pattern(
field_types.insert(label.clone(), pat_type.clone()); field_types.insert(label.clone(), pat_type.clone());
if let Some((guard_var, loc_guard)) = guard { if let Some((guard_var, loc_guard)) = guard {
state.constraints.push(Eq( state.constraints.push(Constraint::Pattern(
Type::Variable(*guard_var),
Expected::NoExpectation(pat_type.clone()),
pattern.region, pattern.region,
PatternCategory::PatternGuard,
Type::Variable(*guard_var),
PExpected::NoExpectation(pat_type.clone()),
)); ));
state.vars.push(*guard_var); state.vars.push(*guard_var);
constrain_pattern(var_store, state, loc_guard, expected); constrain_pattern(var_store, state, loc_guard, expected);
@ -243,22 +259,36 @@ fn constrain_pattern(
record_uniq_type, record_uniq_type,
Type::Record(field_types, Box::new(ext_type)), Type::Record(field_types, Box::new(ext_type)),
); );
let whole_con = Constraint::Eq(
Type::Variable(*whole_var),
Expected::NoExpectation(record_type),
Category::Storage,
region,
);
let record_con = Constraint::Pattern( let record_con = Constraint::Pattern(
pattern.region, region,
PatternCategory::Record, PatternCategory::Record,
record_type, Type::Variable(*whole_var),
expected, expected,
); );
state.constraints.push(whole_con);
state.constraints.push(record_con); state.constraints.push(record_con);
} }
AppliedTag(ext_var, symbol, patterns) => { AppliedTag {
whole_var,
ext_var,
tag_name,
arguments,
} => {
// TODO if a subpattern doesn't bind any identifiers, it doesn't count for uniqueness // TODO if a subpattern doesn't bind any identifiers, it doesn't count for uniqueness
let mut argument_types = Vec::with_capacity(patterns.len()); let mut argument_types = Vec::with_capacity(arguments.len());
let mut pattern_uniq_vars = Vec::with_capacity(patterns.len()); let mut pattern_uniq_vars = Vec::with_capacity(arguments.len());
for (pattern_var, loc_pattern) in patterns { for (pattern_var, loc_pattern) in arguments {
state.vars.push(*pattern_var); state.vars.push(*pattern_var);
let pat_uniq_var = var_store.fresh(); let pat_uniq_var = var_store.fresh();
@ -284,19 +314,29 @@ fn constrain_pattern(
let union_type = attr_type( let union_type = attr_type(
tag_union_uniq_type, tag_union_uniq_type,
Type::TagUnion( Type::TagUnion(
vec![(symbol.clone(), argument_types)], vec![(tag_name.clone(), argument_types)],
Box::new(Type::Variable(*ext_var)), Box::new(Type::Variable(*ext_var)),
), ),
); );
let whole_con = Constraint::Eq(
Type::Variable(*whole_var),
Expected::NoExpectation(union_type),
Category::Storage,
region,
);
let tag_con = Constraint::Pattern( let tag_con = Constraint::Pattern(
pattern.region, region,
PatternCategory::Ctor(symbol.clone()), PatternCategory::Ctor(tag_name.clone()),
union_type, Type::Variable(*whole_var),
expected, expected,
); );
state.vars.push(*whole_var);
state.vars.push(*ext_var); state.vars.push(*ext_var);
state.constraints.push(whole_con);
state.constraints.push(tag_con); state.constraints.push(tag_con);
} }
@ -306,6 +346,23 @@ fn constrain_pattern(
} }
} }
fn unique_unbound_num(
inner_var: Variable,
var_store: &VarStore,
) -> (Variable, Variable, Type, Variable) {
let num_var = var_store.fresh();
let num_uvar = var_store.fresh();
let val_uvar = var_store.fresh();
let val_type = Type::Variable(inner_var);
let val_utype = attr_type(Bool::variable(val_uvar), val_type);
let num_utype = Type::Apply(Symbol::NUM_NUM, vec![val_utype]);
let num_type = attr_type(Bool::variable(num_uvar), num_utype);
(num_uvar, val_uvar, num_type, num_var)
}
fn unique_num(var_store: &VarStore, symbol: Symbol) -> (Variable, Variable, Type) { fn unique_num(var_store: &VarStore, symbol: Symbol) -> (Variable, Variable, Type) {
let num_uvar = var_store.fresh(); let num_uvar = var_store.fresh();
let val_uvar = var_store.fresh(); let val_uvar = var_store.fresh();
@ -339,6 +396,23 @@ pub fn constrain_expr(
pub use roc_can::expr::Expr::*; pub use roc_can::expr::Expr::*;
match expr { match expr {
Num(inner_var, _) => {
let var = var_store.fresh();
let (num_uvar, val_uvar, num_type, num_var) = unique_unbound_num(*inner_var, var_store);
exists(
vec![var, *inner_var, val_uvar, num_uvar, num_var],
And(vec![
Eq(
Type::Variable(var),
Expected::ForReason(Reason::NumLiteral, num_type, region),
Category::Num,
region,
),
Eq(Type::Variable(var), expected, Category::Num, region),
]),
)
}
Int(var, _) => { Int(var, _) => {
let (num_uvar, int_uvar, num_type) = unique_int(var_store); let (num_uvar, int_uvar, num_type) = unique_int(var_store);
@ -348,9 +422,10 @@ pub fn constrain_expr(
Eq( Eq(
Type::Variable(*var), Type::Variable(*var),
Expected::ForReason(Reason::IntLiteral, num_type, region), Expected::ForReason(Reason::IntLiteral, num_type, region),
Category::Int,
region, region,
), ),
Eq(Type::Variable(*var), expected, region), Eq(Type::Variable(*var), expected, Category::Int, region),
]), ]),
) )
} }
@ -363,9 +438,10 @@ pub fn constrain_expr(
Eq( Eq(
Type::Variable(*var), Type::Variable(*var),
Expected::ForReason(Reason::FloatLiteral, num_type, region), Expected::ForReason(Reason::FloatLiteral, num_type, region),
Category::Float,
region, region,
), ),
Eq(Type::Variable(*var), expected, region), Eq(Type::Variable(*var), expected, Category::Float, region),
]), ]),
) )
} }
@ -373,7 +449,10 @@ pub fn constrain_expr(
let uniq_type = var_store.fresh(); let uniq_type = var_store.fresh();
let inferred = str_type(Bool::variable(uniq_type)); let inferred = str_type(Bool::variable(uniq_type));
exists(vec![uniq_type], Eq(inferred, expected, region)) exists(
vec![uniq_type],
Eq(inferred, expected, Category::Str, region),
)
} }
EmptyRecord => { EmptyRecord => {
let uniq_type = var_store.fresh(); let uniq_type = var_store.fresh();
@ -383,16 +462,17 @@ pub fn constrain_expr(
Eq( Eq(
attr_type(Bool::variable(uniq_type), EmptyRec), attr_type(Bool::variable(uniq_type), EmptyRec),
expected, expected,
Category::Record,
region, region,
), ),
) )
} }
Record(variable, fields) => { Record { record_var, fields } => {
// NOTE: canonicalization guarantees at least one field // NOTE: canonicalization guarantees at least one field
// zero fields generates an EmptyRecord // zero fields generates an EmptyRecord
let mut field_types = SendMap::default(); let mut field_types = SendMap::default();
let mut field_vars = Vec::with_capacity(fields.len()); let mut field_vars = Vec::with_capacity(fields.len());
field_vars.push(*variable); field_vars.push(*record_var);
// Constraints need capacity for each field + 1 for the record itself + 1 for ext // Constraints need capacity for each field + 1 for the record itself + 1 for ext
let mut constraints = Vec::with_capacity(2 + fields.len()); let mut constraints = Vec::with_capacity(2 + fields.len());
@ -430,8 +510,13 @@ pub fn constrain_expr(
Box::new(Type::EmptyRec), Box::new(Type::EmptyRec),
), ),
); );
let record_con = Eq(record_type, expected.clone(), region); let record_con = Eq(record_type, expected.clone(), Category::Record, region);
let ext_con = Eq(Type::Variable(*variable), expected, region); let ext_con = Eq(
Type::Variable(*record_var),
expected,
Category::Record,
region,
);
constraints.push(record_con); constraints.push(record_con);
constraints.push(ext_con); constraints.push(ext_con);
@ -474,8 +559,18 @@ pub fn constrain_expr(
), ),
); );
let union_con = Eq(union_type, expected.clone(), region); let union_con = Eq(
let ast_con = Eq(Type::Variable(*variant_var), expected, region); union_type,
expected.clone(),
Category::TagApply(name.clone()),
region,
);
let ast_con = Eq(
Type::Variable(*variant_var),
expected,
Category::TagApply(name.clone()),
region,
);
vars.push(uniq_var); vars.push(uniq_var);
vars.push(*variant_var); vars.push(*variant_var);
@ -492,15 +587,23 @@ pub fn constrain_expr(
let uniq_var = var_store.fresh(); let uniq_var = var_store.fresh();
if loc_elems.is_empty() { if loc_elems.is_empty() {
let inferred = empty_list_type(Bool::variable(uniq_var), *elem_var); let inferred = empty_list_type(Bool::variable(uniq_var), *elem_var);
exists(vec![*elem_var, uniq_var], Eq(inferred, expected, region)) exists(
vec![*elem_var, uniq_var],
Eq(inferred, expected, Category::List, region),
)
} else { } else {
// constrain `expected ~ List a` and that all elements `~ a`. // constrain `expected ~ List a` and that all elements `~ a`.
let entry_type = Type::Variable(*elem_var); let entry_type = Type::Variable(*elem_var);
let mut constraints = Vec::with_capacity(1 + loc_elems.len()); let mut constraints = Vec::with_capacity(1 + loc_elems.len());
for loc_elem in loc_elems.iter() { for (index, loc_elem) in loc_elems.iter().enumerate() {
let elem_expected = let elem_expected = Expected::ForReason(
Expected::ForReason(Reason::ElemInList, entry_type.clone(), region); Reason::ElemInList {
index: Index::zero_based(index),
},
entry_type.clone(),
region,
);
let constraint = constrain_expr( let constraint = constrain_expr(
env, env,
var_store, var_store,
@ -515,7 +618,7 @@ pub fn constrain_expr(
} }
let inferred = list_type(Bool::variable(uniq_var), entry_type); let inferred = list_type(Bool::variable(uniq_var), entry_type);
constraints.push(Eq(inferred, expected, region)); constraints.push(Eq(inferred, expected, Category::List, region));
exists(vec![*elem_var, uniq_var], And(constraints)) exists(vec![*elem_var, uniq_var], And(constraints))
} }
@ -599,11 +702,12 @@ pub fn constrain_expr(
ret_constraint, ret_constraint,
})), })),
// "the closure's type is equal to expected type" // "the closure's type is equal to expected type"
Eq(fn_type.clone(), expected, region), Eq(fn_type.clone(), expected, Category::Lambda, region),
// "fn_var is equal to the closure's type" - fn_var is used in code gen // "fn_var is equal to the closure's type" - fn_var is used in code gen
Eq( Eq(
Type::Variable(*fn_var), Type::Variable(*fn_var),
Expected::NoExpectation(fn_type), Expected::NoExpectation(fn_type),
Category::Lambda,
region, region,
), ),
]), ]),
@ -617,6 +721,12 @@ pub fn constrain_expr(
let fn_expected = Expected::NoExpectation(fn_type.clone()); let fn_expected = Expected::NoExpectation(fn_type.clone());
let fn_region = fn_expr.region; let fn_region = fn_expr.region;
let opt_symbol = if let Var(symbol) = fn_expr.value {
Some(symbol)
} else {
None
};
let mut vars = Vec::with_capacity(2 + loc_args.len()); let mut vars = Vec::with_capacity(2 + loc_args.len());
vars.push(*fn_var); vars.push(*fn_var);
@ -633,8 +743,8 @@ pub fn constrain_expr(
fn_expected, fn_expected,
); );
// TODO look up the name and use NamedFnArg if possible. let fn_reason = Reason::FnCall {
let fn_reason = Reason::AnonymousFnCall { name: opt_symbol,
arity: loc_args.len() as u8, arity: loc_args.len() as u8,
}; };
@ -645,8 +755,9 @@ pub fn constrain_expr(
let region = loc_arg.region; let region = loc_arg.region;
let arg_type = Variable(*arg_var); let arg_type = Variable(*arg_var);
let reason = Reason::AnonymousFnArg { let reason = Reason::FnArg {
arg_index: index as u8, name: opt_symbol,
arg_index: Index::zero_based(index),
}; };
let expected_arg = Expected::ForReason(reason, arg_type.clone(), region); let expected_arg = Expected::ForReason(reason, arg_type.clone(), region);
@ -680,9 +791,14 @@ pub fn constrain_expr(
vars, vars,
And(vec![ And(vec![
fn_con, fn_con,
Eq(fn_type, expected_fn_type, fn_region), Eq(
fn_type,
expected_fn_type,
Category::CallResult(opt_symbol),
fn_region,
),
And(arg_cons), And(arg_cons),
Eq(ret_type, expected, region), Eq(ret_type, expected, Category::CallResult(opt_symbol), region),
]), ]),
) )
} }
@ -716,7 +832,12 @@ pub fn constrain_expr(
), ),
// Record the type of the entire def-expression in the variable. // Record the type of the entire def-expression in the variable.
// Code gen will need that later! // Code gen will need that later!
Eq(Type::Variable(*var), expected, loc_ret.region), Eq(
Type::Variable(*var),
expected,
Category::Storage,
loc_ret.region,
),
]), ]),
) )
} }
@ -750,7 +871,12 @@ pub fn constrain_expr(
), ),
// Record the type of the entire def-expression in the variable. // Record the type of the entire def-expression in the variable.
// Code gen will need that later! // Code gen will need that later!
Eq(Type::Variable(*var), expected, loc_ret.region), Eq(
Type::Variable(*var),
expected,
Category::Storage,
loc_ret.region,
),
]), ]),
) )
} }
@ -761,17 +887,26 @@ pub fn constrain_expr(
final_else, final_else,
} => { } => {
// TODO use Bool alias here, so we don't allocate this type every time // TODO use Bool alias here, so we don't allocate this type every time
let bool_type = Type::TagUnion( let bool_type = Type::Variable(Variable::BOOL);
vec![
(TagName::Global("True".into()), vec![]),
(TagName::Global("False".into()), vec![]),
],
Box::new(Type::EmptyTagUnion),
);
let mut branch_cons = Vec::with_capacity(2 * branches.len() + 2); let mut branch_cons = Vec::with_capacity(2 * branches.len() + 2);
let mut cond_uniq_vars = Vec::with_capacity(branches.len() + 2); let mut cond_uniq_vars = Vec::with_capacity(branches.len() + 2);
// TODO why does this cond var exist? is it for error messages?
let cond_uniq_var = var_store.fresh();
cond_uniq_vars.push(cond_uniq_var);
let cond_var_is_bool_con = Eq(
Type::Variable(*cond_var),
Expected::ForReason(
Reason::IfCondition,
attr_type(Bool::variable(cond_uniq_var), bool_type.clone()),
region,
),
Category::If,
Region::zero(),
);
branch_cons.push(cond_var_is_bool_con);
match expected { match expected {
Expected::FromAnnotation(name, arity, _, tipe) => { Expected::FromAnnotation(name, arity, _, tipe) => {
for (index, (loc_cond, loc_body)) in branches.iter().enumerate() { for (index, (loc_cond, loc_body)) in branches.iter().enumerate() {
@ -783,11 +918,16 @@ pub fn constrain_expr(
); );
cond_uniq_vars.push(cond_uniq_var); cond_uniq_vars.push(cond_uniq_var);
let cond_con = Eq( let cond_con = constrain_expr(
Type::Variable(*cond_var), env,
expect_bool.clone(), var_store,
var_usage,
applied_usage_constraint,
loc_cond.region, loc_cond.region,
&loc_cond.value,
expect_bool,
); );
let then_con = constrain_expr( let then_con = constrain_expr(
env, env,
var_store, var_store,
@ -798,7 +938,10 @@ pub fn constrain_expr(
Expected::FromAnnotation( Expected::FromAnnotation(
name.clone(), name.clone(),
arity, arity,
AnnotationSource::TypedIfBranch(index + 1), AnnotationSource::TypedIfBranch {
index: Index::zero_based(index),
num_branches: branches.len(),
},
tipe.clone(), tipe.clone(),
), ),
); );
@ -816,7 +959,10 @@ pub fn constrain_expr(
Expected::FromAnnotation( Expected::FromAnnotation(
name, name,
arity, arity,
AnnotationSource::TypedIfBranch(branches.len() + 1), AnnotationSource::TypedIfBranch {
index: Index::zero_based(branches.len()),
num_branches: branches.len(),
},
tipe.clone(), tipe.clone(),
), ),
); );
@ -824,6 +970,7 @@ pub fn constrain_expr(
let ast_con = Eq( let ast_con = Eq(
Type::Variable(*branch_var), Type::Variable(*branch_var),
Expected::NoExpectation(tipe), Expected::NoExpectation(tipe),
Category::Storage,
region, region,
); );
@ -845,11 +992,16 @@ pub fn constrain_expr(
); );
cond_uniq_vars.push(cond_uniq_var); cond_uniq_vars.push(cond_uniq_var);
let cond_con = Eq( let cond_con = constrain_expr(
Type::Variable(*cond_var), env,
expect_bool.clone(), var_store,
var_usage,
applied_usage_constraint,
loc_cond.region, loc_cond.region,
&loc_cond.value,
expect_bool,
); );
let then_con = constrain_expr( let then_con = constrain_expr(
env, env,
var_store, var_store,
@ -858,7 +1010,10 @@ pub fn constrain_expr(
loc_body.region, loc_body.region,
&loc_body.value, &loc_body.value,
Expected::ForReason( Expected::ForReason(
Reason::IfBranch { index: index + 1 }, Reason::IfBranch {
index: Index::zero_based(index),
total_branches: branches.len(),
},
Type::Variable(*branch_var), Type::Variable(*branch_var),
region, region,
), ),
@ -876,14 +1031,20 @@ pub fn constrain_expr(
&final_else.value, &final_else.value,
Expected::ForReason( Expected::ForReason(
Reason::IfBranch { Reason::IfBranch {
index: branches.len() + 1, index: Index::zero_based(branches.len()),
total_branches: branches.len(),
}, },
Type::Variable(*branch_var), Type::Variable(*branch_var),
region, region,
), ),
); );
branch_cons.push(Eq(Type::Variable(*branch_var), expected, region)); branch_cons.push(Eq(
Type::Variable(*branch_var),
expected,
Category::If,
region,
));
branch_cons.push(else_con); branch_cons.push(else_con);
cond_uniq_vars.push(*cond_var); cond_uniq_vars.push(*cond_var);
@ -898,6 +1059,7 @@ pub fn constrain_expr(
expr_var, expr_var,
loc_cond, loc_cond,
branches, branches,
..
} => { } => {
let cond_var = *cond_var; let cond_var = *cond_var;
let cond_type = Variable(cond_var); let cond_type = Variable(cond_var);
@ -916,26 +1078,37 @@ pub fn constrain_expr(
match &expected { match &expected {
Expected::FromAnnotation(name, arity, _, typ) => { Expected::FromAnnotation(name, arity, _, typ) => {
constraints.push(Eq(Type::Variable(*expr_var), expected.clone(), region)); constraints.push(Eq(
Type::Variable(*expr_var),
expected.clone(),
Category::When,
region,
));
for (index, when_branch) in branches.iter().enumerate() {
let pattern_region =
Region::across_all(when_branch.patterns.iter().map(|v| &v.region));
for (index, (loc_pattern, loc_expr)) in branches.iter().enumerate() {
let branch_con = constrain_when_branch( let branch_con = constrain_when_branch(
var_store, var_store,
var_usage, var_usage,
applied_usage_constraint, applied_usage_constraint,
env, env,
region, region,
&loc_pattern, when_branch,
loc_expr,
PExpected::ForReason( PExpected::ForReason(
PReason::WhenMatch { index }, PReason::WhenMatch {
index: Index::zero_based(index),
},
cond_type.clone(), cond_type.clone(),
region, pattern_region,
), ),
Expected::FromAnnotation( Expected::FromAnnotation(
name.clone(), name.clone(),
*arity, *arity,
TypedWhenBranch(index), TypedWhenBranch {
index: Index::zero_based(index),
},
typ.clone(), typ.clone(),
), ),
); );
@ -952,22 +1125,27 @@ pub fn constrain_expr(
let branch_type = Variable(*expr_var); let branch_type = Variable(*expr_var);
let mut branch_cons = Vec::with_capacity(branches.len()); let mut branch_cons = Vec::with_capacity(branches.len());
for (index, (loc_pattern, loc_expr)) in branches.iter().enumerate() { for (index, when_branch) in branches.iter().enumerate() {
let pattern_region =
Region::across_all(when_branch.patterns.iter().map(|v| &v.region));
let branch_con = constrain_when_branch( let branch_con = constrain_when_branch(
var_store, var_store,
var_usage, var_usage,
applied_usage_constraint, applied_usage_constraint,
env, env,
region, region,
&loc_pattern, when_branch,
loc_expr,
PExpected::ForReason( PExpected::ForReason(
PReason::WhenMatch { index }, PReason::WhenMatch {
index: Index::zero_based(index),
},
cond_type.clone(), cond_type.clone(),
region, pattern_region,
), ),
Expected::ForReason( Expected::ForReason(
Reason::WhenBranch { index }, Reason::WhenBranch {
index: Index::zero_based(index),
},
branch_type.clone(), branch_type.clone(),
region, region,
), ),
@ -982,7 +1160,7 @@ pub fn constrain_expr(
And(branch_cons), And(branch_cons),
// The return type of each branch must equal // The return type of each branch must equal
// the return type of the entire case-expression. // the return type of the entire case-expression.
Eq(branch_type, expected, region), Eq(branch_type, expected, Category::When, region),
])) ]))
} }
} }
@ -1020,7 +1198,7 @@ pub fn constrain_expr(
let fields_type = attr_type( let fields_type = attr_type(
Bool::variable(uniq_var), Bool::variable(uniq_var),
Type::Record(fields.clone(), Box::new(Type::Variable(*ext_var))), Type::Record(fields, Box::new(Type::Variable(*ext_var))),
); );
let record_type = Type::Variable(*record_var); let record_type = Type::Variable(*record_var);
@ -1028,9 +1206,10 @@ pub fn constrain_expr(
let fields_con = Eq( let fields_con = Eq(
record_type.clone(), record_type.clone(),
Expected::NoExpectation(fields_type), Expected::NoExpectation(fields_type),
Category::Record,
region, region,
); );
let record_con = Eq(record_type.clone(), expected, region); let record_con = Eq(record_type.clone(), expected, Category::Record, region);
vars.push(*record_var); vars.push(*record_var);
vars.push(*ext_var); vars.push(*ext_var);
@ -1038,7 +1217,13 @@ pub fn constrain_expr(
let con = Lookup( let con = Lookup(
*symbol, *symbol,
Expected::ForReason( Expected::ForReason(
Reason::RecordUpdateKeys(*symbol, fields), Reason::RecordUpdateKeys(
*symbol,
updates
.iter()
.map(|(key, field)| (key.clone(), field.region))
.collect(),
),
record_type, record_type,
region, region,
), ),
@ -1053,6 +1238,7 @@ pub fn constrain_expr(
} }
Access { Access {
record_var,
ext_var, ext_var,
field_var, field_var,
loc_expr, loc_expr,
@ -1074,7 +1260,16 @@ pub fn constrain_expr(
Type::Record(field_types, Box::new(Type::Variable(*ext_var))), Type::Record(field_types, Box::new(Type::Variable(*ext_var))),
); );
let category = Category::Access(field.clone());
let record_expected = Expected::NoExpectation(record_type); let record_expected = Expected::NoExpectation(record_type);
let record_con = Eq(
Type::Variable(*record_var),
record_expected.clone(),
category.clone(),
region,
);
let inner_constraint = constrain_expr( let inner_constraint = constrain_expr(
env, env,
var_store, var_store,
@ -1086,13 +1281,24 @@ pub fn constrain_expr(
); );
exists( exists(
vec![*field_var, *ext_var, field_uniq_var, record_uniq_var], vec![
And(vec![Eq(field_type, expected, region), inner_constraint]), *record_var,
*field_var,
*ext_var,
field_uniq_var,
record_uniq_var,
],
And(vec![
Eq(field_type, expected, category, region),
inner_constraint,
record_con,
]),
) )
} }
Accessor { Accessor {
field, field,
record_var,
field_var, field_var,
ext_var, ext_var,
} => { } => {
@ -1112,6 +1318,16 @@ pub fn constrain_expr(
Type::Record(field_types, Box::new(Type::Variable(*ext_var))), Type::Record(field_types, Box::new(Type::Variable(*ext_var))),
); );
let category = Category::Accessor(field.clone());
let record_expected = Expected::NoExpectation(record_type.clone());
let record_con = Eq(
Type::Variable(*record_var),
record_expected,
category.clone(),
region,
);
let fn_uniq_var = var_store.fresh(); let fn_uniq_var = var_store.fresh();
let fn_type = attr_type( let fn_type = attr_type(
Bool::variable(fn_uniq_var), Bool::variable(fn_uniq_var),
@ -1120,13 +1336,14 @@ pub fn constrain_expr(
exists( exists(
vec![ vec![
*record_var,
*field_var, *field_var,
*ext_var, *ext_var,
fn_uniq_var, fn_uniq_var,
field_uniq_var, field_uniq_var,
record_uniq_var, record_uniq_var,
], ],
And(vec![Eq(fn_type, expected, region)]), And(vec![Eq(fn_type, expected, category, region), record_con]),
) )
} }
RuntimeError(_) => True, RuntimeError(_) => True,
@ -1159,10 +1376,11 @@ fn constrain_var(
vec![val_var, uniq_var], vec![val_var, uniq_var],
And(vec![ And(vec![
Lookup(symbol_for_lookup, expected.clone(), region), Lookup(symbol_for_lookup, expected.clone(), region),
Eq(attr_type, expected, region), Eq(attr_type, expected, Category::Uniqueness, region),
Eq( Eq(
Type::Boolean(uniq_type), Type::Boolean(uniq_type),
Expected::NoExpectation(Type::Boolean(Bool::shared())), Expected::NoExpectation(Type::Boolean(Bool::shared())),
Category::Uniqueness,
region, region,
), ),
]), ]),
@ -1187,7 +1405,7 @@ fn constrain_var(
variables, variables,
And(vec![ And(vec![
Lookup(symbol_for_lookup, new_expected, region), Lookup(symbol_for_lookup, new_expected, region),
Eq(record_type, expected, region), Eq(record_type, expected, Category::Uniqueness, region),
]), ]),
) )
} }
@ -1327,15 +1545,15 @@ fn constrain_by_usage_record(
// TODO trim down these arguments // TODO trim down these arguments
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
#[inline(always)] // NOTE enabling the inline pragma can blow the stack in debug mode
// #[inline(always)]
fn constrain_when_branch( fn constrain_when_branch(
var_store: &VarStore, var_store: &VarStore,
var_usage: &VarUsage, var_usage: &VarUsage,
applied_usage_constraint: &mut ImSet<Symbol>, applied_usage_constraint: &mut ImSet<Symbol>,
env: &Env, env: &Env,
region: Region, region: Region,
loc_pattern: &Located<Pattern>, when_branch: &WhenBranch,
loc_expr: &Located<Expr>,
pattern_expected: PExpected<Type>, pattern_expected: PExpected<Type>,
expr_expected: Expected<Type>, expr_expected: Expected<Type>,
) -> Constraint { ) -> Constraint {
@ -1345,7 +1563,7 @@ fn constrain_when_branch(
var_usage, var_usage,
applied_usage_constraint, applied_usage_constraint,
region, region,
&loc_expr.value, &when_branch.value.value,
expr_expected, expr_expected,
); );
@ -1355,9 +1573,50 @@ fn constrain_when_branch(
constraints: Vec::with_capacity(1), constraints: Vec::with_capacity(1),
}; };
for loc_pattern in &when_branch.patterns {
// mutates the state, so return value is not used // mutates the state, so return value is not used
constrain_pattern(var_store, &mut state, &loc_pattern, pattern_expected); constrain_pattern(
var_store,
&mut state,
&loc_pattern,
pattern_expected.clone(),
);
}
if let Some(loc_guard) = &when_branch.guard {
let guard_uniq_var = var_store.fresh();
let bool_type = attr_type(
Bool::variable(guard_uniq_var),
Type::Variable(Variable::BOOL),
);
let guard_constraint = constrain_expr(
env,
var_store,
var_usage,
applied_usage_constraint,
loc_guard.region,
&loc_guard.value,
Expected::ForReason(Reason::WhenGuard, bool_type, loc_guard.region),
);
Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: state.vars,
def_types: state.headers,
def_aliases: SendMap::default(),
defs_constraint: Constraint::And(state.constraints),
ret_constraint: Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
flex_vars: vec![guard_uniq_var],
def_types: SendMap::default(),
def_aliases: SendMap::default(),
defs_constraint: guard_constraint,
ret_constraint,
})),
}))
} else {
Constraint::Let(Box::new(LetConstraint { Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(), rigid_vars: Vec::new(),
flex_vars: state.vars, flex_vars: state.vars,
@ -1366,6 +1625,7 @@ fn constrain_when_branch(
defs_constraint: Constraint::And(state.constraints), defs_constraint: Constraint::And(state.constraints),
ret_constraint, ret_constraint,
})) }))
}
} }
fn constrain_def_pattern( fn constrain_def_pattern(
@ -1638,15 +1898,15 @@ fn constrain_def(
let mut new_rigids = Vec::new(); let mut new_rigids = Vec::new();
let expr_con = match &def.annotation { let expr_con = match &def.annotation {
Some((annotation, introduced_vars, ann_def_aliases)) => { Some(annotation) => {
def_aliases = ann_def_aliases.clone(); def_aliases = annotation.aliases.clone();
let arity = annotation.arity(); let arity = annotation.signature.arity();
let mut ftv = env.rigids.clone(); let mut ftv = env.rigids.clone();
let annotation = instantiate_rigids( let signature = instantiate_rigids(
var_store, var_store,
annotation, &annotation.signature,
&introduced_vars, &annotation.introduced_variables,
&mut new_rigids, &mut new_rigids,
&mut ftv, &mut ftv,
&def.loc_pattern, &def.loc_pattern,
@ -1656,13 +1916,16 @@ fn constrain_def(
let annotation_expected = Expected::FromAnnotation( let annotation_expected = Expected::FromAnnotation(
def.loc_pattern.clone(), def.loc_pattern.clone(),
arity, arity,
AnnotationSource::TypedBody, AnnotationSource::TypedBody {
annotation, region: annotation.region,
},
signature,
); );
pattern_state.constraints.push(Eq( pattern_state.constraints.push(Eq(
expr_type, expr_type,
annotation_expected.clone(), annotation_expected.clone(),
Category::Storage,
Region::zero(), Region::zero(),
)); ));
@ -1859,16 +2122,16 @@ pub fn rec_defs_help(
flex_info.def_types.extend(pattern_state.headers); flex_info.def_types.extend(pattern_state.headers);
} }
Some((annotation, introduced_vars, ann_def_aliases)) => { Some(annotation) => {
for (symbol, alias) in ann_def_aliases.clone() { for (symbol, alias) in annotation.aliases.clone() {
def_aliases.insert(symbol, alias); def_aliases.insert(symbol, alias);
} }
let arity = annotation.arity(); let arity = annotation.signature.arity();
let mut ftv = env.rigids.clone(); let mut ftv = env.rigids.clone();
let annotation = instantiate_rigids( let signature = instantiate_rigids(
var_store, var_store,
annotation, &annotation.signature,
&introduced_vars, &annotation.introduced_variables,
&mut new_rigids, &mut new_rigids,
&mut ftv, &mut ftv,
&def.loc_pattern, &def.loc_pattern,
@ -1877,8 +2140,10 @@ pub fn rec_defs_help(
let annotation_expected = Expected::FromAnnotation( let annotation_expected = Expected::FromAnnotation(
def.loc_pattern.clone(), def.loc_pattern.clone(),
arity, arity,
AnnotationSource::TypedBody, AnnotationSource::TypedBody {
annotation.clone(), region: annotation.region,
},
signature.clone(),
); );
let expr_con = constrain_expr( let expr_con = constrain_expr(
&Env { &Env {
@ -1897,6 +2162,7 @@ pub fn rec_defs_help(
rigid_info.constraints.push(Eq( rigid_info.constraints.push(Eq(
expr_type, expr_type,
annotation_expected.clone(), annotation_expected.clone(),
Category::Storage,
def.loc_expr.region, def.loc_expr.region,
)); ));

View file

@ -89,7 +89,7 @@ pub fn fmt_expr<'a>(
} }
buf.push_str("\"\"\""); buf.push_str("\"\"\"");
} }
Int(string) | Float(string) | GlobalTag(string) | PrivateTag(string) => { Num(string) | Float(string) | GlobalTag(string) | PrivateTag(string) => {
buf.push_str(string) buf.push_str(string)
} }
NonBase10Int { NonBase10Int {
@ -440,7 +440,7 @@ pub fn is_multiline_pattern<'a>(pattern: &'a Pattern<'a>) -> bool {
| Pattern::Apply(_, _) | Pattern::Apply(_, _)
| Pattern::RecordDestructure(_) | Pattern::RecordDestructure(_)
| Pattern::RecordField(_, _) | Pattern::RecordField(_, _)
| Pattern::IntLiteral(_) | Pattern::NumLiteral(_)
| Pattern::NonBase10Literal { .. } | Pattern::NonBase10Literal { .. }
| Pattern::FloatLiteral(_) | Pattern::FloatLiteral(_)
| Pattern::StrLiteral(_) | Pattern::StrLiteral(_)
@ -464,7 +464,7 @@ pub fn is_multiline_expr<'a>(expr: &'a Expr<'a>) -> bool {
// These expressions never have newlines // These expressions never have newlines
Float(_) Float(_)
| Int(_) | Num(_)
| NonBase10Int { .. } | NonBase10Int { .. }
| Str(_) | Str(_)
| Access(_, _) | Access(_, _)
@ -505,7 +505,7 @@ pub fn is_multiline_expr<'a>(expr: &'a Expr<'a>) -> bool {
|| next_is_multiline_bin_op || next_is_multiline_bin_op
} }
UnaryOp(loc_subexpr, _) | PrecedenceConflict(_, _, loc_subexpr) => { UnaryOp(loc_subexpr, _) | PrecedenceConflict(_, _, _, loc_subexpr) => {
is_multiline_expr(&loc_subexpr.value) is_multiline_expr(&loc_subexpr.value)
} }

View file

@ -56,7 +56,7 @@ pub fn fmt_pattern<'a>(
fmt_pattern(buf, &loc_pattern.value, indent, true, only_comments); fmt_pattern(buf, &loc_pattern.value, indent, true, only_comments);
} }
IntLiteral(string) => buf.push_str(string), NumLiteral(string) => buf.push_str(string),
NonBase10Literal { NonBase10Literal {
base, base,
string, string,

View file

@ -20,18 +20,25 @@ im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.2", features = ["collections"] }
inlinable_string = "0.1.0"
-# NOTE: Breaking API changes get pushed directly to this Inkwell branch, so be
-# very careful when running `cargo update` to get a new revision into Cargo.lock.
-#
-# We have to depend on `branch` instead of a specific `rev` here because, although
-# `rev` works locally, it causes an error on GitHub Actions. (It's unclear why,
-# but after several hours of trying unsuccessfully to fix it, `branch` is it.)
-inkwell = { git = "https://github.com/TheDan64/inkwell", branch = "llvm8-0" }
-target-lexicon = "0.10" # NOTE: we must use the same version of target-lexicon as cranelift!
-cranelift = "0.59" # All cranelift crates should have the same version!
-cranelift-simplejit = "0.59" # All cranelift crates should have the same version!
-cranelift-module = "0.59" # All cranelift crates should have the same version!
-cranelift-codegen = "0.59" # All cranelift crates should have the same version!
+# NOTE: rtfeldman/inkwell is a fork of TheDan64/inkwell which does not change anything.
+#
+# The reason for this fork is that the way Inkwell is designed, you have to use
+# a particular branch (e.g. "llvm8-0") in Cargo.toml. That would be fine, except that
+# breaking changes get pushed directly to that branch, which breaks our build
+# without warning.
+#
+# We tried referencing a specific rev on TheDan64/inkwell directly (instead of branch),
+# but although that worked locally, it did not work on GitHub Actions. (After a few
+# hours of investigation, gave up trying to figure out why.) So this is the workaround:
+# having an immutable tag on the rtfeldman/inkwell fork which points to
+# a particular "release" of Inkwell.
+#
+# When we want to update Inkwell, we can sync up rtfeldman/inkwell to the latest
+# commit of TheDan64/inkwell, push a new tag which points to the latest commit,
+# change the tag value in this Cargo.toml to point to that tag, and `cargo update`.
+# This way, GitHub Actions works and nobody's builds get broken.
+inkwell = { git = "https://github.com/rtfeldman/inkwell", tag = "llvm10-0.release1" }
+target-lexicon = "0.10"

[dev-dependencies]
roc_can = { path = "../can" }
@ -43,3 +50,4 @@ quickcheck = "0.8"
quickcheck_macros = "0.8"
tokio = { version = "0.2", features = ["blocking", "fs", "sync", "rt-threaded"] }
bumpalo = { version = "3.2", features = ["collections"] }
+libc = "0.2"

View file

@ -1,732 +0,0 @@
use std::convert::TryFrom;
use bumpalo::collections::Vec;
use bumpalo::Bump;
use cranelift::frontend::Switch;
use cranelift::prelude::{
AbiParam, ExternalName, FloatCC, FunctionBuilder, FunctionBuilderContext, IntCC, MemFlags,
};
use cranelift_codegen::ir::entities::{StackSlot, Value};
use cranelift_codegen::ir::stackslot::{StackSlotData, StackSlotKind};
use cranelift_codegen::ir::{immediates::Offset32, types, InstBuilder, Signature, Type};
use cranelift_codegen::isa::TargetFrontendConfig;
use cranelift_codegen::Context;
use cranelift_module::{Backend, FuncId, Linkage, Module};
use crate::crane::convert::{sig_from_layout, type_from_layout};
use roc_collections::all::ImMap;
use roc_module::symbol::{Interns, Symbol};
use roc_mono::expr::{Expr, Proc, Procs};
use roc_mono::layout::{Builtin, Layout};
type Scope = ImMap<Symbol, ScopeEntry>;
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ScopeEntry {
Stack { expr_type: Type, slot: StackSlot },
Heap { expr_type: Type, ptr: Value },
Arg { expr_type: Type, param: Value },
Func { sig: Signature, func_id: FuncId },
}
pub struct Env<'a> {
pub arena: &'a Bump,
pub cfg: TargetFrontendConfig,
pub interns: Interns,
pub malloc: FuncId,
}
pub fn build_expr<'a, B: Backend>(
env: &Env<'a>,
scope: &Scope,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
expr: &Expr<'a>,
procs: &Procs<'a>,
) -> Value {
use roc_mono::expr::Expr::*;
match expr {
Int(num) => builder.ins().iconst(types::I64, *num),
Float(num) => builder.ins().f64const(*num),
Bool(val) => builder.ins().bconst(types::B1, *val),
Byte(val) => builder.ins().iconst(types::I8, *val as i64),
Cond {
cond_lhs,
cond_rhs,
pass,
fail,
cond_layout,
ret_layout,
} => {
let branch = Branch2 {
cond_lhs,
cond_rhs,
pass,
fail,
cond_layout,
ret_layout,
};
build_branch2(env, scope, module, builder, branch, procs)
}
Switch {
cond,
branches,
default_branch,
ret_layout,
cond_layout,
} => {
let ret_type = type_from_layout(env.cfg, &ret_layout);
let switch_args = SwitchArgs {
cond_layout,
cond_expr: cond,
branches,
default_branch,
ret_type,
};
build_switch(env, scope, module, builder, switch_args, procs)
}
Store(stores, ret) => {
let mut scope = im_rc::HashMap::clone(scope);
let cfg = env.cfg;
for (name, layout, expr) in stores.iter() {
let val = build_expr(env, &scope, module, builder, &expr, procs);
let expr_type = type_from_layout(cfg, &layout);
let slot = builder.create_stack_slot(StackSlotData::new(
StackSlotKind::ExplicitSlot,
layout.stack_size(cfg.pointer_bytes() as u32),
));
builder.ins().stack_store(val, slot, Offset32::new(0));
// Make a new scope which includes the binding we just encountered.
// This should be done *after* compiling the bound expr, since any
// recursive (in the LetRec sense) bindings should already have
// been extracted as procedures. Nothing in here should need to
// access itself!
scope = im_rc::HashMap::clone(&scope);
scope.insert(*name, ScopeEntry::Stack { expr_type, slot });
}
build_expr(env, &scope, module, builder, ret, procs)
}
CallByName(symbol, args) => call_by_name(env, *symbol, args, scope, module, builder, procs),
FunctionPointer(name) => {
let fn_id = match scope.get(name) {
Some(ScopeEntry::Func{ func_id, .. }) => *func_id,
other => panic!(
"FunctionPointer could not find function named {:?} in scope; instead, found {:?} in scope {:?}",
name, other, scope
),
};
let func_ref = module.declare_func_in_func(fn_id, &mut builder.func);
builder.ins().func_addr(env.cfg.pointer_type(), func_ref)
}
CallByPointer(sub_expr, args, layout) => {
let mut arg_vals = Vec::with_capacity_in(args.len(), env.arena);
for arg in args.iter() {
arg_vals.push(build_expr(env, scope, module, builder, arg, procs));
}
let sig = sig_from_layout(env.cfg, module, layout);
let callee = build_expr(env, scope, module, builder, sub_expr, procs);
let sig_ref = builder.import_signature(sig);
let call = builder.ins().call_indirect(sig_ref, callee, &arg_vals);
let results = builder.inst_results(call);
debug_assert!(results.len() == 1);
results[0]
}
Load(name) => match scope.get(name) {
Some(ScopeEntry::Stack { expr_type, slot }) => {
builder
.ins()
.stack_load(*expr_type, *slot, Offset32::new(0))
}
Some(ScopeEntry::Arg { param, .. }) => *param,
Some(ScopeEntry::Heap { expr_type, ptr }) => {
builder
.ins()
.load(*expr_type, MemFlags::new(), *ptr, Offset32::new(0))
}
Some(ScopeEntry::Func { .. }) => {
panic!("TODO I don't yet know how to return fn pointers")
}
None => panic!("Could not find a var for {:?} in scope {:?}", name, scope),
},
Struct { layout, fields } => {
let cfg = env.cfg;
// Sort the fields
let mut sorted_fields = Vec::with_capacity_in(fields.len(), env.arena);
for field in fields.iter() {
sorted_fields.push(field);
}
sorted_fields.sort_by_key(|k| &k.0);
// Create a slot
let slot = builder.create_stack_slot(StackSlotData::new(
StackSlotKind::ExplicitSlot,
layout.stack_size(cfg.pointer_bytes() as u32),
));
// Create instructions for storing each field's expression
for (index, (_, ref inner_expr)) in sorted_fields.iter().enumerate() {
let val = build_expr(env, &scope, module, builder, inner_expr, procs);
// Is there an existing function for this?
let field_size = match inner_expr {
Int(_) => std::mem::size_of::<i64>(),
_ => panic!("I don't yet know how to calculate the offset for {:?} when building a cranelift struct", val),
};
let offset = i32::try_from(index * field_size)
.expect("TODO handle field size conversion to i32");
builder.ins().stack_store(val, slot, Offset32::new(offset));
}
let ir_type = type_from_layout(cfg, layout);
builder.ins().stack_addr(ir_type, slot, Offset32::new(0))
}
// Access {
// label,
// field_layout,
// struct_layout,
// } => {
// panic!("I don't yet know how to crane build {:?}", expr);
// }
Str(str_literal) => {
if str_literal.is_empty() {
panic!("TODO build an empty string in Crane");
} else {
let bytes_len = str_literal.len() + 1/* TODO drop the +1 when we have structs and this is no longer a NUL-terminated CString.*/;
let ptr = call_malloc(env, module, builder, bytes_len);
let mem_flags = MemFlags::new();
// Copy the bytes from the string literal into the array
for (index, byte) in str_literal.bytes().enumerate() {
let val = builder.ins().iconst(types::I8, byte as i64);
let offset = Offset32::new(index as i32);
builder.ins().store(mem_flags, val, ptr, offset);
}
// Add a NUL terminator at the end.
// TODO: Instead of NUL-terminating, return a struct
// with the pointer and also the length and capacity.
let nul_terminator = builder.ins().iconst(types::I8, 0);
let index = bytes_len as i32 - 1;
let offset = Offset32::new(index);
builder.ins().store(mem_flags, nul_terminator, ptr, offset);
ptr
}
}
Array { elem_layout, elems } => {
if elems.is_empty() {
panic!("TODO build an empty Array in Crane");
} else {
let elem_bytes = elem_layout.stack_size(env.cfg.pointer_bytes() as u32) as usize;
let bytes_len = (elem_bytes * elems.len()) + 1/* TODO drop the +1 when we have structs and this is no longer NUL-terminated. */;
let ptr = call_malloc(env, module, builder, bytes_len);
let mem_flags = MemFlags::new();
// Copy the elements from the literal into the array
for (index, elem) in elems.iter().enumerate() {
let offset = Offset32::new(elem_bytes as i32 * index as i32);
let val = build_expr(env, scope, module, builder, elem, procs);
builder.ins().store(mem_flags, val, ptr, offset);
}
// Add a NUL terminator at the end.
// TODO: Instead of NUL-terminating, return a struct
// with the pointer and also the length and capacity.
let nul_terminator = builder.ins().iconst(types::I8, 0);
let index = bytes_len as i32 - 1;
let offset = Offset32::new(index);
builder.ins().store(mem_flags, nul_terminator, ptr, offset);
ptr
}
}
_ => {
panic!("I don't yet know how to crane build {:?}", expr);
}
}
}
struct Branch2<'a> {
cond_lhs: &'a Expr<'a>,
cond_rhs: &'a Expr<'a>,
cond_layout: &'a Layout<'a>,
pass: &'a Expr<'a>,
fail: &'a Expr<'a>,
ret_layout: &'a Layout<'a>,
}
fn build_branch2<'a, B: Backend>(
env: &Env<'a>,
scope: &Scope,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
branch: Branch2<'a>,
procs: &Procs<'a>,
) -> Value {
let ret_layout = branch.ret_layout;
let ret_type = type_from_layout(env.cfg, &ret_layout);
// Declare a variable which each branch will mutate to be the value of that branch.
// At the end of the expression, we will evaluate to this.
let ret = cranelift::frontend::Variable::with_u32(0);
// The block we'll jump to once the switch has completed.
let ret_block = builder.create_block();
builder.declare_var(ret, ret_type);
let lhs = build_expr(env, scope, module, builder, branch.cond_lhs, procs);
let rhs = build_expr(env, scope, module, builder, branch.cond_rhs, procs);
let pass_block = builder.create_block();
let fail_block = builder.create_block();
match branch.cond_layout {
Layout::Builtin(Builtin::Float64) => {
// For floats, first do a `fcmp` comparison to get a bool answer about equality,
// then use `brnz` to branch if that bool equality answer was nonzero (aka true).
let is_eq = builder.ins().fcmp(FloatCC::Equal, lhs, rhs);
builder.ins().brnz(is_eq, pass_block, &[]);
}
Layout::Builtin(Builtin::Int64) => {
// For ints, we can compare and branch in the same instruction: `icmp`
builder
.ins()
.br_icmp(IntCC::Equal, lhs, rhs, pass_block, &[]);
}
other => panic!("I don't know how to build a conditional for {:?}", other),
}
// Unconditionally jump to fail_block (if we didn't just jump to pass_block).
builder.ins().jump(fail_block, &[]);
let mut build_branch = |expr, block| {
builder.switch_to_block(block);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
// Mutate the ret variable to be the outcome of this branch.
let value = build_expr(env, scope, module, builder, expr, procs);
builder.def_var(ret, value);
// Unconditionally jump to ret_block, making the whole expression evaluate to ret.
builder.ins().jump(ret_block, &[]);
};
build_branch(branch.pass, pass_block);
build_branch(branch.fail, fail_block);
// Finally, build ret_block - which contains our terminator instruction.
{
builder.switch_to_block(ret_block);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
// Now that ret has been mutated by the switch statement, evaluate to it.
builder.use_var(ret)
}
}
struct SwitchArgs<'a> {
pub cond_expr: &'a Expr<'a>,
pub cond_layout: &'a Layout<'a>,
pub branches: &'a [(u64, Expr<'a>)],
pub default_branch: &'a Expr<'a>,
pub ret_type: Type,
}
fn build_switch<'a, B: Backend>(
env: &Env<'a>,
scope: &Scope,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
switch_args: SwitchArgs<'a>,
procs: &Procs<'a>,
) -> Value {
let mut switch = Switch::new();
let SwitchArgs {
branches,
cond_expr,
default_branch,
ret_type,
..
} = switch_args;
let mut blocks = Vec::with_capacity_in(branches.len(), env.arena);
// Declare a variable which each branch will mutate to be the value of that branch.
// At the end of the expression, we will evaluate to this.
let ret = cranelift::frontend::Variable::with_u32(0);
builder.declare_var(ret, ret_type);
// The block for the conditional's default branch.
let default_block = builder.create_block();
// The block we'll jump to once the switch has completed.
let ret_block = builder.create_block();
// Build the blocks for each branch, and register them in the switch.
// Do this before emitting the switch, because it needs to be emitted at the front.
for (int, _) in branches {
let block = builder.create_block();
blocks.push(block);
switch.set_entry(*int, block);
}
// Run the switch. Each branch will mutate ret and then jump to ret_block.
let cond = build_expr(env, scope, module, builder, cond_expr, procs);
switch.emit(builder, cond, default_block);
let mut build_branch = |block, expr| {
builder.switch_to_block(block);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
// Mutate the ret variable to be the outcome of this branch.
let value = build_expr(env, scope, module, builder, expr, procs);
builder.def_var(ret, value);
// Unconditionally jump to ret_block, making the whole expression evaluate to ret.
builder.ins().jump(ret_block, &[]);
};
// Build the blocks for each branch
for ((_, expr), block) in branches.iter().zip(blocks) {
build_branch(block, expr);
}
// Build the block for the default branch
build_branch(default_block, default_branch);
// Finally, build ret_block - which contains our terminator instruction.
{
builder.switch_to_block(ret_block);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
// Now that ret has been mutated by the switch statement, evaluate to it.
builder.use_var(ret)
}
}
pub fn declare_proc<'a, B: Backend>(
env: &Env<'a>,
module: &mut Module<B>,
symbol: Symbol,
proc: &Proc<'a>,
) -> (FuncId, Signature) {
let args = proc.args;
let cfg = env.cfg;
// TODO this Layout::from_content is duplicated when building this Proc
let ret_type = type_from_layout(cfg, &proc.ret_layout);
// Create a signature for the function
let mut sig = module.make_signature();
// Add return type to the signature
sig.returns.push(AbiParam::new(ret_type));
// Add params to the signature
for (layout, _name) in args.iter() {
let arg_type = type_from_layout(cfg, &layout);
sig.params.push(AbiParam::new(arg_type));
}
// Declare the function in the module
let fn_id = module
.declare_function(symbol.ident_string(&env.interns), Linkage::Local, &sig)
.unwrap_or_else(|err| panic!("Error when building function {:?} - {:?}", symbol, err));
(fn_id, sig)
}
// TODO trim down these arguments
#[allow(clippy::too_many_arguments)]
pub fn define_proc_body<'a, B: Backend>(
env: &Env<'a>,
ctx: &mut Context,
module: &mut Module<B>,
fn_id: FuncId,
scope: &Scope,
sig: Signature,
proc: Proc<'a>,
procs: &Procs<'a>,
) {
let args = proc.args;
let cfg = env.cfg;
// Build the body of the function
{
let mut scope = scope.clone();
ctx.func.signature = sig;
ctx.func.name = ExternalName::user(0, fn_id.as_u32());
let mut func_ctx = FunctionBuilderContext::new();
let mut builder: FunctionBuilder = FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
let block = builder.create_block();
builder.switch_to_block(block);
builder.append_block_params_for_function_params(block);
// Add args to scope
for (&param, (layout, arg_symbol)) in builder.block_params(block).iter().zip(args) {
let expr_type = type_from_layout(cfg, &layout);
scope.insert(*arg_symbol, ScopeEntry::Arg { expr_type, param });
}
let body = build_expr(env, &scope, module, &mut builder, &proc.body, procs);
builder.ins().return_(&[body]);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
builder.seal_all_blocks();
builder.finalize();
}
module
.define_function(fn_id, ctx)
.expect("Defining Cranelift function failed");
module.clear_context(ctx);
}
fn build_arg<'a, B: Backend>(
(arg, _): &'a (Expr<'a>, Layout<'a>),
env: &Env<'a>,
scope: &Scope,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
procs: &Procs<'a>,
) -> Value {
build_expr(env, scope, module, builder, arg, procs)
}
#[inline(always)]
fn call_by_name<'a, B: Backend>(
env: &Env<'a>,
symbol: Symbol,
args: &'a [(Expr<'a>, Layout<'a>)],
scope: &Scope,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
procs: &Procs<'a>,
) -> Value {
match symbol {
Symbol::NUM_ADD => {
debug_assert!(args.len() == 2);
let a = build_arg(&args[0], env, scope, module, builder, procs);
let b = build_arg(&args[1], env, scope, module, builder, procs);
builder.ins().iadd(a, b)
}
Symbol::NUM_SUB => {
debug_assert!(args.len() == 2);
let a = build_arg(&args[0], env, scope, module, builder, procs);
let b = build_arg(&args[1], env, scope, module, builder, procs);
builder.ins().isub(a, b)
}
Symbol::NUM_MUL => {
debug_assert!(args.len() == 2);
let a = build_arg(&args[0], env, scope, module, builder, procs);
let b = build_arg(&args[1], env, scope, module, builder, procs);
builder.ins().imul(a, b)
}
Symbol::NUM_NEG => {
debug_assert!(args.len() == 1);
let num = build_arg(&args[0], env, scope, module, builder, procs);
builder.ins().ineg(num)
}
Symbol::LIST_GET_UNSAFE => {
debug_assert!(args.len() == 2);
let list_ptr = build_arg(&args[0], env, scope, module, builder, procs);
let elem_index = build_arg(&args[1], env, scope, module, builder, procs);
let elem_type = Type::int(64).unwrap(); // TODO Look this up instead of hardcoding it!
let elem_bytes = 8; // TODO Look this up instead of hardcoding it!
let elem_size = builder.ins().iconst(types::I64, elem_bytes);
// Multiply the requested index by the size of each element.
let offset = builder.ins().imul(elem_index, elem_size);
builder.ins().load_complex(
elem_type,
MemFlags::new(),
&[list_ptr, offset],
Offset32::new(0),
)
}
Symbol::LIST_SET => {
let (_list_expr, list_layout) = &args[0];
match list_layout {
Layout::Builtin(Builtin::List(elem_layout)) => {
// TODO try memcpy for shallow clones; it's probably faster
// let list_val = build_expr(env, scope, module, builder, list_expr, procs);
let num_elems = 10; // TODO FIXME read from List.len
let elem_bytes =
elem_layout.stack_size(env.cfg.pointer_bytes() as u32) as usize;
let bytes_len = (elem_bytes * num_elems) + 1/* TODO drop the +1 when we have structs and this is no longer NUL-terminated. */;
let ptr = call_malloc(env, module, builder, bytes_len);
// let mem_flags = MemFlags::new();
// Copy the elements from the literal into the array
// for (index, elem) in elems.iter().enumerate() {
// let offset = Offset32::new(elem_bytes as i32 * index as i32);
// let val = build_expr(env, scope, module, builder, elem, procs);
// builder.ins().store(mem_flags, val, ptr, offset);
// }
// Add a NUL terminator at the end.
// TODO: Instead of NUL-terminating, return a struct
// with the pointer and also the length and capacity.
// let nul_terminator = builder.ins().iconst(types::I8, 0);
// let index = bytes_len as i32 - 1;
// let offset = Offset32::new(index);
// builder.ins().store(mem_flags, nul_terminator, ptr, offset);
list_set_in_place(
env,
ptr,
build_arg(&args[1], env, scope, module, builder, procs),
build_arg(&args[2], env, scope, module, builder, procs),
elem_layout,
builder,
);
ptr
}
_ => {
unreachable!("Invalid List layout for List.set: {:?}", list_layout);
}
}
}
Symbol::LIST_SET_IN_PLACE => {
// set : List elem, Int, elem -> List elem
debug_assert!(args.len() == 3);
let (list_expr, list_layout) = &args[0];
let list_val = build_expr(env, scope, module, builder, list_expr, procs);
match list_layout {
Layout::Builtin(Builtin::List(elem_layout)) => list_set_in_place(
env,
list_val,
build_arg(&args[1], env, scope, module, builder, procs),
build_arg(&args[2], env, scope, module, builder, procs),
elem_layout,
builder,
),
_ => {
unreachable!("Invalid List layout for List.set: {:?}", list_layout);
}
}
}
_ => {
let fn_id = match scope.get(&symbol) {
Some(ScopeEntry::Func{ func_id, .. }) => *func_id,
other => panic!(
"CallByName could not find function named {:?} in scope; instead, found {:?} in scope {:?}",
symbol, other, scope
),
};
let local_func = module.declare_func_in_func(fn_id, &mut builder.func);
let mut arg_vals = Vec::with_capacity_in(args.len(), env.arena);
for (arg, _layout) in args {
arg_vals.push(build_expr(env, scope, module, builder, arg, procs));
}
let call = builder.ins().call(local_func, arg_vals.into_bump_slice());
let results = builder.inst_results(call);
debug_assert!(results.len() == 1);
results[0]
}
}
}
fn call_malloc<B: Backend>(
env: &Env<'_>,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
size: usize,
) -> Value {
// Declare malloc inside this function
let local_func = module.declare_func_in_func(env.malloc, &mut builder.func);
// Convert the size argument to a Value
let ptr_size_type = module.target_config().pointer_type();
let size_arg = builder.ins().iconst(ptr_size_type, size as i64);
// Call malloc and return the resulting pointer
let call = builder.ins().call(local_func, &[size_arg]);
let results = builder.inst_results(call);
debug_assert!(results.len() == 1);
results[0]
}
fn list_set_in_place<'a>(
env: &Env<'a>,
list_ptr: Value,
elem_index: Value,
elem: Value,
elem_layout: &Layout<'a>,
builder: &mut FunctionBuilder,
) -> Value {
let elem_bytes = elem_layout.stack_size(env.cfg.pointer_bytes() as u32);
let elem_size = builder.ins().iconst(types::I64, elem_bytes as i64);
// Multiply the requested index by the size of each element.
let offset = builder.ins().imul(elem_index, elem_size);
builder
.ins()
.store_complex(MemFlags::new(), elem, &[list_ptr, offset], Offset32::new(0));
list_ptr
}

View file

@ -1,61 +0,0 @@
use cranelift::prelude::AbiParam;
use cranelift_codegen::ir::{types, Signature, Type};
use cranelift_codegen::isa::TargetFrontendConfig;
use cranelift_module::{Backend, Module};
use roc_mono::layout::Layout;
pub fn type_from_layout(cfg: TargetFrontendConfig, layout: &Layout<'_>) -> Type {
use roc_mono::layout::Builtin::*;
use roc_mono::layout::Layout::*;
match layout {
Pointer(_) | FunctionPointer(_, _) => cfg.pointer_type(),
Struct(fields) => {
// This will change as we add more fields and field types to the tests
let naive_all_ints = fields.iter().all(|ref field| match field.1 {
Builtin(Int64) => true,
_ => false,
});
if naive_all_ints && fields.len() == 3 {
types::I64.by(4).unwrap()
} else {
panic!("TODO layout_to_crane_type for Struct");
}
}
Builtin(builtin) => match builtin {
Int64 => types::I64,
Float64 => types::F64,
Str | Map(_, _) | Set(_) | List(_) => cfg.pointer_type(),
},
}
}
pub fn sig_from_layout<B: Backend>(
cfg: TargetFrontendConfig,
module: &mut Module<B>,
layout: &Layout<'_>,
) -> Signature {
match layout {
Layout::FunctionPointer(args, ret) => {
let ret_type = type_from_layout(cfg, &ret);
let mut sig = module.make_signature();
// Add return type to the signature
sig.returns.push(AbiParam::new(ret_type));
// Add params to the signature
for layout in args.iter() {
let arg_type = type_from_layout(cfg, &layout);
sig.params.push(AbiParam::new(arg_type));
}
sig
}
_ => {
panic!("Could not make Signature from Layout {:?}", layout);
}
}
}

View file

@ -1,117 +0,0 @@
use cranelift::prelude::{AbiParam, FunctionBuilder, FunctionBuilderContext};
use cranelift_codegen::ir::{ExternalName, Function, InstBuilder, Signature};
use cranelift_codegen::isa::CallConv;
use cranelift_codegen::Context;
use cranelift_module::{Backend, FuncId, Linkage, Module};
pub fn declare_malloc_header<B: Backend>(module: &mut Module<B>) -> (FuncId, Signature) {
let ptr_size_type = module.target_config().pointer_type();
let sig = Signature {
params: vec![AbiParam::new(ptr_size_type)],
returns: vec![AbiParam::new(ptr_size_type)],
call_conv: CallConv::SystemV, // TODO is this the calling convention we actually want?
};
// Declare the wrapper around malloc
let func_id = module
.declare_function("roc_malloc", Linkage::Local, &sig)
.unwrap();
(func_id, sig)
}
pub fn define_malloc_body<B: Backend>(
module: &mut Module<B>,
ctx: &mut Context,
sig: Signature,
func_id: FuncId,
) {
let ptr_size_type = module.target_config().pointer_type();
ctx.func = Function::with_name_signature(ExternalName::user(0, func_id.as_u32()), sig);
let mut func_ctx = FunctionBuilderContext::new();
{
let mut builder: FunctionBuilder = FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
let block = builder.create_block();
builder.switch_to_block(block);
builder.append_block_params_for_function_params(block);
let mut malloc_sig = module.make_signature();
malloc_sig.params.push(AbiParam::new(ptr_size_type));
malloc_sig.returns.push(AbiParam::new(ptr_size_type));
let callee = module
.declare_function("malloc", Linkage::Import, &malloc_sig)
.expect("declare malloc");
let malloc = module.declare_func_in_func(callee, &mut builder.func);
let size = builder.block_params(block)[0];
let call = builder.ins().call(malloc, &[size]);
let ptr = builder.inst_results(call)[0];
builder.ins().return_(&[ptr]);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
}
module.define_function(func_id, ctx).unwrap();
}
pub fn define_malloc<B: Backend>(module: &mut Module<B>, ctx: &mut Context) -> FuncId {
// TODO investigate whether we can remove this wrapper function somehow.
// It may get inlined away, but it seems like it shouldn't be
// necessary, and we should be able to return the FuncId of the imported malloc.
let ptr_size_type = module.target_config().pointer_type();
let sig = Signature {
params: vec![AbiParam::new(ptr_size_type)],
returns: vec![AbiParam::new(ptr_size_type)],
call_conv: CallConv::SystemV, // TODO is this the calling convention we actually want?
};
// Declare the wrapper around malloc
let func_id = module
.declare_function("roc_malloc", Linkage::Local, &sig)
.unwrap();
let ptr_size_type = module.target_config().pointer_type();
ctx.func = Function::with_name_signature(ExternalName::user(0, func_id.as_u32()), sig);
let mut func_ctx = FunctionBuilderContext::new();
{
let mut builder: FunctionBuilder = FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
let block = builder.create_block();
builder.switch_to_block(block);
builder.append_block_params_for_function_params(block);
let mut malloc_sig = module.make_signature();
malloc_sig.params.push(AbiParam::new(ptr_size_type));
malloc_sig.returns.push(AbiParam::new(ptr_size_type));
let callee = module
.declare_function("malloc", Linkage::Import, &malloc_sig)
.expect("declare malloc");
let malloc = module.declare_func_in_func(callee, &mut builder.func);
let size = builder.block_params(block)[0];
let call = builder.ins().call(malloc, &[size]);
let ptr = builder.inst_results(call)[0];
builder.ins().return_(&[ptr]);
builder.seal_block(block);
builder.finalize();
}
module.define_function(func_id, ctx).unwrap();
module.clear_context(ctx);
func_id
}

View file

@ -1,3 +0,0 @@
pub mod build;
pub mod convert;
pub mod imports;

View file

@ -10,5 +10,5 @@
// and encouraging shortcuts here creates bad incentives. I would rather temporarily
// re-enable this when working on performance optimizations than have it block PRs.
#![allow(clippy::large_enum_variant)]
-pub mod crane;
pub mod llvm;

File diff suppressed because it is too large Load diff

Binary file not shown.

View file

@ -1,9 +1,11 @@
use bumpalo::collections::Vec;
use bumpalo::Bump;
use inkwell::context::Context; use inkwell::context::Context;
use inkwell::types::BasicTypeEnum::{self, *}; use inkwell::types::BasicTypeEnum::{self, *};
use inkwell::types::{BasicType, FunctionType}; use inkwell::types::{ArrayType, BasicType, FunctionType, IntType, PointerType, StructType};
use inkwell::AddressSpace; use inkwell::AddressSpace;
use roc_mono::layout::Layout; use roc_mono::layout::{Builtin, Layout};
/// TODO could this be added to Inkwell itself as a method on BasicValueEnum? /// TODO could this be added to Inkwell itself as a method on BasicValueEnum?
pub fn get_fn_type<'ctx>( pub fn get_fn_type<'ctx>(
@ -20,45 +22,155 @@ pub fn get_fn_type<'ctx>(
} }
} }
/// TODO could this be added to Inkwell itself as a method on BasicValueEnum?
pub fn get_array_type<'ctx>(bt_enum: &BasicTypeEnum<'ctx>, size: u32) -> ArrayType<'ctx> {
match bt_enum {
ArrayType(typ) => typ.array_type(size),
IntType(typ) => typ.array_type(size),
FloatType(typ) => typ.array_type(size),
PointerType(typ) => typ.array_type(size),
StructType(typ) => typ.array_type(size),
VectorType(typ) => typ.array_type(size),
}
}
pub fn basic_type_from_layout<'ctx>( pub fn basic_type_from_layout<'ctx>(
arena: &Bump,
context: &'ctx Context, context: &'ctx Context,
layout: &Layout<'_>, layout: &Layout<'_>,
ptr_bytes: u32,
) -> BasicTypeEnum<'ctx> { ) -> BasicTypeEnum<'ctx> {
use roc_mono::layout::Builtin::*; use roc_mono::layout::Builtin::*;
use roc_mono::layout::Layout::*; use roc_mono::layout::Layout::*;
match layout { match layout {
FunctionPointer(args, ret_layout) => { FunctionPointer(args, ret_layout) => {
let ret_type = basic_type_from_layout(context, &ret_layout); let ret_type = basic_type_from_layout(arena, context, &ret_layout, ptr_bytes);
let mut arg_basic_types = Vec::with_capacity(args.len()); let mut arg_basic_types = Vec::with_capacity_in(args.len(), arena);
for arg_layout in args.iter() { for arg_layout in args.iter() {
arg_basic_types.push(basic_type_from_layout(context, arg_layout)); arg_basic_types.push(basic_type_from_layout(
arena, context, arg_layout, ptr_bytes,
));
} }
let fn_type = get_fn_type(&ret_type, arg_basic_types.as_slice()); let fn_type = get_fn_type(&ret_type, arg_basic_types.into_bump_slice());
let ptr_type = fn_type.ptr_type(AddressSpace::Generic); let ptr_type = fn_type.ptr_type(AddressSpace::Generic);
ptr_type.as_basic_type_enum() ptr_type.as_basic_type_enum()
} }
Struct(_fields) => { Pointer(layout) => basic_type_from_layout(arena, context, &layout, ptr_bytes)
panic!("TODO layout_to_basic_type for Struct"); .ptr_type(AddressSpace::Generic)
.into(),
Struct(sorted_fields) => {
// Determine types
let mut field_types = Vec::with_capacity_in(sorted_fields.len(), arena);
for field_layout in sorted_fields.iter() {
field_types.push(basic_type_from_layout(
arena,
context,
field_layout,
ptr_bytes,
));
} }
Pointer(_layout) => {
panic!("TODO layout_to_basic_type for Pointer"); context
.struct_type(field_types.into_bump_slice(), false)
.as_basic_type_enum()
} }
Union(tags) if tags.len() == 1 => {
let layouts = tags.iter().next().unwrap();
// Determine types
let mut field_types = Vec::with_capacity_in(layouts.len(), arena);
for layout in layouts.iter() {
field_types.push(basic_type_from_layout(arena, context, layout, ptr_bytes));
}
context
.struct_type(field_types.into_bump_slice(), false)
.as_basic_type_enum()
}
Union(_) => {
// TODO make this dynamic
let ptr_size = std::mem::size_of::<i64>();
let union_size = layout.stack_size(ptr_size as u32);
let array_type = context
.i8_type()
.array_type(union_size)
.as_basic_type_enum();
context.struct_type(&[array_type], false).into()
}
Builtin(builtin) => match builtin { Builtin(builtin) => match builtin {
Int64 => context.i64_type().as_basic_type_enum(), Int64 => context.i64_type().as_basic_type_enum(),
Float64 => context.f64_type().as_basic_type_enum(), Float64 => context.f64_type().as_basic_type_enum(),
Str => context Bool => context.bool_type().as_basic_type_enum(),
Byte => context.i8_type().as_basic_type_enum(),
Str | EmptyStr => context
.i8_type() .i8_type()
.ptr_type(AddressSpace::Generic) .ptr_type(AddressSpace::Generic)
.as_basic_type_enum(), .as_basic_type_enum(),
Map(_, _) => panic!("TODO layout_to_basic_type for Builtin::Map"), Map(_, _) | EmptyMap => panic!("TODO layout_to_basic_type for Builtin::Map"),
Set(_) => panic!("TODO layout_to_basic_type for Builtin::Set"), Set(_) | EmptySet => panic!("TODO layout_to_basic_type for Builtin::Set"),
List(elem_layout) => basic_type_from_layout(context, elem_layout) List(elem_layout) => {
.ptr_type(AddressSpace::Generic) let ptr_type = basic_type_from_layout(arena, context, elem_layout, ptr_bytes)
.as_basic_type_enum(), .ptr_type(AddressSpace::Generic);
collection_wrapper(context, ptr_type, ptr_bytes).into()
}
EmptyList => BasicTypeEnum::StructType(empty_collection(context, ptr_bytes)),
}, },
} }
} }
/// A length usize and a pointer to some elements.
/// Could be a wrapper for a List or a Str.
///
/// The order of these doesn't matter, since they should be initialized
/// to zero anyway for an empty collection; as such, we return a
/// (usize, usize) struct layout no matter what.
pub fn empty_collection(ctx: &Context, ptr_bytes: u32) -> StructType<'_> {
let usize_type = BasicTypeEnum::IntType(ptr_int(ctx, ptr_bytes));
ctx.struct_type(&[usize_type, usize_type], false)
}
/// A length usize and a pointer to some elements.
///
/// Could be a wrapper for a List or a Str.
pub fn collection_wrapper<'ctx>(
ctx: &'ctx Context,
ptr_type: PointerType<'ctx>,
ptr_bytes: u32,
) -> StructType<'ctx> {
let ptr_type_enum = BasicTypeEnum::PointerType(ptr_type);
let len_type = BasicTypeEnum::IntType(ptr_int(ctx, ptr_bytes));
// This conditional is based on a constant, so the branch should be optimized away.
// The reason for keeping the conditional here is so we can flip the order
// of the fields (by changing the constants) without breaking this code.
if Builtin::WRAPPER_PTR == 0 {
ctx.struct_type(&[ptr_type_enum, len_type], false)
} else {
ctx.struct_type(&[len_type, ptr_type_enum], false)
}
}
pub fn ptr_int(ctx: &Context, ptr_bytes: u32) -> IntType<'_> {
match ptr_bytes {
1 => ctx.i8_type(),
2 => ctx.i16_type(),
4 => ctx.i32_type(),
8 => ctx.i64_type(),
16 => ctx.i128_type(),
_ => panic!(
"Invalid target: Roc does't support compiling to {}-bit systems.",
ptr_bytes * 8
),
}
}
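The collection wrapper above is how non-empty lists and strings are represented at runtime: a two-field struct holding a pointer to the elements and a usize length, with the field order decided by the `Builtin::WRAPPER_PTR` constant. As a rough sketch only (plain Rust, not part of this commit; the struct name, field names, and pointer-first order are assumptions), the value being described looks like this:

```
// Hypothetical mirror of the (pointer, length) wrapper on a 64-bit target.
// Field order is assumed to be pointer-first; the real order is whatever
// Builtin::WRAPPER_PTR says.
#[repr(C)]
struct RocList {
    ptr: *const i64, // points at the elements
    len: usize,      // element count, sized via ptr_int for the target
}

fn main() {
    let elems = [12i64, 9, 6, 3];
    let list = RocList {
        ptr: elems.as_ptr(),
        len: elems.len(),
    };

    // An empty collection is the all-zero version of the same two-word struct,
    // which is why empty_collection returns a (usize, usize)-shaped layout.
    let empty = RocList {
        ptr: std::ptr::null(),
        len: 0,
    };

    assert_eq!(list.len, 4);
    assert_eq!(empty.len, 0);
}
```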

compiler/gen/test.asm Normal file
View file

@ -0,0 +1,4 @@
.text
.file "my_module"
.section ".note.GNU-stack","",@progbits

View file

@ -0,0 +1,360 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate libc;
extern crate roc_gen;
#[macro_use]
mod helpers;
#[cfg(test)]
mod gen_builtins {
use crate::helpers::{can_expr, infer_expr, uniq_expr, CanExprOut};
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::execution_engine::JitFunction;
use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc_collections::all::ImMap;
use roc_gen::llvm::build::{build_proc, build_proc_header};
use roc_gen::llvm::convert::basic_type_from_layout;
use roc_mono::expr::{Expr, Procs};
use roc_mono::layout::Layout;
use roc_types::subs::Subs;
#[test]
fn empty_list_literal() {
assert_evals_to!("[]", &[], &'static [i64]);
}
#[test]
fn int_list_literal() {
assert_evals_to!("[ 12, 9, 6, 3 ]", &[12, 9, 6, 3], &'static [i64]);
}
#[test]
fn gen_if_fn() {
assert_evals_to!(
indoc!(
r#"
limitedNegate = \num ->
if num == 1 then
-1
else if num == -1 then
1
else
num
limitedNegate 1
"#
),
-1,
i64
);
}
#[test]
fn gen_float_eq() {
assert_evals_to!(
indoc!(
r#"
1.0 == 1.0
"#
),
true,
bool
);
}
#[test]
fn gen_add_f64() {
assert_evals_to!(
indoc!(
r#"
1.1 + 2.4 + 3
"#
),
6.5,
f64
);
}
#[test]
fn gen_add_i64() {
assert_evals_to!(
indoc!(
r#"
1 + 2 + 3
"#
),
6,
i64
);
}
#[test]
fn gen_sub_f64() {
assert_evals_to!(
indoc!(
r#"
1.5 - 2.4 - 3
"#
),
-3.9,
f64
);
}
#[test]
fn gen_sub_i64() {
assert_evals_to!(
indoc!(
r#"
1 - 2 - 3
"#
),
-4,
i64
);
}
#[test]
fn gen_mul_i64() {
assert_evals_to!(
indoc!(
r#"
2 * 4 * 6
"#
),
48,
i64
);
}
#[test]
fn gen_order_of_arithmetic_ops() {
assert_evals_to!(
indoc!(
r#"
1 + 3 * 7 - 2
"#
),
20,
i64
);
}
#[test]
fn if_guard_bind_variable() {
assert_evals_to!(
indoc!(
r#"
when 10 is
x if x == 5 -> 0
_ -> 42
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
when 10 is
x if x == 10 -> 42
_ -> 0
"#
),
42,
i64
);
}
#[test]
fn tail_call_elimination() {
assert_evals_to!(
indoc!(
r#"
sum = \n, accum ->
when n is
0 -> accum
_ -> sum (n - 1) (n + accum)
sum 1_000_000 0
"#
),
500000500000,
i64
);
}
#[test]
fn int_negate() {
assert_evals_to!("Num.neg 123", -123, i64);
}
#[test]
fn gen_basic_fn() {
assert_evals_to!(
indoc!(
r#"
always42 : Num.Num Int.Integer -> Num.Num Int.Integer
always42 = \num -> 42
always42 5
"#
),
42,
i64
);
}
#[test]
fn empty_list_len() {
assert_evals_to!("List.len []", 0, usize);
}
#[test]
fn basic_int_list_len() {
assert_evals_to!("List.len [ 12, 9, 6, 3 ]", 4, usize);
}
// #[test]
// fn loaded_int_list_len() {
// assert_evals_to!(
// indoc!(
// r#"
// nums = [ 2, 4, 6 ]
// List.len nums
// "#
// ),
// 3,
// usize
// );
// }
// #[test]
// fn fn_int_list_len() {
// assert_evals_to!(
// indoc!(
// r#"
// # TODO remove this annotation once monomorphization works!
// getLen = \list -> List.len list
// nums = [ 2, 4, 6 ]
// getLen nums
// "#
// ),
// 3,
// usize
// );
// }
// #[test]
// fn int_list_is_empty() {
// assert_evals_to!("List.isEmpty [ 12, 9, 6, 3 ]", 0, u8, |x| x);
// }
//
#[test]
fn head_int_list() {
assert_evals_to!("List.getUnsafe [ 12, 9, 6, 3 ] 0", 12, i64);
}
#[test]
fn get_int_list() {
assert_evals_to!("List.getUnsafe [ 12, 9, 6 ] 1", 9, i64);
}
#[test]
fn get_set_unique_int_list() {
assert_evals_to!("List.getUnsafe (List.set [ 12, 9, 7, 3 ] 1 42) 1", 42, i64);
}
#[test]
fn set_unique_int_list() {
assert_evals_to!(
"List.set [ 12, 9, 7, 1, 5 ] 2 33",
&[12, 9, 33, 1, 5],
&'static [i64]
);
}
#[test]
fn set_unique_list_oob() {
assert_evals_to!(
"List.set [ 3, 17, 4.1 ] 1337 9.25",
&[3.0, 17.0, 4.1],
&'static [f64]
);
}
#[test]
fn set_shared_int_list() {
assert_evals_to!(
indoc!(
r#"
shared = [ 2.1, 4.3 ]
# This should not mutate the original
x = List.getUnsafe (List.set shared 1 7.7) 1
{ x, y: List.getUnsafe shared 1 }
"#
),
(7.7, 4.3),
(f64, f64)
);
}
#[test]
fn set_shared_list_oob() {
assert_evals_to!(
indoc!(
r#"
shared = [ 2, 4 ]
# This List.set is out of bounds, and should have no effect
x = List.getUnsafe (List.set shared 422 0) 1
{ x, y: List.getUnsafe shared 1 }
"#
),
(4, 4),
(i64, i64)
);
}
#[test]
fn get_unique_int_list() {
assert_evals_to!(
indoc!(
r#"
shared = [ 2, 4 ]
List.getUnsafe shared 1
"#
),
4,
i64
);
}
#[test]
fn int_to_float() {
assert_evals_to!(
indoc!(
r#"
Num.toFloat 0x9
"#
),
9.0,
f64
);
}
}

View file

@ -0,0 +1,429 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate libc;
extern crate roc_gen;
#[macro_use]
mod helpers;
#[cfg(test)]
mod gen_primitives {
use crate::helpers::{can_expr, infer_expr, uniq_expr, CanExprOut};
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::execution_engine::JitFunction;
use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc_collections::all::ImMap;
use roc_gen::llvm::build::{build_proc, build_proc_header};
use roc_gen::llvm::convert::basic_type_from_layout;
use roc_mono::expr::{Expr, Procs};
use roc_mono::layout::Layout;
use roc_types::subs::Subs;
use std::ffi::{CStr, CString};
use std::os::raw::c_char;
#[test]
fn basic_str() {
assert_evals_to!(
"\"shirt and hat\"",
CString::new("shirt and hat").unwrap().as_c_str(),
*const c_char,
CStr::from_ptr
);
}
#[test]
fn basic_int() {
assert_evals_to!("123", 123, i64);
}
#[test]
fn basic_float() {
assert_evals_to!("1234.0", 1234.0, f64);
}
#[test]
fn branch_first_float() {
assert_evals_to!(
indoc!(
r#"
when 1.23 is
1.23 -> 12
_ -> 34
"#
),
12,
i64
);
}
#[test]
fn branch_second_float() {
assert_evals_to!(
indoc!(
r#"
when 2.34 is
1.23 -> 63
_ -> 48
"#
),
48,
i64
);
}
#[test]
fn branch_third_float() {
assert_evals_to!(
indoc!(
r#"
when 10.0 is
1.0 -> 63
2.0 -> 48
_ -> 112
"#
),
112,
i64
);
}
#[test]
fn branch_first_int() {
assert_evals_to!(
indoc!(
r#"
when 1 is
1 -> 12
_ -> 34
"#
),
12,
i64
);
}
#[test]
fn branch_second_int() {
assert_evals_to!(
indoc!(
r#"
when 2 is
1 -> 63
_ -> 48
"#
),
48,
i64
);
}
#[test]
fn branch_third_int() {
assert_evals_to!(
indoc!(
r#"
when 10 is
1 -> 63
2 -> 48
_ -> 112
"#
),
112,
i64
);
}
#[test]
fn branch_store_variable() {
assert_evals_to!(
indoc!(
r#"
when 0 is
1 -> 12
a -> a
"#
),
0,
i64
);
}
#[test]
fn when_one_element_tag() {
assert_evals_to!(
indoc!(
r#"
x : [ Pair Int Int ]
x = Pair 0x2 0x3
when x is
Pair l r -> l + r
"#
),
5,
i64
);
}
#[test]
fn gen_when_one_branch() {
assert_evals_to!(
indoc!(
r#"
when 3.14 is
_ -> 23
"#
),
23,
i64
);
}
#[test]
fn gen_large_when_int() {
assert_evals_to!(
indoc!(
r#"
foo = \num ->
when num is
0 -> 200
-3 -> 111 # TODO adding more negative numbers reproduces parsing bugs here
3 -> 789
1 -> 123
2 -> 456
_ -> 1000
foo -3
"#
),
111,
i64
);
}
// #[test]
// fn gen_large_when_float() {
// assert_evals_to!(
// indoc!(
// r#"
// foo = \num ->
// when num is
// 0.5 -> 200.1
// -3.6 -> 111.2 # TODO adding more negative numbers reproduces parsing bugs here
// 3.6 -> 789.5
// 1.7 -> 123.3
// 2.8 -> 456.4
// _ -> 1000.6
// foo -3.6
// "#
// ),
// 111.2,
// f64
// );
// }
#[test]
fn or_pattern() {
assert_evals_to!(
indoc!(
r#"
when 2 is
1 | 2 -> 42
_ -> 1
"#
),
42,
i64
);
}
#[test]
fn apply_identity() {
assert_evals_to!(
indoc!(
r#"
identity = \a -> a
identity 5
"#
),
5,
i64
);
}
#[test]
fn apply_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
(\a -> a) 5
"#
),
5,
i64
);
}
#[test]
fn return_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
alwaysFloatIdentity : Int -> (Float -> Float)
alwaysFloatIdentity = \num ->
(\a -> a)
(alwaysFloatIdentity 2) 3.14
"#
),
3.14,
f64
);
}
#[test]
fn gen_when_fn() {
assert_evals_to!(
indoc!(
r#"
limitedNegate = \num ->
when num is
1 -> -1
-1 -> 1
_ -> num
limitedNegate 1
"#
),
-1,
i64
);
}
#[test]
fn gen_basic_def() {
assert_evals_to!(
indoc!(
r#"
answer = 42
answer
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
pi = 3.14
pi
"#
),
3.14,
f64
);
}
#[test]
fn gen_multiple_defs() {
assert_evals_to!(
indoc!(
r#"
answer = 42
pi = 3.14
answer
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
answer = 42
pi = 3.14
pi
"#
),
3.14,
f64
);
}
#[test]
fn gen_chained_defs() {
assert_evals_to!(
indoc!(
r#"
x = i1
i3 = i2
i1 = 1337
i2 = i1
y = 12.4
i3
"#
),
1337,
i64
);
}
#[test]
fn gen_nested_defs() {
assert_evals_to!(
indoc!(
r#"
x = 5
answer =
i3 = i2
nested =
a = 1.0
b = 5
i1
i1 = 1337
i2 = i1
nested
# None of this should affect anything, even though names
# overlap with the previous nested defs
unused =
nested = 17
i1 = 84.2
nested
y = 12.4
answer
"#
),
1337,
i64
);
}
}

View file

@ -0,0 +1,394 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate libc;
extern crate roc_gen;
#[macro_use]
mod helpers;
#[cfg(test)]
mod gen_records {
use crate::helpers::{can_expr, infer_expr, uniq_expr, CanExprOut};
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::execution_engine::JitFunction;
use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc_collections::all::ImMap;
use roc_gen::llvm::build::{build_proc, build_proc_header};
use roc_gen::llvm::convert::basic_type_from_layout;
use roc_mono::expr::{Expr, Procs};
use roc_mono::layout::Layout;
use roc_types::subs::Subs;
#[test]
fn basic_record() {
assert_evals_to!(
indoc!(
r#"
{ y: 17, x: 15, z: 19 }.x
"#
),
15,
i64
);
assert_evals_to!(
indoc!(
r#"
{ x: 15, y: 17, z: 19 }.y
"#
),
17,
i64
);
assert_evals_to!(
indoc!(
r#"
{ x: 15, y: 17, z: 19 }.z
"#
),
19,
i64
);
}
#[test]
fn f64_record() {
assert_evals_to!(
indoc!(
r#"
rec = { y: 17.2, x: 15.1, z: 19.3 }
rec.x
"#
),
15.1,
f64
);
assert_evals_to!(
indoc!(
r#"
rec = { y: 17.2, x: 15.1, z: 19.3 }
rec.y
"#
),
17.2,
f64
);
assert_evals_to!(
indoc!(
r#"
rec = { y: 17.2, x: 15.1, z: 19.3 }
rec.z
"#
),
19.3,
f64
);
}
#[test]
fn fn_record() {
assert_evals_to!(
indoc!(
r#"
getRec = \x -> { y: 17, x, z: 19 }
(getRec 15).x
"#
),
15,
i64
);
assert_evals_to!(
indoc!(
r#"
rec = { x: 15, y: 17, z: 19 }
rec.y
"#
),
17,
i64
);
assert_evals_to!(
indoc!(
r#"
rec = { x: 15, y: 17, z: 19 }
rec.z
"#
),
19,
i64
);
assert_evals_to!(
indoc!(
r#"
rec = { x: 15, y: 17, z: 19 }
rec.z + rec.x
"#
),
34,
i64
);
}
#[test]
fn def_record() {
assert_evals_to!(
indoc!(
r#"
rec = { y: 17, x: 15, z: 19 }
rec.x
"#
),
15,
i64
);
assert_evals_to!(
indoc!(
r#"
rec = { x: 15, y: 17, z: 19 }
rec.y
"#
),
17,
i64
);
assert_evals_to!(
indoc!(
r#"
rec = { x: 15, y: 17, z: 19 }
rec.z
"#
),
19,
i64
);
}
#[test]
fn when_on_record() {
assert_evals_to!(
indoc!(
r#"
when { x: 0x2 } is
{ x } -> x + 3
"#
),
5,
i64
);
assert_evals_to!(
indoc!(
r#"
when { x: 0x2, y: 3.14 } is
{ x: var } -> var + 3
"#
),
5,
i64
);
assert_evals_to!(
indoc!(
r#"
{ x } = { x: 0x2, y: 3.14 }
x
"#
),
2,
i64
);
}
#[test]
fn record_guard_pattern() {
assert_evals_to!(
indoc!(
r#"
when { x: 0x2, y: 3.14 } is
{ x: 0x4 } -> 5
{ x } -> x + 3
"#
),
5,
i64
);
}
#[test]
fn twice_record_access() {
assert_evals_to!(
indoc!(
r#"
x = {a: 0x2, b: 0x3 }
x.a + x.b
"#
),
5,
i64
);
}
#[test]
fn empty_record() {
assert_evals_to!(
indoc!(
r#"
v = {}
1
"#
),
1,
i64
);
}
#[test]
fn i64_record2_literal() {
assert_evals_to!(
indoc!(
r#"
{ x: 3, y: 5 }
"#
),
(3, 5),
(i64, i64)
);
}
// #[test]
// fn i64_record3_literal() {
// assert_evals_to!(
// indoc!(
// r#"
// { x: 3, y: 5, z: 17 }
// "#
// ),
// (3, 5, 17),
// (i64, i64, i64)
// );
// }
#[test]
fn f64_record2_literal() {
assert_evals_to!(
indoc!(
r#"
{ x: 3.1, y: 5.1 }
"#
),
(3.1, 5.1),
(f64, f64)
);
}
// #[test]
// fn f64_record3_literal() {
// assert_evals_to!(
// indoc!(
// r#"
// { x: 3.1, y: 5.1, z: 17.1 }
// "#
// ),
// (3.1, 5.1, 17.1),
// (f64, f64, f64)
// );
// }
// #[test]
// fn bool_record4_literal() {
// assert_evals_to!(
// indoc!(
// r#"
// record : { a : Bool, b : Bool, c : Bool, d : Bool }
// record = { a: True, b: True, c : True, d : Bool }
// record
// "#
// ),
// (true, false, false, true),
// (bool, bool, bool, bool)
// );
// }
#[test]
fn i64_record1_literal() {
assert_evals_to!(
indoc!(
r#"
{ a: 3 }
"#
),
3,
i64
);
}
// #[test]
// fn i64_record9_literal() {
// assert_evals_to!(
// indoc!(
// r#"
// { a: 3, b: 5, c: 17, d: 1, e: 9, f: 12, g: 13, h: 14, i: 15 }
// "#
// ),
// (3, 5, 17, 1, 9, 12, 13, 14, 15),
// (i64, i64, i64, i64, i64, i64, i64, i64, i64)
// );
// }
// #[test]
// fn f64_record3_literal() {
// assert_evals_to!(
// indoc!(
// r#"
// { x: 3.1, y: 5.1, z: 17.1 }
// "#
// ),
// (3.1, 5.1, 17.1),
// (f64, f64, f64)
// );
// }
#[test]
fn bool_literal() {
assert_evals_to!(
indoc!(
r#"
x : Bool
x = True
x
"#
),
true,
bool
);
}
}

View file

@ -0,0 +1,624 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate libc;
extern crate roc_gen;
#[macro_use]
mod helpers;
#[cfg(test)]
mod gen_tags {
use crate::helpers::{can_expr, infer_expr, uniq_expr, CanExprOut};
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::execution_engine::JitFunction;
use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc_collections::all::ImMap;
use roc_gen::llvm::build::{build_proc, build_proc_header};
use roc_gen::llvm::convert::basic_type_from_layout;
use roc_mono::expr::{Expr, Procs};
use roc_mono::layout::Layout;
use roc_types::subs::Subs;
#[test]
fn applied_tag_nothing() {
assert_evals_to!(
indoc!(
r#"
Maybe a : [ Just a, Nothing ]
x : Maybe Int
x = Nothing
0x1
"#
),
1,
i64
);
}
#[test]
fn applied_tag_just() {
assert_evals_to!(
indoc!(
r#"
Maybe a : [ Just a, Nothing ]
y : Maybe Int
y = Just 0x4
0x1
"#
),
1,
i64
);
}
#[test]
fn applied_tag_just_unit() {
assert_evals_to!(
indoc!(
r#"
Fruit : [ Orange, Apple, Banana ]
Maybe a : [ Just a, Nothing ]
orange : Fruit
orange = Orange
y : Maybe Fruit
y = Just orange
0x1
"#
),
1,
i64
);
}
// #[test]
// fn raw_result() {
// assert_evals_to!(
// indoc!(
// r#"
// x : Result Int Int
// x = Err 41
// x
// "#
// ),
// 0,
// i8
// );
// }
#[test]
fn true_is_true() {
assert_evals_to!(
indoc!(
r#"
bool : [True, False]
bool = True
bool
"#
),
true,
bool
);
}
#[test]
fn false_is_false() {
assert_evals_to!(
indoc!(
r#"
bool : [True, False]
bool = False
bool
"#
),
false,
bool
);
}
#[test]
fn basic_enum() {
assert_evals_to!(
indoc!(
r#"
Fruit : [ Apple, Orange, Banana ]
apple : Fruit
apple = Apple
orange : Fruit
orange = Orange
apple == orange
"#
),
false,
bool
);
}
// #[test]
// fn linked_list_empty() {
// assert_evals_to!(
// indoc!(
// r#"
// LinkedList a : [ Cons a (LinkedList a), Nil ]
//
// empty : LinkedList Int
// empty = Nil
//
// 1
// "#
// ),
// 1,
// i64
// );
// }
//
// #[test]
// fn linked_list_singleton() {
// assert_evals_to!(
// indoc!(
// r#"
// LinkedList a : [ Cons a (LinkedList a), Nil ]
//
// singleton : LinkedList Int
// singleton = Cons 0x1 Nil
//
// 1
// "#
// ),
// 1,
// i64
// );
// }
//
// #[test]
// fn linked_list_is_empty() {
// assert_evals_to!(
// indoc!(
// r#"
// LinkedList a : [ Cons a (LinkedList a), Nil ]
//
// isEmpty : LinkedList a -> Bool
// isEmpty = \list ->
// when list is
// Nil -> True
// Cons _ _ -> False
//
// isEmpty (Cons 4 Nil)
// "#
// ),
// false,
// bool
// );
// }
#[test]
fn even_odd() {
assert_evals_to!(
indoc!(
r#"
even = \n ->
when n is
0 -> True
1 -> False
_ -> odd (n - 1)
odd = \n ->
when n is
0 -> False
1 -> True
_ -> even (n - 1)
odd 5 && even 42
"#
),
true,
bool
);
}
#[test]
fn gen_literal_true() {
assert_evals_to!(
indoc!(
r#"
if True then -1 else 1
"#
),
-1,
i64
);
}
#[test]
fn gen_if_float() {
assert_evals_to!(
indoc!(
r#"
if True then -1.0 else 1.0
"#
),
-1.0,
f64
);
}
#[test]
fn when_on_nothing() {
assert_evals_to!(
indoc!(
r#"
x : [ Nothing, Just Int ]
x = Nothing
when x is
Nothing -> 0x2
Just _ -> 0x1
"#
),
2,
i64
);
}
#[test]
fn when_on_just() {
assert_evals_to!(
indoc!(
r#"
x : [ Nothing, Just Int ]
x = Just 41
when x is
Just v -> v + 0x1
Nothing -> 0x1
"#
),
42,
i64
);
}
#[test]
fn when_on_result() {
assert_evals_to!(
indoc!(
r#"
x : Result Int Int
x = Err 41
when x is
Err v -> v + 1
Ok _ -> 1
"#
),
42,
i64
);
}
#[test]
fn when_on_these() {
assert_evals_to!(
indoc!(
r#"
These a b : [ This a, That b, These a b ]
x : These Int Int
x = These 0x3 0x2
when x is
These a b -> a + b
That v -> 8
This v -> v
"#
),
5,
i64
);
}
#[test]
fn match_on_two_values() {
// this will produce a Chain internally
assert_evals_to!(
indoc!(
r#"
when Pair 2 3 is
Pair 4 3 -> 9
Pair a b -> a + b
"#
),
5,
i64
);
}
#[test]
fn pair_with_guard_pattern() {
assert_evals_to!(
indoc!(
r#"
when Pair 2 3 is
Pair 4 _ -> 1
Pair 3 _ -> 2
Pair a b -> a + b
"#
),
5,
i64
);
}
#[test]
fn result_with_guard_pattern() {
// This test revealed an issue with hashing Test values
assert_evals_to!(
indoc!(
r#"
x : Result Int Int
x = Ok 2
when x is
Ok 3 -> 1
Ok _ -> 2
Err _ -> 3
"#
),
2,
i64
);
}
#[test]
fn maybe_is_just() {
assert_evals_to!(
indoc!(
r#"
Maybe a : [ Just a, Nothing ]
isJust : Maybe a -> Bool
isJust = \list ->
when list is
Nothing -> False
Just _ -> True
isJust (Just 42)
"#
),
true,
bool
);
}
#[test]
fn nested_pattern_match() {
assert_evals_to!(
indoc!(
r#"
Maybe a : [ Nothing, Just a ]
x : Maybe (Maybe Int)
x = Just (Just 41)
when x is
Just (Just v) -> v + 0x1
_ -> 0x1
"#
),
42,
i64
);
}
#[test]
fn if_guard_pattern_false() {
assert_evals_to!(
indoc!(
r#"
when 2 is
2 if False -> 0
_ -> 42
"#
),
42,
i64
);
}
#[test]
fn if_guard_pattern_true() {
assert_evals_to!(
indoc!(
r#"
when 2 is
2 if True -> 42
_ -> 0
"#
),
42,
i64
);
}
#[test]
fn if_guard_exhaustiveness() {
assert_evals_to!(
indoc!(
r#"
when 2 is
_ if False -> 0
_ -> 42
"#
),
42,
i64
);
}
#[test]
fn when_on_enum() {
assert_evals_to!(
indoc!(
r#"
Fruit : [ Apple, Orange, Banana ]
apple : Fruit
apple = Apple
when apple is
Apple -> 1
Banana -> 2
Orange -> 3
"#
),
1,
i64
);
}
#[test]
fn pattern_matching_unit() {
assert_evals_to!(
indoc!(
r#"
Unit : [ Unit ]
f : Unit -> Int
f = \Unit -> 42
f Unit
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
Unit : [ Unit ]
x : Unit
x = Unit
when x is
Unit -> 42
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
f : {} -> Int
f = \{} -> 42
f {}
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
when {} is
{} -> 42
"#
),
42,
i64
);
}
#[test]
fn one_element_tag() {
assert_evals_to!(
indoc!(
r#"
x : [ Pair Int ]
x = Pair 2
0x3
"#
),
3,
i64
);
}
#[test]
fn nested_tag_union() {
assert_evals_to!(
indoc!(
r#"
Maybe a : [ Nothing, Just a ]
x : Maybe (Maybe a)
x = Just (Just 41)
5
"#
),
5,
i64
);
}
#[test]
fn unit_type() {
assert_evals_to!(
indoc!(
r#"
Unit : [ Unit ]
v : Unit
v = Unit
1
"#
),
1,
i64
);
}
#[test]
fn nested_record_load() {
assert_evals_to!(
indoc!(
r#"
Maybe a : [ Nothing, Just a ]
x = { a : { b : 0x5 } }
y = x.a
y.b
"#
),
5,
i64
);
}
}

View file

@ -0,0 +1,415 @@
#[macro_export]
macro_rules! assert_llvm_evals_to {
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
let target = target_lexicon::Triple::host();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let arena = Bump::new();
let CanExprOut { loc_expr, var_store, var, constraint, home, interns, .. } = can_expr($src);
let subs = Subs::new(var_store.into());
let mut unify_problems = Vec::new();
let (content, mut subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
let context = Context::create();
let module = roc_gen::llvm::build::module_from_builtins(&context, "app");
let builder = context.create_builder();
let fpm = inkwell::passes::PassManager::create(&module);
roc_gen::llvm::build::add_passes(&fpm);
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let layout = Layout::from_content(&arena, content, &subs, ptr_bytes)
.unwrap_or_else(|err| panic!("Code gen error in test: could not convert to layout. Err was {:?} and Subs were {:?}", err, subs));
let execution_engine =
module
.create_jit_execution_engine(OptimizationLevel::None)
.expect("Error creating JIT execution engine for test");
let main_fn_type = basic_type_from_layout(&arena, &context, &layout, ptr_bytes)
.fn_type(&[], false);
let main_fn_name = "$Test.main";
// Compile and add all the Procs before adding main
let mut env = roc_gen::llvm::build::Env {
arena: &arena,
builder: &builder,
context: &context,
interns,
module: arena.alloc(module),
ptr_bytes
};
let mut procs = Procs::default();
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and get the low-level Expr from the canonical Expr
let mut mono_problems = Vec::new();
let main_body = Expr::new(&arena, &mut subs, &mut mono_problems, loc_expr.value, &mut procs, home, &mut ident_ids, ptr_bytes);
// Put this module's ident_ids back in the interns, so we can use them in Env.
env.interns.all_ident_ids.insert(home, ident_ids);
let mut headers = Vec::with_capacity(procs.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
for (symbol, opt_proc) in procs.as_map().into_iter() {
if let Some(proc) = opt_proc {
let (fn_val, arg_basic_types) = build_proc_header(&env, symbol, &proc);
headers.push((proc, fn_val, arg_basic_types));
}
}
// Build each proc using its header info.
for (proc, fn_val, arg_basic_types) in headers {
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {}\n\n", name);
build_proc(&env, proc, &procs, fn_val, arg_basic_types);
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
// NOTE: If this fails, uncomment the above println to debug.
panic!("Non-main function failed LLVM verification. Uncomment the above println to debug!");
}
}
// Add main to the module.
let main_fn = env.module.add_function(main_fn_name, main_fn_type, None);
let cc = roc_gen::llvm::build::get_call_conventions(target.default_calling_convention().unwrap());
main_fn.set_call_conventions(cc);
// Add main's body
let basic_block = context.append_basic_block(main_fn, "entry");
builder.position_at_end(basic_block);
let ret = roc_gen::llvm::build::build_expr(
&env,
&ImMap::default(),
main_fn,
&main_body,
&mut Procs::default(),
);
builder.build_return(Some(&ret));
// Uncomment this to see the module's un-optimized LLVM instruction output:
// env.module.print_to_stderr();
if main_fn.verify(true) {
fpm.run_on(&main_fn);
} else {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Verify the module
if let Err(errors) = env.module.verify() {
panic!("Errors defining module: {:?}", errors);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
unsafe {
let main: JitFunction<unsafe extern "C" fn() -> $ty> = execution_engine
.get_function(main_fn_name)
.ok()
.ok_or(format!("Unable to JIT compile `{}`", main_fn_name))
.expect("errored");
assert_eq!($transform(main.call()), $expected);
}
};
}
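As a quick orientation, here is a hedged sketch (not part of this commit) of how a test module drives the macro above directly, using the four-argument form; the use statements mirror the gen_* test files earlier in this diff. The three-argument `assert_evals_to!` calls in those files presumably go through a thin wrapper that sits outside the portion of helpers shown here.

```
// Hypothetical test module; everything here is illustrative only.
#[cfg(test)]
mod macro_usage_example {
    use crate::helpers::{can_expr, infer_expr, uniq_expr, CanExprOut};
    use bumpalo::Bump;
    use inkwell::context::Context;
    use inkwell::execution_engine::JitFunction;
    use inkwell::passes::PassManager;
    use inkwell::types::BasicType;
    use inkwell::OptimizationLevel;
    use roc_collections::all::ImMap;
    use roc_gen::llvm::build::{build_proc, build_proc_header};
    use roc_gen::llvm::convert::basic_type_from_layout;
    use roc_mono::expr::{Expr, Procs};
    use roc_mono::layout::Layout;
    use roc_types::subs::Subs;

    #[test]
    fn add_three_ints() {
        // The final argument is the transform applied to the JIT result before
        // it is compared to the expected value; |x| x passes it through unchanged.
        assert_llvm_evals_to!("1 + 2 + 3", 6, i64, |x| x);
    }
}
```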
// TODO this is almost all code duplication with assert_llvm_evals_to
// the only difference is that this calls uniq_expr instead of can_expr.
// Should extract the common logic into test helpers.
#[macro_export]
macro_rules! assert_opt_evals_to {
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
let arena = Bump::new();
let target = target_lexicon::Triple::host();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let (loc_expr, _output, _problems, subs, var, constraint, home, interns) = uniq_expr($src);
let mut unify_problems = Vec::new();
let (content, mut subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
let context = Context::create();
let module = roc_gen::llvm::build::module_from_builtins(&context, "app");
let builder = context.create_builder();
let fpm = PassManager::create(&module);
roc_gen::llvm::build::add_passes(&fpm);
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let layout = Layout::from_content(&arena, content, &subs, ptr_bytes)
.unwrap_or_else(|err| panic!("Code gen error in test: could not convert to layout. Err was {:?} and Subs were {:?}", err, subs));
let execution_engine =
module
.create_jit_execution_engine(OptimizationLevel::None)
.expect("Error creating JIT execution engine for test");
let main_fn_type = basic_type_from_layout(&arena, &context, &layout, ptr_bytes)
.fn_type(&[], false);
let main_fn_name = "$Test.main";
// Compile and add all the Procs before adding main
let mut env = roc_gen::llvm::build::Env {
arena: &arena,
builder: &builder,
context: &context,
interns,
module: arena.alloc(module),
ptr_bytes
};
let mut procs = Procs::default();
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and get the low-level Expr from the canonical Expr
let mut mono_problems = Vec::new();
let main_body = Expr::new(&arena, &mut subs, &mut mono_problems, loc_expr.value, &mut procs, home, &mut ident_ids, ptr_bytes);
// Put this module's ident_ids back in the interns, so we can use them in Env.
env.interns.all_ident_ids.insert(home, ident_ids);
let mut headers = Vec::with_capacity(procs.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
for (symbol, opt_proc) in procs.as_map().into_iter() {
if let Some(proc) = opt_proc {
let (fn_val, arg_basic_types) = build_proc_header(&env, symbol, &proc);
headers.push((proc, fn_val, arg_basic_types));
}
}
// Build each proc using its header info.
for (proc, fn_val, arg_basic_types) in headers {
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {}\n\n", name);
build_proc(&env, proc, &procs, fn_val, arg_basic_types);
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
// NOTE: If this fails, uncomment the above println to debug.
panic!("Non-main function failed LLVM verification. Uncomment the above println to debug!");
}
}
// Add main to the module.
let main_fn = env.module.add_function(main_fn_name, main_fn_type, None);
let cc = roc_gen::llvm::build::get_call_conventions(target.default_calling_convention().unwrap());
main_fn.set_call_conventions(cc);
// Add main's body
let basic_block = context.append_basic_block(main_fn, "entry");
builder.position_at_end(basic_block);
let ret = roc_gen::llvm::build::build_expr(
&env,
&ImMap::default(),
main_fn,
&main_body,
&mut Procs::default(),
);
builder.build_return(Some(&ret));
// Uncomment this to see the module's un-optimized LLVM instruction output:
// env.module.print_to_stderr();
if main_fn.verify(true) {
fpm.run_on(&main_fn);
} else {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Verify the module
if let Err(errors) = env.module.verify() {
panic!("Errors defining module: {:?}", errors);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
unsafe {
let main: JitFunction<unsafe extern "C" fn() -> $ty> = execution_engine
.get_function(main_fn_name)
.ok()
.ok_or(format!("Unable to JIT compile `{}`", main_fn_name))
.expect("errored");
assert_eq!($transform(main.call()), $expected);
}
};
}
#[macro_export]
macro_rules! emit_expr {
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
let arena = Bump::new();
let (loc_expr, _output, _problems, subs, var, constraint, home, interns) = uniq_expr($src);
let mut unify_problems = Vec::new();
let (content, mut subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
let context = Context::create();
let module = context.create_module("app");
let builder = context.create_builder();
let fpm = PassManager::create(&module);
roc_gen::llvm::build::add_passes(&fpm);
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let ptr_bytes = $crate::helpers::eval::POINTER_SIZE;
let layout = Layout::from_content(&arena, content, &subs, ptr_bytes)
.unwrap_or_else(|err| panic!("Code gen error in test: could not convert to layout. Err was {:?} and Subs were {:?}", err, subs));
let execution_engine =
module
.create_jit_execution_engine(OptimizationLevel::None)
.expect("Error creating JIT execution engine for test");
let main_fn_type = basic_type_from_layout(&arena, &context, &layout, ptr_bytes)
.fn_type(&[], false);
let main_fn_name = "$Test.main";
// Compile and add all the Procs before adding main
let mut env = roc_gen::llvm::build::Env {
arena: &arena,
builder: &builder,
context: &context,
interns,
module: arena.alloc(module),
ptr_bytes
};
let mut procs = Procs::default();
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and get the low-level Expr from the canonical Expr
let main_body = Expr::new(&arena, &mut subs, loc_expr.value, &mut procs, home, &mut ident_ids, $crate::helpers::eval::POINTER_SIZE);
// Put this module's ident_ids back in the interns, so we can use them in Env.
env.interns.all_ident_ids.insert(home, ident_ids);
let mut headers = Vec::with_capacity(procs.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
for (symbol, opt_proc) in procs.as_map().into_iter() {
if let Some(proc) = opt_proc {
let (fn_val, arg_basic_types) = build_proc_header(&env, symbol, &proc);
headers.push((proc, fn_val, arg_basic_types));
}
}
// Build each proc using its header info.
for (proc, fn_val, arg_basic_types) in headers {
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {}\n\n", name);
build_proc(&env, proc, &procs, fn_val, arg_basic_types);
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
// NOTE: If this fails, uncomment the above println to debug.
panic!("Non-main function failed LLVM verification. Uncomment the above println to debug!");
}
}
// Add main to the module.
let main_fn = env.module.add_function(main_fn_name, main_fn_type, None);
main_fn.set_call_conventions($crate::helpers::eval::MAIN_CALLING_CONVENTION);
// Add main's body
let basic_block = context.append_basic_block(main_fn, "entry");
builder.position_at_end(basic_block);
let ret = roc_gen::llvm::build::build_expr(
&env,
&ImMap::default(),
main_fn,
&main_body,
&mut Procs::default(),
);
builder.build_return(Some(&ret));
// Uncomment this to see the module's un-optimized LLVM instruction output:
// env.module.print_to_stderr();
if main_fn.verify(true) {
fpm.run_on(&main_fn);
} else {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Verify the module
if let Err(errors) = env.module.verify() {
panic!("Errors defining module: {:?}", errors);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
unsafe {
let main: JitFunction<unsafe extern "C" fn() -> $ty> = execution_engine
.get_function(main_fn_name)
.ok()
.ok_or(format!("Unable to JIT compile `{}`", main_fn_name))
.expect("errored");
assert_eq!($transform(main.call()), $expected);
}
};
}
#[macro_export]
macro_rules! assert_evals_to {
($src:expr, $expected:expr, $ty:ty) => {
// Run un-optimized tests, and then optimized tests, in separate scopes.
// These each rebuild everything from scratch, starting with
// parsing the source, so that there's no chance their passing
// or failing depends on leftover state from the previous one.
{
assert_llvm_evals_to!($src, $expected, $ty, (|val| val));
}
{
assert_opt_evals_to!($src, $expected, $ty, (|val| val));
}
};
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
// Same as above, except with an additional transformation argument.
{
assert_llvm_evals_to!($src, $expected, $ty, $transform);
}
{
assert_opt_evals_to!($src, $expected, $ty, $transform);
}
};
}
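For reference, the deleted test_gen module later in this diff calls these macros as shown below; the snippet is copied from those tests to illustrate the two call shapes (with and without a transform), not to introduce new behavior.

```rust
// Plain form: Roc source, expected value, Rust type of the JIT result.
assert_evals_to!("1 + 2 + 3", 6, i64);

// With a transform applied to the raw result before comparing, as the
// string test does with CStr::from_ptr.
assert_evals_to!(
    "\"shirt and hat\"",
    CString::new("shirt and hat").unwrap().as_c_str(),
    *const c_char,
    CStr::from_ptr
);
```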

View file

@ -1,5 +1,8 @@
extern crate bumpalo; extern crate bumpalo;
#[macro_use]
pub mod eval;
use self::bumpalo::Bump; use self::bumpalo::Bump;
use roc_builtins::unique::uniq_stdlib; use roc_builtins::unique::uniq_stdlib;
use roc_can::constraint::Constraint; use roc_can::constraint::Constraint;
@ -31,7 +34,7 @@ pub fn test_home() -> ModuleId {
#[allow(dead_code)] #[allow(dead_code)]
pub fn infer_expr( pub fn infer_expr(
subs: Subs, subs: Subs,
problems: &mut Vec<roc_types::types::Problem>, problems: &mut Vec<roc_solve::solve::TypeError>,
constraint: &Constraint, constraint: &Constraint,
expr_var: Variable, expr_var: Variable,
) -> (Content, Subs) { ) -> (Content, Subs) {
@ -421,7 +424,7 @@ fn variable_usage_help(con: &Constraint, declared: &mut SeenVariables, used: &mu
match con { match con {
True | SaveTheEnvironment => (), True | SaveTheEnvironment => (),
Eq(tipe, expectation, _) => { Eq(tipe, expectation, _, _) => {
for v in tipe.variables() { for v in tipe.variables() {
used.insert(v); used.insert(v);
} }

View file

@ -1,905 +0,0 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate roc_gen;
mod helpers;
#[cfg(test)]
mod test_gen {
use crate::helpers::{can_expr, infer_expr, uniq_expr, CanExprOut};
use bumpalo::Bump;
use cranelift::prelude::{AbiParam, ExternalName, FunctionBuilder, FunctionBuilderContext};
use cranelift_codegen::ir::InstBuilder;
use cranelift_codegen::settings;
use cranelift_codegen::verifier::verify_function;
use cranelift_module::{default_libcall_names, Linkage, Module};
use cranelift_simplejit::{SimpleJITBackend, SimpleJITBuilder};
use inkwell::context::Context;
use inkwell::execution_engine::JitFunction;
use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc_collections::all::{ImMap, MutMap};
use roc_gen::crane::build::{declare_proc, define_proc_body, ScopeEntry};
use roc_gen::crane::convert::type_from_layout;
use roc_gen::crane::imports::define_malloc;
use roc_gen::llvm::build::{build_proc, build_proc_header};
use roc_gen::llvm::convert::basic_type_from_layout;
use roc_mono::expr::Expr;
use roc_mono::layout::Layout;
use roc_types::subs::Subs;
use std::ffi::{CStr, CString};
use std::mem;
use std::os::raw::c_char;
macro_rules! assert_crane_evals_to {
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
let arena = Bump::new();
let CanExprOut { loc_expr, var_store, var, constraint, home, interns, .. } = can_expr($src);
let subs = Subs::new(var_store.into());
let mut unify_problems = Vec::new();
let (content, subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
let shared_builder = settings::builder();
let shared_flags = settings::Flags::new(shared_builder);
let mut module: Module<SimpleJITBackend> =
Module::new(SimpleJITBuilder::new(default_libcall_names()));
let cfg = module.target_config();
let mut ctx = module.make_context();
let malloc = define_malloc(&mut module, &mut ctx);
let mut func_ctx = FunctionBuilderContext::new();
let main_fn_name = "$Test.main";
// Compute main_fn_ret_type before moving subs to Env
let layout = Layout::from_content(&arena, content, &subs)
.unwrap_or_else(|err| panic!("Code gen error in test: could not convert content to layout. Err was {:?} and Subs were {:?}", err, subs));
let main_ret_type = type_from_layout(cfg, &layout);
// Compile and add all the Procs before adding main
let mut procs = MutMap::default();
let mut env = roc_gen::crane::build::Env {
arena: &arena,
interns,
cfg,
malloc
};
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and Subs, and get the low-level Expr from the canonical Expr
let mono_expr = Expr::new(&arena, &subs, loc_expr.value, &mut procs, home, &mut ident_ids);
// Put this module's ident_ids back in the interns
env.interns.all_ident_ids.insert(home, ident_ids);
let mut scope = ImMap::default();
let mut declared = Vec::with_capacity(procs.len());
// Declare all the Procs, then insert them into scope so their bodies
// can look up their Funcs in scope later when calling each other by value.
for (name, opt_proc) in procs.iter() {
if let Some(proc) = opt_proc {
let (func_id, sig) = declare_proc(&env, &mut module, name.clone(), proc);
declared.push((proc.clone(), sig.clone(), func_id));
scope.insert(name.clone(), ScopeEntry::Func { func_id, sig });
}
}
for (proc, sig, fn_id) in declared {
define_proc_body(
&env,
&mut ctx,
&mut module,
fn_id,
&scope,
sig,
proc,
&procs,
);
// Verify the function we just defined
if let Err(errors) = verify_function(&ctx.func, &shared_flags) {
// NOTE: We don't include proc here because it's already
// been moved. If you need to know which proc failed, go back
// and add some logging.
panic!("Errors defining proc: {}", errors);
}
}
// Add main itself
let mut sig = module.make_signature();
sig.returns.push(AbiParam::new(main_ret_type));
let main_fn = module
.declare_function(main_fn_name, Linkage::Local, &sig)
.unwrap();
ctx.func.signature = sig;
ctx.func.name = ExternalName::user(0, main_fn.as_u32());
{
let mut builder: FunctionBuilder =
FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
let block = builder.create_block();
builder.switch_to_block(block);
// TODO try deleting this line and seeing if everything still works.
builder.append_block_params_for_function_params(block);
let main_body =
roc_gen::crane::build::build_expr(&env, &scope, &mut module, &mut builder, &mono_expr, &procs);
builder.ins().return_(&[main_body]);
// TODO re-enable this once Switch stops making unsealed blocks, e.g.
// https://docs.rs/cranelift-frontend/0.59.0/src/cranelift_frontend/switch.rs.html#152
// builder.seal_block(block);
builder.seal_all_blocks();
builder.finalize();
}
module.define_function(main_fn, &mut ctx).expect("declare main");
module.clear_context(&mut ctx);
// Perform linking
module.finalize_definitions();
// Verify the main function
if let Err(errors) = verify_function(&ctx.func, &shared_flags) {
panic!("Errors defining {} - {}", main_fn_name, errors);
}
let main_ptr = module.get_finalized_function(main_fn);
unsafe {
let run_main = mem::transmute::<_, fn() -> $ty>(main_ptr) ;
assert_eq!($transform(run_main()), $expected);
}
};
}
macro_rules! assert_llvm_evals_to {
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
let arena = Bump::new();
let CanExprOut { loc_expr, var_store, var, constraint, home, interns, .. } = can_expr($src);
let subs = Subs::new(var_store.into());
let mut unify_problems = Vec::new();
let (content, subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
let context = Context::create();
let module = context.create_module("app");
let builder = context.create_builder();
let fpm = PassManager::create(&module);
// Enable optimizations when running cargo test --release
if !cfg!(debug_assertions) {
fpm.add_instruction_combining_pass();
fpm.add_reassociate_pass();
fpm.add_basic_alias_analysis_pass();
fpm.add_promote_memory_to_register_pass();
fpm.add_cfg_simplification_pass();
fpm.add_gvn_pass();
// TODO figure out why enabling any of these (even alone) causes LLVM to segfault
// fpm.add_strip_dead_prototypes_pass();
// fpm.add_dead_arg_elimination_pass();
// fpm.add_function_inlining_pass();
}
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let layout = Layout::from_content(&arena, content, &subs)
.unwrap_or_else(|err| panic!("Code gen error in test: could not convert to layout. Err was {:?} and Subs were {:?}", err, subs));
let main_fn_type = basic_type_from_layout(&context, &layout)
.fn_type(&[], false);
let main_fn_name = "$Test.main";
let execution_engine =
module
.create_jit_execution_engine(OptimizationLevel::None)
.expect("Error creating JIT execution engine for test");
let pointer_bytes = execution_engine.get_target_data().get_pointer_byte_size(None);
// Compile and add all the Procs before adding main
let mut env = roc_gen::llvm::build::Env {
arena: &arena,
builder: &builder,
context: &context,
interns,
module: arena.alloc(module),
pointer_bytes
};
let mut procs = MutMap::default();
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and get the low-level Expr from the canonical Expr
let main_body = Expr::new(&arena, &subs, loc_expr.value, &mut procs, home, &mut ident_ids);
// Put this module's ident_ids back in the interns, so we can use them in Env.
env.interns.all_ident_ids.insert(home, ident_ids);
let mut headers = Vec::with_capacity(procs.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
for (symbol, opt_proc) in procs.clone().into_iter() {
if let Some(proc) = opt_proc {
let (fn_val, arg_basic_types) = build_proc_header(&env, symbol, &proc);
headers.push((proc, fn_val, arg_basic_types));
}
}
// Build each proc using its header info.
for (proc, fn_val, arg_basic_types) in headers {
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {}\n\n", name);
build_proc(&env, proc, &procs, fn_val, arg_basic_types);
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
// NOTE: If this fails, uncomment the above println to debug.
panic!("Non-main function failed LLVM verification. Uncomment the above println to debug!");
}
}
// Add main to the module.
let main_fn = env.module.add_function(main_fn_name, main_fn_type, None);
// Add main's body
let basic_block = context.append_basic_block(main_fn, "entry");
builder.position_at_end(basic_block);
let ret = roc_gen::llvm::build::build_expr(
&env,
&ImMap::default(),
main_fn,
&main_body,
&mut MutMap::default(),
);
builder.build_return(Some(&ret));
// Uncomment this to see the module's un-optimized LLVM instruction output:
// env.module.print_to_stderr();
if main_fn.verify(true) {
fpm.run_on(&main_fn);
} else {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
unsafe {
let main: JitFunction<unsafe extern "C" fn() -> $ty> = execution_engine
.get_function(main_fn_name)
.ok()
.ok_or(format!("Unable to JIT compile `{}`", main_fn_name))
.expect("errored");
assert_eq!($transform(main.call()), $expected);
}
};
}
// TODO this is almost all code duplication with the one in test_gen;
// the only difference is that this calls uniq_expr instead of can_expr.
// Should extract the common logic into test helpers.
macro_rules! assert_opt_evals_to {
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
let arena = Bump::new();
let (loc_expr, _output, _problems, subs, var, constraint, home, interns) = uniq_expr($src);
let mut unify_problems = Vec::new();
let (content, subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
let context = Context::create();
let module = context.create_module("app");
let builder = context.create_builder();
let fpm = PassManager::create(&module);
// Enable optimizations when running cargo test --release
if !cfg!(debug_assertions) {
fpm.add_instruction_combining_pass();
fpm.add_reassociate_pass();
fpm.add_basic_alias_analysis_pass();
fpm.add_promote_memory_to_register_pass();
fpm.add_cfg_simplification_pass();
fpm.add_gvn_pass();
// TODO figure out why enabling any of these (even alone) causes LLVM to segfault
// fpm.add_strip_dead_prototypes_pass();
// fpm.add_dead_arg_elimination_pass();
// fpm.add_function_inlining_pass();
}
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let layout = Layout::from_content(&arena, content, &subs)
.unwrap_or_else(|err| panic!("Code gen error in test: could not convert to layout. Err was {:?} and Subs were {:?}", err, subs));
let main_fn_type = basic_type_from_layout(&context, &layout)
.fn_type(&[], false);
let main_fn_name = "$Test.main";
let execution_engine =
module
.create_jit_execution_engine(OptimizationLevel::None)
.expect("Error creating JIT execution engine for test");
let pointer_bytes = execution_engine.get_target_data().get_pointer_byte_size(None);
// Compile and add all the Procs before adding main
let mut env = roc_gen::llvm::build::Env {
arena: &arena,
builder: &builder,
context: &context,
interns,
module: arena.alloc(module),
pointer_bytes
};
let mut procs = MutMap::default();
let mut ident_ids = env.interns.all_ident_ids.remove(&home).unwrap();
// Populate Procs and get the low-level Expr from the canonical Expr
let main_body = Expr::new(&arena, &subs, loc_expr.value, &mut procs, home, &mut ident_ids);
// Put this module's ident_ids back in the interns, so we can use them in Env.
env.interns.all_ident_ids.insert(home, ident_ids);
let mut headers = Vec::with_capacity(procs.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
for (symbol, opt_proc) in procs.clone().into_iter() {
if let Some(proc) = opt_proc {
let (fn_val, arg_basic_types) = build_proc_header(&env, symbol, &proc);
headers.push((proc, fn_val, arg_basic_types));
}
}
// Build each proc using its header info.
for (proc, fn_val, arg_basic_types) in headers {
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {}\n\n", name);
build_proc(&env, proc, &procs, fn_val, arg_basic_types);
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
// NOTE: If this fails, uncomment the above println to debug.
panic!("Non-main function failed LLVM verification. Uncomment the above println to debug!");
}
}
// Add main to the module.
let main_fn = env.module.add_function(main_fn_name, main_fn_type, None);
// Add main's body
let basic_block = context.append_basic_block(main_fn, "entry");
builder.position_at_end(basic_block);
let ret = roc_gen::llvm::build::build_expr(
&env,
&ImMap::default(),
main_fn,
&main_body,
&mut MutMap::default(),
);
builder.build_return(Some(&ret));
// Uncomment this to see the module's un-optimized LLVM instruction output:
// env.module.print_to_stderr();
if main_fn.verify(true) {
fpm.run_on(&main_fn);
} else {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
unsafe {
let main: JitFunction<unsafe extern "C" fn() -> $ty> = execution_engine
.get_function(main_fn_name)
.ok()
.ok_or(format!("Unable to JIT compile `{}`", main_fn_name))
.expect("errored");
assert_eq!($transform(main.call()), $expected);
}
};
}
macro_rules! assert_evals_to {
($src:expr, $expected:expr, $ty:ty) => {
// Run Cranelift tests, then LLVM tests, in separate scopes.
// These each rebuild everything from scratch, starting with
// parsing the source, so that there's no chance their passing
// or failing depends on leftover state from the previous one.
{
assert_crane_evals_to!($src, $expected, $ty, (|val| val));
}
{
assert_llvm_evals_to!($src, $expected, $ty, (|val| val));
}
{
assert_opt_evals_to!($src, $expected, $ty, (|val| val));
}
};
($src:expr, $expected:expr, $ty:ty, $transform:expr) => {
// Same as above, except with an additional transformation argument.
{
assert_crane_evals_to!($src, $expected, $ty, $transform);
}
{
assert_llvm_evals_to!($src, $expected, $ty, $transform);
}
{
assert_opt_evals_to!($src, $expected, $ty, $transform);
}
};
}
#[test]
fn basic_str() {
assert_evals_to!(
"\"shirt and hat\"",
CString::new("shirt and hat").unwrap().as_c_str(),
*const c_char,
CStr::from_ptr
);
}
#[test]
fn basic_int() {
assert_evals_to!("123", 123, i64);
}
#[test]
fn basic_float() {
assert_evals_to!("1234.0", 1234.0, f64);
}
#[test]
fn get_int_list() {
assert_evals_to!("List.getUnsafe [ 12, 9, 6, 3 ] 1", 9, i64);
}
#[test]
fn set_unique_int_list() {
assert_evals_to!("List.getUnsafe (List.set [ 12, 9, 7, 3 ] 1 42) 1", 42, i64);
}
#[test]
fn set_shared_int_list() {
assert_evals_to!(
indoc!(
r#"
shared = [ 2, 4 ]
List.getUnsafe shared 1
"#
),
4,
i64
);
}
#[test]
fn branch_first_float() {
assert_evals_to!(
indoc!(
r#"
when 1.23 is
1.23 -> 12
_ -> 34
"#
),
12,
i64
);
}
#[test]
fn branch_second_float() {
assert_evals_to!(
indoc!(
r#"
when 2.34 is
1.23 -> 63
_ -> 48
"#
),
48,
i64
);
}
#[test]
fn branch_first_int() {
assert_evals_to!(
indoc!(
r#"
when 1 is
1 -> 12
_ -> 34
"#
),
12,
i64
);
}
#[test]
fn branch_second_int() {
assert_evals_to!(
indoc!(
r#"
when 2 is
1 -> 63
_ -> 48
"#
),
48,
i64
);
}
#[test]
fn gen_when_one_branch() {
assert_evals_to!(
indoc!(
r#"
when 3.14 is
_ -> 23
"#
),
23,
i64
);
}
#[test]
fn gen_large_when_int() {
assert_evals_to!(
indoc!(
r#"
foo = \num ->
when num is
0 -> 200
-3 -> 111 # TODO adding more negative numbers reproduces parsing bugs here
3 -> 789
1 -> 123
2 -> 456
_ -> 1000
foo -3
"#
),
111,
i64
);
}
#[test]
fn int_negate() {
assert_evals_to!("Num.neg 123", -123, i64);
}
// #[test]
// fn gen_large_when_float() {
// assert_evals_to!(
// indoc!(
// r#"
// foo = \num ->
// when num is
// 0.5 -> 200.1
// -3.6 -> 111.2 # TODO adding more negative numbers reproduces parsing bugs here
// 3.6 -> 789.5
// 1.7 -> 123.3
// 2.8 -> 456.4
// _ -> 1000.6
// foo -3.6
// "#
// ),
// 111.2,
// f64
// );
// }
#[test]
fn gen_basic_def() {
assert_evals_to!(
indoc!(
r#"
answer = 42
answer
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
pi = 3.14
pi
"#
),
3.14,
f64
);
}
#[test]
fn gen_multiple_defs() {
assert_evals_to!(
indoc!(
r#"
answer = 42
pi = 3.14
answer
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
answer = 42
pi = 3.14
pi
"#
),
3.14,
f64
);
}
#[test]
fn gen_chained_defs() {
assert_evals_to!(
indoc!(
r#"
x = i1
i3 = i2
i1 = 1337
i2 = i1
y = 12.4
i3
"#
),
1337,
i64
);
}
#[test]
fn gen_nested_defs() {
assert_evals_to!(
indoc!(
r#"
x = 5
answer =
i3 = i2
nested =
a = 1.0
b = 5
i1
i1 = 1337
i2 = i1
nested
# None of this should affect anything, even though names
# overlap with the previous nested defs
unused =
nested = 17
i1 = 84.2
nested
y = 12.4
answer
"#
),
1337,
i64
);
}
#[test]
fn gen_basic_fn() {
assert_evals_to!(
indoc!(
r#"
always42 : Num.Num Int.Integer -> Num.Num Int.Integer
always42 = \num -> 42
always42 5
"#
),
42,
i64
);
}
#[test]
fn gen_when_fn() {
assert_evals_to!(
indoc!(
r#"
limitedNegate = \num ->
when num is
1 -> -1
-1 -> 1
_ -> num
limitedNegate 1
"#
),
-1,
i64
);
}
#[test]
fn apply_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
(\a -> a) 5
"#
),
5,
i64
);
}
#[test]
fn gen_add_i64() {
assert_evals_to!(
indoc!(
r#"
1 + 2 + 3
"#
),
6,
i64
);
}
#[test]
fn gen_sub_i64() {
assert_evals_to!(
indoc!(
r#"
1 - 2 - 3
"#
),
-4,
i64
);
}
#[test]
fn gen_mul_i64() {
assert_evals_to!(
indoc!(
r#"
2 * 4 * 6
"#
),
48,
i64
);
}
#[test]
fn gen_order_of_arithmetic_ops() {
assert_evals_to!(
indoc!(
r#"
1 + 3 * 7 - 2
"#
),
20,
i64
);
}
#[test]
fn return_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
alwaysFloatIdentity : Int -> (Float -> Float)
alwaysFloatIdentity = \num ->
(\a -> a)
(alwaysFloatIdentity 2) 3.14
"#
),
3.14,
f64
);
}
// #[test]
// fn basic_record() {
// assert_evals_to!(
// indoc!(
// r#"
// point = { x: 15, y: 17, z: 19 }
// point.x
// "#
// ),
// 15,
// i64
// );
// assert_evals_to!(
// indoc!(
// r#"
// point = { x: 15, y: 17, z: 19 }
// point.y
// "#
// ),
// 17,
// i64
// );
// assert_evals_to!(
// indoc!(
// r#"
// point = { x: 15, y: 17, z: 19 }
// point.z
// "#
// ),
// 19,
// i64
// );
// }
}

View file

@ -52,7 +52,7 @@ pub struct LoadedModule {
pub interns: Interns, pub interns: Interns,
pub solved: Solved<Subs>, pub solved: Solved<Subs>,
pub can_problems: Vec<roc_problem::can::Problem>, pub can_problems: Vec<roc_problem::can::Problem>,
pub type_problems: Vec<types::Problem>, pub type_problems: Vec<solve::TypeError>,
pub declarations: Vec<Declaration>, pub declarations: Vec<Declaration>,
} }
@ -93,7 +93,7 @@ enum Msg {
solved_types: MutMap<Symbol, SolvedType>, solved_types: MutMap<Symbol, SolvedType>,
aliases: MutMap<Symbol, Alias>, aliases: MutMap<Symbol, Alias>,
subs: Arc<Solved<Subs>>, subs: Arc<Solved<Subs>>,
problems: Vec<types::Problem>, problems: Vec<solve::TypeError>,
}, },
} }
@ -1048,8 +1048,11 @@ fn parse_and_constrain(
(module, ident_ids, constraint, problems) (module, ident_ids, constraint, problems)
} }
Err(_runtime_error) => { Err(runtime_error) => {
panic!("TODO gracefully handle module canonicalization error"); panic!(
"TODO gracefully handle module canonicalization error {:?}",
runtime_error
);
} }
}; };

View file

@ -63,7 +63,7 @@ updateCost = \current, neighbour, model ->
newCosts = Map.insert model.costs neighbour distanceTo newCosts = Map.insert model.costs neighbour distanceTo
distanceTo = reconstructPath newCameFrom neighbour distanceTo = reconstructPath newCameFrom neighbour
|> List.length |> List.len
|> Num.toFloat |> Num.toFloat
newModel = { model & costs : newCosts , cameFrom : newCameFrom } newModel = { model & costs : newCosts , cameFrom : newCameFrom }

View file

@ -1,5 +1,5 @@
interface WithBuiltins interface WithBuiltins
exposes [ floatTest, divisionFn, divisionTest, intTest, constantInt, fromDep2, divDep1ByDep2 ] exposes [ floatTest, divisionFn, divisionTest, intTest, constantNum, fromDep2, divDep1ByDep2 ]
imports [ Dep1, Dep2.{ two } ] imports [ Dep1, Dep2.{ two } ]
floatTest = Float.highest floatTest = Float.highest
@ -12,7 +12,7 @@ divisionTest = Float.highest / x
intTest = Int.highest intTest = Int.highest
constantInt = 5 constantNum = 5
fromDep2 = Dep2.two fromDep2 = Dep2.two

View file

@ -28,10 +28,33 @@ pub fn test_home() -> ModuleId {
ModuleIds::default().get_or_insert(&"Test".into()) ModuleIds::default().get_or_insert(&"Test".into())
} }
/// Without a larger-than-default stack size, some tests
/// run out of stack space in debug builds (but don't in --release builds)
#[allow(dead_code)]
const THREAD_STACK_SIZE: usize = 4 * 1024 * 1024;
pub fn test_async<F: std::future::Future>(future: F) -> F::Output {
use tokio::runtime::Builder;
// Create the runtime
let mut rt = Builder::new()
.thread_name("tokio-thread-for-tests")
.thread_stack_size(THREAD_STACK_SIZE)
// DEBUG: Replace this with .basic_scheduler() to make tests run single-threaded on the main thread.
// Doing this makes assertion failures easier to read, but means
// the tests can't reveal concurrency bugs, so leave this off by default!
.threaded_scheduler()
.build()
.expect("Error initializing Tokio runtime.");
// Spawn the root task
rt.block_on(future)
}
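A minimal sketch of how this helper is meant to be called from a test; the async body here is only a placeholder (real tests await `load_fixture(...)` as in test_load and test_uniq_load below).

```rust
#[test]
fn example_async_test() {
    test_async(async {
        // A real test would `load_fixture(...).await` here and assert on the
        // resulting LoadedModule; this body is a placeholder.
        assert_eq!(1 + 1, 2);
    });
}
```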
#[allow(dead_code)] #[allow(dead_code)]
pub fn infer_expr( pub fn infer_expr(
subs: Subs, subs: Subs,
problems: &mut Vec<roc_types::types::Problem>, problems: &mut Vec<solve::TypeError>,
constraint: &Constraint, constraint: &Constraint,
expr_var: Variable, expr_var: Variable,
) -> (Content, Subs) { ) -> (Content, Subs) {
@ -391,7 +414,7 @@ fn variable_usage_help(con: &Constraint, declared: &mut SeenVariables, used: &mu
match con { match con {
True | SaveTheEnvironment => (), True | SaveTheEnvironment => (),
Eq(tipe, expectation, _) => { Eq(tipe, expectation, _, _) => {
for v in tipe.variables() { for v in tipe.variables() {
used.insert(v); used.insert(v);
} }

View file

@ -13,7 +13,7 @@ mod helpers;
#[cfg(test)] #[cfg(test)]
mod test_load { mod test_load {
use crate::helpers::fixtures_dir; use crate::helpers::{fixtures_dir, test_async};
use inlinable_string::InlinableString; use inlinable_string::InlinableString;
use roc_can::def::Declaration::*; use roc_can::def::Declaration::*;
use roc_can::def::Def; use roc_can::def::Def;
@ -27,16 +27,6 @@ mod test_load {
// HELPERS // HELPERS
fn test_async<F: std::future::Future>(future: F) -> F::Output {
use tokio::runtime::Runtime;
// Create the runtime
let mut rt = Runtime::new().expect("Error initializing Tokio runtime.");
// Spawn the root task
rt.block_on(future)
}
async fn load_fixture( async fn load_fixture(
dir_name: &str, dir_name: &str,
module_name: &str, module_name: &str,
@ -213,7 +203,7 @@ mod test_load {
"divisionTest" => "Float", "divisionTest" => "Float",
"intTest" => "Int", "intTest" => "Int",
"x" => "Float", "x" => "Float",
"constantInt" => "Int", "constantNum" => "Num *",
"divDep1ByDep2" => "Float", "divDep1ByDep2" => "Float",
"fromDep2" => "Float", "fromDep2" => "Float",
}, },

View file

@ -13,7 +13,7 @@ mod helpers;
#[cfg(test)] #[cfg(test)]
mod test_uniq_load { mod test_uniq_load {
use crate::helpers::fixtures_dir; use crate::helpers::{fixtures_dir, test_async};
use inlinable_string::InlinableString; use inlinable_string::InlinableString;
use roc_builtins::unique; use roc_builtins::unique;
use roc_can::def::Declaration::*; use roc_can::def::Declaration::*;
@ -28,16 +28,6 @@ mod test_uniq_load {
// HELPERS // HELPERS
fn test_async<F: std::future::Future>(future: F) -> F::Output {
use tokio::runtime::Runtime;
// Create the runtime
let mut rt = Runtime::new().expect("Error initializing Tokio runtime.");
// Spawn the root task
rt.block_on(future)
}
async fn load_fixture( async fn load_fixture(
dir_name: &str, dir_name: &str,
module_name: &str, module_name: &str,
@ -208,7 +198,7 @@ mod test_uniq_load {
"divisionTest" => "Attr * Float", "divisionTest" => "Attr * Float",
"intTest" => "Attr * Int", "intTest" => "Attr * Int",
"x" => "Attr * Float", "x" => "Attr * Float",
"constantInt" => "Attr * Int", "constantNum" => "Attr * (Num (Attr * *))",
"divDep1ByDep2" => "Attr * Float", "divDep1ByDep2" => "Attr * Float",
"fromDep2" => "Attr * Float", "fromDep2" => "Attr * Float",
}, },

View file

@ -9,6 +9,14 @@ use std::{fmt, u32};
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Symbol(u64); pub struct Symbol(u64);
// When this is `true` (which it normally should be), Symbol's Debug::fmt implementation
// attempts to pretty print debug symbols using interns recorded via
// register_debug_idents calls (which should be made in debug mode).
// Set it to false if you want to see the raw ModuleId and IdentId ints,
// but please set it back to true before checking in the result!
#[cfg(debug_assertions)]
const PRETTY_PRINT_DEBUG_SYMBOLS: bool = true;
/// In Debug builds only, Symbol has a name() method that lets /// In Debug builds only, Symbol has a name() method that lets
/// you look up its name in a global intern table. This table is /// you look up its name in a global intern table. This table is
/// behind a mutex, so it is neither populated nor available in release builds. /// behind a mutex, so it is neither populated nor available in release builds.
@ -101,6 +109,7 @@ impl Symbol {
impl fmt::Debug for Symbol { impl fmt::Debug for Symbol {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if PRETTY_PRINT_DEBUG_SYMBOLS {
let module_id = self.module_id(); let module_id = self.module_id();
let ident_id = self.ident_id(); let ident_id = self.ident_id();
@ -122,6 +131,9 @@ impl fmt::Debug for Symbol {
fallback_debug_fmt(*self, f) fallback_debug_fmt(*self, f)
} }
} }
} else {
fallback_debug_fmt(*self, f)
}
} }
#[cfg(not(debug_assertions))] #[cfg(not(debug_assertions))]
@ -168,6 +180,19 @@ pub struct Interns {
} }
impl Interns { impl Interns {
pub fn module_id(&mut self, name: &InlinableString) -> ModuleId {
self.module_ids.get_or_insert(name)
}
pub fn module_name(&self, module_id: ModuleId) -> &InlinableString {
self.module_ids.get_name(module_id).unwrap_or_else(|| {
panic!(
"Unable to find interns entry for module_id {:?} in Interns {:?}",
module_id, self
)
})
}
pub fn symbol(&self, module_id: ModuleId, ident: InlinableString) -> Symbol { pub fn symbol(&self, module_id: ModuleId, ident: InlinableString) -> Symbol {
match self.all_ident_ids.get(&module_id) { match self.all_ident_ids.get(&module_id) {
Some(ident_ids) => match ident_ids.get_id(&ident) { Some(ident_ids) => match ident_ids.get_id(&ident) {
@ -207,32 +232,11 @@ lazy_static! {
#[derive(Copy, Clone, PartialEq, Eq, Hash)] #[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct ModuleId(u32); pub struct ModuleId(u32);
/// In Debug builds only, ModuleId has a name() method that lets
/// you look up its name in a global intern table. This table is
/// behind a mutex, so it is neither populated nor available in release builds.
impl ModuleId { impl ModuleId {
// NOTE: the define_builtins! macro adds a bunch of constants to this impl, // NOTE: the define_builtins! macro adds a bunch of constants to this impl,
// //
// e.g. pub const NUM: ModuleId = … // e.g. pub const NUM: ModuleId = …
#[cfg(debug_assertions)]
pub fn name(self) -> Box<str> {
let names =
DEBUG_MODULE_ID_NAMES
.lock()
.expect("Failed to acquire lock for Debug reading from DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
match names.get(&self.0) {
Some(str_ref) => str_ref.clone(),
None => {
panic!(
"Could not find a Debug name for module ID {} in {:?}",
self.0, names,
);
}
}
}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
pub fn register_debug_idents(self, ident_ids: &IdentIds) { pub fn register_debug_idents(self, ident_ids: &IdentIds) {
let mut all = DEBUG_IDENT_IDS_BY_MODULE_ID.lock().expect("Failed to acquire lock for Debug interning into DEBUG_MODULE_ID_NAMES, presumably because a thread panicked."); let mut all = DEBUG_IDENT_IDS_BY_MODULE_ID.lock().expect("Failed to acquire lock for Debug interning into DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
@ -264,7 +268,20 @@ impl fmt::Debug for ModuleId {
// Originally, this printed both name and numeric ID, but the numeric ID // Originally, this printed both name and numeric ID, but the numeric ID
// didn't seem to add anything useful. Feel free to temporarily re-add it // didn't seem to add anything useful. Feel free to temporarily re-add it
// if it's helpful in debugging! // if it's helpful in debugging!
write!(f, "{}", self.name()) let names =
DEBUG_MODULE_ID_NAMES
.lock()
.expect("Failed to acquire lock for Debug reading from DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
match names.get(&self.0) {
Some(str_ref) => write!(f, "{}", str_ref.clone()),
None => {
panic!(
"Could not find a Debug name for module ID {} in {:?}",
self.0, names,
);
}
}
} }
/// In release builds, all we have access to is the number, so only display that. /// In release builds, all we have access to is the number, so only display that.
@ -547,11 +564,13 @@ macro_rules! define_builtins {
}; };
} }
// NOTE: Some of these builtins have a # at the beginning of their names.
// This is because they are for compiler use only, and should not cause
// namespace conflicts with userspace!
define_builtins! { define_builtins! {
0 ATTR: "Attr" => { 0 ATTR: "#Attr" => {
0 UNDERSCORE: "_" // the _ used in pattern matches. This is Symbol 0. 0 UNDERSCORE: "_" // the _ used in pattern matches. This is Symbol 0.
1 ATTR_ATTR: "Attr" // the Attr.Attr type alias, used in uniqueness types 1 ATTR_ATTR: "Attr" // the #Attr.Attr type alias, used in uniqueness types.
2 ATTR_AT_ATTR: "@Attr" // the Attr.@Attr private tag
} }
1 NUM: "Num" => { 1 NUM: "Num" => {
0 NUM_NUM: "Num" imported // the Num.Num type alias 0 NUM_NUM: "Num" imported // the Num.Num type alias
@ -575,6 +594,11 @@ define_builtins! {
4 INT_MOD: "mod" 4 INT_MOD: "mod"
5 INT_HIGHEST: "highest" 5 INT_HIGHEST: "highest"
6 INT_LOWEST: "lowest" 6 INT_LOWEST: "lowest"
7 INT_ADD: "#add"
8 INT_SUB: "#sub"
9 INT_EQ_I64: "#eqi64" // Equality on 64-bit integers, the standard in Roc
10 INT_EQ_I1: "#eqi1" // Equality on boolean (theoretically i1) values
11 INT_EQ_I8: "#eqi8" // Equality on byte (theoretically i8) values
} }
3 FLOAT: "Float" => { 3 FLOAT: "Float" => {
0 FLOAT_FLOAT: "Float" imported // the Float.Float type alias 0 FLOAT_FLOAT: "Float" imported // the Float.Float type alias
@ -585,6 +609,10 @@ define_builtins! {
5 FLOAT_SQRT: "sqrt" 5 FLOAT_SQRT: "sqrt"
6 FLOAT_HIGHEST: "highest" 6 FLOAT_HIGHEST: "highest"
7 FLOAT_LOWEST: "lowest" 7 FLOAT_LOWEST: "lowest"
8 FLOAT_ADD: "#add"
9 FLOAT_SUB: "#sub"
10 FLOAT_EQ: "#eq"
11 FLOAT_ROUND: "round"
} }
4 BOOL: "Bool" => { 4 BOOL: "Bool" => {
0 BOOL_BOOL: "Bool" imported // the Bool.Bool type alias 0 BOOL_BOOL: "Bool" imported // the Bool.Bool type alias
@ -603,16 +631,17 @@ define_builtins! {
6 LIST: "List" => { 6 LIST: "List" => {
0 LIST_LIST: "List" imported // the List.List type alias 0 LIST_LIST: "List" imported // the List.List type alias
1 LIST_AT_LIST: "@List" // the List.@List private tag 1 LIST_AT_LIST: "@List" // the List.@List private tag
2 LIST_ISEMPTY: "isEmpty" 2 LIST_IS_EMPTY: "isEmpty"
3 LIST_GET: "get" 3 LIST_GET: "get"
4 LIST_SET: "set" 4 LIST_SET: "set"
5 LIST_SET_IN_PLACE: "set_in_place" 5 LIST_SET_IN_PLACE: "#setInPlace"
6 LIST_PUSH: "push" 6 LIST_PUSH: "push"
7 LIST_MAP: "map" 7 LIST_MAP: "map"
8 LIST_LENGTH: "length" 8 LIST_LEN: "len"
9 LIST_FOLDL: "foldl" 9 LIST_FOLDL: "foldl"
10 LIST_FOLDR: "foldr" 10 LIST_FOLDR: "foldr"
11 LIST_GET_UNSAFE: "getUnsafe" // TODO remove once we can code gen Result 11 LIST_GET_UNSAFE: "getUnsafe" // TODO remove once we can code gen Result
12 LIST_CONCAT: "concat"
} }
7 RESULT: "Result" => { 7 RESULT: "Result" => {
0 RESULT_RESULT: "Result" imported // the Result.Result type alias 0 RESULT_RESULT: "Result" imported // the Result.Result type alias

View file

@ -11,12 +11,12 @@ roc_module = { path = "../module" }
roc_types = { path = "../types" } roc_types = { path = "../types" }
roc_can = { path = "../can" } roc_can = { path = "../can" }
roc_unify = { path = "../unify" } roc_unify = { path = "../unify" }
roc_problem = { path = "../problem" }
bumpalo = { version = "3.2", features = ["collections"] } bumpalo = { version = "3.2", features = ["collections"] }
[dev-dependencies] [dev-dependencies]
roc_constrain = { path = "../constrain" } roc_constrain = { path = "../constrain" }
roc_builtins = { path = "../builtins" } roc_builtins = { path = "../builtins" }
roc_problem = { path = "../problem" }
roc_parse = { path = "../parse" } roc_parse = { path = "../parse" }
roc_solve = { path = "../solve" } roc_solve = { path = "../solve" }
pretty_assertions = "0.5.1 " pretty_assertions = "0.5.1 "

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -4,45 +4,65 @@ use roc_collections::all::MutMap;
use roc_module::ident::{Lowercase, TagName}; use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_types::subs::{Content, FlatType, Subs, Variable}; use roc_types::subs::{Content, FlatType, Subs, Variable};
use std::collections::BTreeMap;
pub const MAX_ENUM_SIZE: usize = (std::mem::size_of::<u8>() * 8) as usize;
/// Types for code gen must be monomorphic. No type variables allowed! /// Types for code gen must be monomorphic. No type variables allowed!
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Layout<'a> { pub enum Layout<'a> {
Builtin(Builtin<'a>), Builtin(Builtin<'a>),
Struct(&'a [(Lowercase, Layout<'a>)]), Struct(&'a [Layout<'a>]),
Pointer(&'a Layout<'a>), Union(&'a [&'a [Layout<'a>]]),
/// A function. The types of its arguments, then the type of its return value. /// A function. The types of its arguments, then the type of its return value.
FunctionPointer(&'a [Layout<'a>], &'a Layout<'a>), FunctionPointer(&'a [Layout<'a>], &'a Layout<'a>),
Pointer(&'a Layout<'a>),
} }
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Builtin<'a> { pub enum Builtin<'a> {
Int64, Int64,
Float64, Float64,
Bool,
Byte,
Str, Str,
Map(&'a Layout<'a>, &'a Layout<'a>), Map(&'a Layout<'a>, &'a Layout<'a>),
Set(&'a Layout<'a>), Set(&'a Layout<'a>),
List(&'a Layout<'a>), List(&'a Layout<'a>),
EmptyStr,
EmptyList,
EmptyMap,
EmptySet,
} }
impl<'a> Layout<'a> { impl<'a> Layout<'a> {
/// Returns Err(()) if given an error, or Ok(Layout) if given a non-erroneous Structure. /// Returns Err(()) if given an error, or Ok(Layout) if given a non-erroneous Structure.
/// Panics if given a FlexVar or RigidVar, since those should have been /// Panics if given a FlexVar or RigidVar, since those should have been
/// monomorphized away already! /// monomorphized away already!
pub fn from_var(arena: &'a Bump, var: Variable, subs: &Subs) -> Result<Self, ()> { pub fn from_var(
arena: &'a Bump,
var: Variable,
subs: &Subs,
pointer_size: u32,
) -> Result<Self, ()> {
let content = subs.get_without_compacting(var).content; let content = subs.get_without_compacting(var).content;
Self::from_content(arena, content, subs) Self::from_content(arena, content, subs, pointer_size)
} }
pub fn from_content(arena: &'a Bump, content: Content, subs: &Subs) -> Result<Self, ()> { pub fn from_content(
arena: &'a Bump,
content: Content,
subs: &Subs,
pointer_size: u32,
) -> Result<Self, ()> {
use roc_types::subs::Content::*; use roc_types::subs::Content::*;
match content { match content {
var @ FlexVar(_) | var @ RigidVar(_) => { var @ FlexVar(_) | var @ RigidVar(_) => {
panic!("Layout::from_content encountered an unresolved {:?}", var); panic!("Layout::from_content encountered an unresolved {:?}", var);
} }
Structure(flat_type) => layout_from_flat_type(arena, flat_type, subs), Structure(flat_type) => layout_from_flat_type(arena, flat_type, subs, pointer_size),
Alias(Symbol::INT_INT, args, _) => { Alias(Symbol::INT_INT, args, _) => {
debug_assert!(args.is_empty()); debug_assert!(args.is_empty());
@ -52,13 +72,38 @@ impl<'a> Layout<'a> {
debug_assert!(args.is_empty()); debug_assert!(args.is_empty());
Ok(Layout::Builtin(Builtin::Float64)) Ok(Layout::Builtin(Builtin::Float64))
} }
Alias(_, _, var) => { Alias(_, _, var) => Self::from_content(
Self::from_content(arena, subs.get_without_compacting(var).content, subs) arena,
} subs.get_without_compacting(var).content,
subs,
pointer_size,
),
Error => Err(()), Error => Err(()),
} }
} }
pub fn safe_to_memcpy(&self) -> bool {
use Layout::*;
match self {
Builtin(builtin) => builtin.safe_to_memcpy(),
Struct(fields) => fields
.iter()
.all(|field_layout| field_layout.safe_to_memcpy()),
Union(tags) => tags
.iter()
.all(|tag_layout| tag_layout.iter().all(|field| field.safe_to_memcpy())),
FunctionPointer(_, _) => {
// Function pointers are immutable and can always be safely copied
true
}
Pointer(_) => {
// We cannot memcpy pointers, because then we would have the same pointer in multiple places!
false
}
}
}
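The safe_to_memcpy rules above boil down to "flat scalar data can be copied byte-for-byte, anything owning a heap pointer cannot". Here is a small hypothetical unit test (assuming it lives alongside these definitions) stating the same thing.

```rust
#[cfg(test)]
mod memcpy_safety_sketch {
    // Sketch only: assumes this module sits next to Layout and Builtin above.
    use super::{Builtin, Layout};

    #[test]
    fn flat_data_is_safe_but_lists_are_not() {
        // A struct of two numbers holds no heap pointers, so memcpy is fine.
        let fields = [
            Layout::Builtin(Builtin::Int64),
            Layout::Builtin(Builtin::Float64),
        ];
        assert!(Layout::Struct(&fields[..]).safe_to_memcpy());

        // A list owns a heap allocation, so it must not be memcpy'd.
        let elem = Layout::Builtin(Builtin::Int64);
        assert!(!Layout::Builtin(Builtin::List(&elem)).safe_to_memcpy());
    }
}
```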
pub fn stack_size(&self, pointer_size: u32) -> u32 { pub fn stack_size(&self, pointer_size: u32) -> u32 {
use Layout::*; use Layout::*;
@ -67,13 +112,24 @@ impl<'a> Layout<'a> {
Struct(fields) => { Struct(fields) => {
let mut sum = 0; let mut sum = 0;
for (_, field_layout) in *fields { for field_layout in *fields {
sum += field_layout.stack_size(pointer_size); sum += field_layout.stack_size(pointer_size);
} }
sum sum
} }
Pointer(_) | FunctionPointer(_, _) => pointer_size, Union(fields) => fields
.iter()
.map(|tag_layout| {
tag_layout
.iter()
.map(|field| field.stack_size(pointer_size))
.sum()
})
.max()
.unwrap_or_default(),
FunctionPointer(_, _) => pointer_size,
Pointer(_) => pointer_size,
} }
} }
} }
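As a worked example of the max-of-sums rule in stack_size (an illustrative test assuming 8-byte pointers and the constructors above): a tag carrying [Int64] needs 8 bytes and a tag carrying [Int64, Float64] needs 16, so the union reserves 16.

```rust
#[cfg(test)]
mod union_stack_size_sketch {
    // Sketch only: assumes this module sits next to Layout and Builtin above.
    use super::{Builtin, Layout};

    #[test]
    fn union_reserves_space_for_its_largest_tag() {
        let small = [Layout::Builtin(Builtin::Int64)];
        let big = [
            Layout::Builtin(Builtin::Int64),
            Layout::Builtin(Builtin::Float64),
        ];
        let tags = [&small[..], &big[..]];

        // max(8, 8 + 8) = 16 bytes
        assert_eq!(Layout::Union(&tags[..]).stack_size(8), 16);
    }
}
```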
@ -81,12 +137,21 @@ impl<'a> Layout<'a> {
impl<'a> Builtin<'a> { impl<'a> Builtin<'a> {
const I64_SIZE: u32 = std::mem::size_of::<i64>() as u32; const I64_SIZE: u32 = std::mem::size_of::<i64>() as u32;
const F64_SIZE: u32 = std::mem::size_of::<f64>() as u32; const F64_SIZE: u32 = std::mem::size_of::<f64>() as u32;
const BOOL_SIZE: u32 = std::mem::size_of::<bool>() as u32;
const BYTE_SIZE: u32 = std::mem::size_of::<u8>() as u32;
/// Number of machine words in an empty one of these /// Number of machine words in an empty one of these
const STR_WORDS: u32 = 3; pub const STR_WORDS: u32 = 2;
const MAP_WORDS: u32 = 6; pub const MAP_WORDS: u32 = 6;
const SET_WORDS: u32 = Builtin::MAP_WORDS; // Set is an alias for Map with {} for value pub const SET_WORDS: u32 = Builtin::MAP_WORDS; // Set is an alias for Map with {} for value
const LIST_WORDS: u32 = 3; pub const LIST_WORDS: u32 = 2;
/// Layout of collection wrapper for List and Str - a struct of (pointer, length).
///
/// We choose this layout (with pointer first) because it's how
/// Rust slices are laid out, meaning we can cast to/from them for free.
pub const WRAPPER_PTR: u32 = 0;
pub const WRAPPER_LEN: u32 = 1;
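A hypothetical illustration (not part of this diff) of the two-word wrapper those indices describe: pointer first, then length, mirroring the (ptr, len) pair the comment above says Rust uses for slices.

```rust
/// Sketch only: the real wrapper is assembled in codegen, not declared like this.
#[repr(C)]
struct CollectionWrapper {
    elements: *const u8, // word index WRAPPER_PTR (0)
    length: usize,       // word index WRAPPER_LEN (1)
}

fn main() {
    // Two machine words, matching STR_WORDS / LIST_WORDS == 2 above.
    assert_eq!(
        std::mem::size_of::<CollectionWrapper>(),
        2 * std::mem::size_of::<usize>()
    );
}
```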
pub fn stack_size(&self, pointer_size: u32) -> u32 { pub fn stack_size(&self, pointer_size: u32) -> u32 {
use Builtin::*; use Builtin::*;
@ -94,10 +159,21 @@ impl<'a> Builtin<'a> {
match self { match self {
Int64 => Builtin::I64_SIZE, Int64 => Builtin::I64_SIZE,
Float64 => Builtin::F64_SIZE, Float64 => Builtin::F64_SIZE,
Str => Builtin::STR_WORDS * pointer_size, Bool => Builtin::BOOL_SIZE,
Map(_, _) => Builtin::MAP_WORDS * pointer_size, Byte => Builtin::BYTE_SIZE,
Set(_) => Builtin::SET_WORDS * pointer_size, Str | EmptyStr => Builtin::STR_WORDS * pointer_size,
List(_) => Builtin::LIST_WORDS * pointer_size, Map(_, _) | EmptyMap => Builtin::MAP_WORDS * pointer_size,
Set(_) | EmptySet => Builtin::SET_WORDS * pointer_size,
List(_) | EmptyList => Builtin::LIST_WORDS * pointer_size,
}
}
pub fn safe_to_memcpy(&self) -> bool {
use Builtin::*;
match self {
Int64 | Float64 | Bool | Byte | EmptyStr | EmptyMap | EmptyList | EmptySet => true,
Str | Map(_, _) | Set(_) | List(_) => false,
} }
} }
} }
@ -106,6 +182,7 @@ fn layout_from_flat_type<'a>(
arena: &'a Bump, arena: &'a Bump,
flat_type: FlatType, flat_type: FlatType,
subs: &Subs, subs: &Subs,
pointer_size: u32,
) -> Result<Layout<'a>, ()> { ) -> Result<Layout<'a>, ()> {
use roc_types::subs::FlatType::*; use roc_types::subs::FlatType::*;
@ -131,10 +208,18 @@ fn layout_from_flat_type<'a>(
} }
Symbol::STR_STR => Ok(Layout::Builtin(Builtin::Str)), Symbol::STR_STR => Ok(Layout::Builtin(Builtin::Str)),
Symbol::LIST_LIST => { Symbol::LIST_LIST => {
let elem_layout = Layout::from_var(arena, args[0], subs)?; use roc_types::subs::Content::*;
match subs.get_without_compacting(args[0]).content {
FlexVar(_) | RigidVar(_) => Ok(Layout::Builtin(Builtin::EmptyList)),
content => {
let elem_layout =
Layout::from_content(arena, content, subs, pointer_size)?;
Ok(Layout::Builtin(Builtin::List(arena.alloc(elem_layout)))) Ok(Layout::Builtin(Builtin::List(arena.alloc(elem_layout))))
} }
}
}
Symbol::ATTR_ATTR => { Symbol::ATTR_ATTR => {
debug_assert!(args.len() == 2); debug_assert!(args.len() == 2);
@ -145,7 +230,7 @@ fn layout_from_flat_type<'a>(
// For now, layout is unaffected by uniqueness. // For now, layout is unaffected by uniqueness.
// (Incorporating refcounting may change this.) // (Incorporating refcounting may change this.)
// Unwrap and continue // Unwrap and continue
Layout::from_var(arena, wrapped_var, subs) Layout::from_var(arena, wrapped_var, subs, pointer_size)
} }
_ => { _ => {
panic!("TODO layout_from_flat_type for {:?}", Apply(symbol, args)); panic!("TODO layout_from_flat_type for {:?}", Apply(symbol, args));
@ -158,49 +243,35 @@ fn layout_from_flat_type<'a>(
for arg_var in args { for arg_var in args {
let arg_content = subs.get_without_compacting(arg_var).content; let arg_content = subs.get_without_compacting(arg_var).content;
fn_args.push(Layout::from_content(arena, arg_content, subs)?); fn_args.push(Layout::from_content(
arena,
arg_content,
subs,
pointer_size,
)?);
} }
let ret_content = subs.get_without_compacting(ret_var).content; let ret_content = subs.get_without_compacting(ret_var).content;
let ret = Layout::from_content(arena, ret_content, subs)?; let ret = Layout::from_content(arena, ret_content, subs, pointer_size)?;
Ok(Layout::FunctionPointer( Ok(Layout::FunctionPointer(
fn_args.into_bump_slice(), fn_args.into_bump_slice(),
arena.alloc(ret), arena.alloc(ret),
)) ))
} }
Record(mut fields, ext_var) => { Record(fields, ext_var) => {
flatten_record(&mut fields, ext_var, subs); debug_assert!(ext_var_is_empty_record(subs, ext_var));
let ext_content = subs.get_without_compacting(ext_var).content;
let ext_layout = match Layout::from_content(arena, ext_content, subs) {
Ok(layout) => layout,
Err(()) => {
// Invalid record!
panic!("TODO gracefully handle record with invalid ext_var");
}
};
let mut field_layouts; let btree = fields
.into_iter()
.collect::<BTreeMap<Lowercase, Variable>>();
match ext_layout { let mut layouts = Vec::with_capacity_in(btree.len(), arena);
Layout::Struct(more_fields) => {
field_layouts = Vec::with_capacity_in(fields.len() + more_fields.len(), arena);
for (label, field) in more_fields { for (_, field_var) in btree {
field_layouts.push((label.clone(), field.clone()));
}
}
_ => {
panic!(
"TODO handle Layout for invalid record extension, specifically {:?}",
ext_layout
);
}
}
for (label, field_var) in fields {
let field_content = subs.get_without_compacting(field_var).content; let field_content = subs.get_without_compacting(field_var).content;
let field_layout = match Layout::from_content(arena, field_content, subs) { let field_layout =
match Layout::from_content(arena, field_content, subs, pointer_size) {
Ok(layout) => layout, Ok(layout) => layout,
Err(()) => { Err(()) => {
// Invalid field! // Invalid field!
@ -208,44 +279,15 @@ fn layout_from_flat_type<'a>(
} }
}; };
field_layouts.push((label.clone(), field_layout)); layouts.push(field_layout);
} }
Ok(Layout::Struct(field_layouts.into_bump_slice())) Ok(Layout::Struct(layouts.into_bump_slice()))
} }
TagUnion(mut tags, ext_var) => { TagUnion(tags, ext_var) => {
// Recursively inject the contents of ext_var into tags debug_assert!(ext_var_is_empty_tag_union(subs, ext_var));
// until we have all the tags in one map.
flatten_union(&mut tags, ext_var, subs);
match tags.len() { Ok(layout_from_tag_union(arena, tags, subs, pointer_size))
0 => {
panic!("TODO gracefully handle trying to instantiate Never");
}
1 => {
// This is a wrapper. Unwrap it!
let (tag, args) = tags.into_iter().next().unwrap();
match tag {
TagName::Private(Symbol::NUM_AT_NUM) => {
debug_assert!(args.len() == 1);
let var = args.into_iter().next().unwrap();
unwrap_num_tag(subs, var)
}
TagName::Private(symbol) => {
panic!("TODO emit wrapped private tag for {:?} {:?}", symbol, args);
}
TagName::Global(ident) => {
panic!("TODO emit wrapped global tag for {:?} {:?}", ident, args);
}
}
}
_ => {
panic!("TODO handle a tag union with mutliple tags: {:?}", tags);
}
}
} }
RecursiveTagUnion(_, _, _) => { RecursiveTagUnion(_, _, _) => {
panic!("TODO make Layout for non-empty Tag Union"); panic!("TODO make Layout for non-empty Tag Union");
@ -261,6 +303,198 @@ fn layout_from_flat_type<'a>(
} }
} }
pub fn record_fields_btree<'a>(
arena: &'a Bump,
var: Variable,
subs: &Subs,
pointer_size: u32,
) -> BTreeMap<Lowercase, Layout<'a>> {
let mut fields_map = MutMap::default();
match roc_types::pretty_print::chase_ext_record(subs, var, &mut fields_map) {
Ok(()) | Err((_, Content::FlexVar(_))) => {
// collect into btreemap to sort
fields_map
.into_iter()
.map(|(label, var)| {
(
label,
Layout::from_var(arena, var, subs, pointer_size)
.expect("invalid layout from var"),
)
})
.collect::<BTreeMap<Lowercase, Layout<'a>>>()
}
Err(other) => panic!("invalid content in record variable: {:?}", other),
}
}
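The BTreeMap collection step above is what gives record fields a deterministic order; here is a tiny self-contained sketch of that behavior (standard library only, names illustrative).

```rust
use std::collections::BTreeMap;

fn main() {
    // Mirrors record_fields_btree: collecting (label, value) pairs into a
    // BTreeMap hands the fields back in sorted label order.
    let fields = vec![("y", 2), ("x", 1), ("z", 3)];
    let sorted: BTreeMap<&str, i32> = fields.into_iter().collect();
    let labels: Vec<&str> = sorted.keys().copied().collect();
    assert_eq!(labels, vec!["x", "y", "z"]);
}
```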
pub enum UnionVariant<'a> {
Never,
Unit,
BoolUnion { ttrue: TagName, ffalse: TagName },
ByteUnion(Vec<'a, TagName>),
Unwrapped(Vec<'a, Layout<'a>>),
Wrapped(Vec<'a, (TagName, &'a [Layout<'a>])>),
}
pub fn union_sorted_tags<'a>(
arena: &'a Bump,
var: Variable,
subs: &Subs,
pointer_size: u32,
) -> UnionVariant<'a> {
let mut tags_vec = std::vec::Vec::new();
match roc_types::pretty_print::chase_ext_tag_union(subs, var, &mut tags_vec) {
Ok(()) | Err((_, Content::FlexVar(_))) => {
union_sorted_tags_help(arena, tags_vec, subs, pointer_size)
}
Err(other) => panic!("invalid content in record variable: {:?}", other),
}
}
fn union_sorted_tags_help<'a>(
arena: &'a Bump,
mut tags_vec: std::vec::Vec<(TagName, std::vec::Vec<Variable>)>,
subs: &Subs,
pointer_size: u32,
) -> UnionVariant<'a> {
// for this union to be an enum, none of the tags may have any arguments
let has_no_arguments = tags_vec.iter().all(|(_, args)| args.is_empty());
// sort up-front, make sure the ordering stays intact!
tags_vec.sort();
match tags_vec.len() {
0 => {
// trying to instantiate a type with no values
UnionVariant::Never
}
1 if has_no_arguments => {
// a unit type
UnionVariant::Unit
}
2 if has_no_arguments => {
// type can be stored in a boolean
// tags_vec is sorted,
let ttrue = tags_vec.remove(1).0;
let ffalse = tags_vec.remove(0).0;
UnionVariant::BoolUnion { ffalse, ttrue }
}
3..=MAX_ENUM_SIZE if has_no_arguments => {
// type can be stored in a byte
// needs the sorted tag names to determine the tag_id
let mut tag_names = Vec::with_capacity_in(tags_vec.len(), arena);
for (label, _) in tags_vec {
tag_names.push(label);
}
UnionVariant::ByteUnion(tag_names)
}
1 => {
// special-case NUM_AT_NUM: if its argument is a FlexVar, make it Int
let (tag_name, arguments) = tags_vec.remove(0);
// just one tag in the union (but with arguments) can be a struct
let mut layouts = Vec::with_capacity_in(arguments.len(), arena);
match tag_name {
TagName::Private(Symbol::NUM_AT_NUM) => {
layouts.push(unwrap_num_tag(subs, arguments[0]).expect("invalid num layout"));
}
_ => {
for var in arguments.iter() {
let layout = Layout::from_var(arena, *var, subs, pointer_size)
.expect("invalid layout from var");
layouts.push(layout);
}
}
}
UnionVariant::Unwrapped(layouts)
}
_ => {
// default path
let mut result = Vec::with_capacity_in(tags_vec.len(), arena);
for (tag_name, arguments) in tags_vec {
// reserve space for the tag discriminant
let mut arg_layouts = Vec::with_capacity_in(arguments.len() + 1, arena);
// add the tag discriminant
arg_layouts.push(Layout::Builtin(Builtin::Int64));
for var in arguments {
let layout = Layout::from_var(arena, var, subs, pointer_size)
.expect("invalid layout from var");
arg_layouts.push(layout);
}
result.push((tag_name, arg_layouts.into_bump_slice()));
}
UnionVariant::Wrapped(result)
}
}
}
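/// Pick a `Layout` for a whole tag union. For example (matching the tests in
/// test_mono.rs), `[ Yes, No ]` becomes `Builtin::Bool` and
/// `[ Apple, Orange, Banana ]` becomes `Builtin::Byte`, while a single tag
/// that carries arguments is laid out as an unwrapped struct of its arguments.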
pub fn layout_from_tag_union<'a>(
arena: &'a Bump,
tags: MutMap<TagName, std::vec::Vec<Variable>>,
subs: &Subs,
pointer_size: u32,
) -> Layout<'a> {
use UnionVariant::*;
let tags_vec: std::vec::Vec<_> = tags.into_iter().collect();
let first_tag = tags_vec[0].clone();
let variant = union_sorted_tags_help(arena, tags_vec, subs, pointer_size);
match variant {
Never => panic!("TODO gracefully handle trying to instantiate Never"),
Unit => Layout::Struct(&[]),
BoolUnion { .. } => Layout::Builtin(Builtin::Bool),
ByteUnion(_) => Layout::Builtin(Builtin::Byte),
Unwrapped(field_layouts) => match first_tag.0 {
TagName::Private(Symbol::NUM_AT_NUM) => {
let arguments = first_tag.1;
debug_assert!(arguments.len() == 1);
let var = arguments.iter().next().unwrap();
unwrap_num_tag(subs, *var).expect("invalid Num argument")
}
_ => Layout::Struct(field_layouts.into_bump_slice()),
},
Wrapped(tags) => {
let mut tag_layouts = Vec::with_capacity_in(tags.len(), arena);
for (_, tag_layout) in tags {
tag_layouts.push(tag_layout);
}
Layout::Union(tag_layouts.into_bump_slice())
}
}
}
fn ext_var_is_empty_tag_union(subs: &Subs, ext_var: Variable) -> bool {
// the ext_var is empty
let mut ext_fields = std::vec::Vec::new();
match roc_types::pretty_print::chase_ext_tag_union(subs, ext_var, &mut ext_fields) {
Ok(()) | Err((_, Content::FlexVar(_))) => ext_fields.is_empty(),
Err(content) => panic!("invalid content in ext_var: {:?}", content),
}
}
fn ext_var_is_empty_record(subs: &Subs, ext_var: Variable) -> bool {
// the ext_var is empty
let mut ext_fields = MutMap::default();
match roc_types::pretty_print::chase_ext_record(subs, ext_var, &mut ext_fields) {
Ok(()) | Err((_, Content::FlexVar(_))) => ext_fields.is_empty(),
Err((_, content)) => panic!("invalid content in ext_var: {:?}", content),
}
}
fn layout_from_num_content<'a>(content: Content) -> Result<Layout<'a>, ()> { fn layout_from_num_content<'a>(content: Content) -> Result<Layout<'a>, ()> {
use roc_types::subs::Content::*; use roc_types::subs::Content::*;
use roc_types::subs::FlatType::*; use roc_types::subs::FlatType::*;
@ -289,52 +523,6 @@ fn layout_from_num_content<'a>(content: Content) -> Result<Layout<'a>, ()> {
} }
} }
/// Recursively inline the contents ext_var into this union until we have
/// a flat union containing all the tags.
fn flatten_union(
tags: &mut MutMap<TagName, std::vec::Vec<Variable>>,
ext_var: Variable,
subs: &Subs,
) {
use roc_types::subs::Content::*;
use roc_types::subs::FlatType::*;
match subs.get_without_compacting(ext_var).content {
Structure(EmptyTagUnion) => (),
Structure(TagUnion(new_tags, new_ext_var)) => {
for (tag_name, vars) in new_tags {
tags.insert(tag_name, vars);
}
flatten_union(tags, new_ext_var, subs)
}
invalid => {
panic!("Compiler error: flatten_union got an ext_var in a tag union that wasn't itself a tag union; instead, it was: {:?}", invalid);
}
};
}
/// Recursively inline the contents ext_var into this record until we have
/// a flat record containing all the fields.
fn flatten_record(fields: &mut MutMap<Lowercase, Variable>, ext_var: Variable, subs: &Subs) {
use roc_types::subs::Content::*;
use roc_types::subs::FlatType::*;
match subs.get_without_compacting(ext_var).content {
Structure(EmptyRecord) => (),
Structure(Record(new_tags, new_ext_var)) => {
for (label, var) in new_tags {
fields.insert(label, var);
}
flatten_record(fields, new_ext_var, subs)
}
invalid => {
panic!("Compiler error: flatten_record encountered an ext_var in a record that wasn't itself a record; instead, it was: {:?}", invalid);
}
};
}
fn unwrap_num_tag<'a>(subs: &Subs, var: Variable) -> Result<Layout<'a>, ()> { fn unwrap_num_tag<'a>(subs: &Subs, var: Variable) -> Result<Layout<'a>, ()> {
match subs.get_without_compacting(var).content { match subs.get_without_compacting(var).content {
Content::Structure(flat_type) => match flat_type { Content::Structure(flat_type) => match flat_type {
@ -357,6 +545,10 @@ fn unwrap_num_tag<'a>(subs: &Subs, var: Variable) -> Result<Layout<'a>, ()> {
debug_assert!(args.is_empty()); debug_assert!(args.is_empty());
Ok(Layout::Builtin(Builtin::Float64)) Ok(Layout::Builtin(Builtin::Float64))
} }
Content::FlexVar(_) => {
// If this was still a (Num *) then default to compiling it to i64
Ok(Layout::Builtin(Builtin::Int64))
}
other => { other => {
panic!("TODO non structure Num.@Num flat_type {:?}", other); panic!("TODO non structure Num.@Num flat_type {:?}", other);
} }

View file

@ -12,3 +12,10 @@
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod expr; pub mod expr;
pub mod layout; pub mod layout;
// Temporary, while we build up test cases and optimize the exhaustiveness checking.
// For now, following this warning's advice will lead to nasty type inference errors.
#[allow(clippy::ptr_arg)]
pub mod decision_tree;
#[allow(clippy::ptr_arg)]
pub mod pattern;

View file

@ -0,0 +1,510 @@
use roc_collections::all::{Index, MutMap};
use roc_module::ident::TagName;
use roc_region::all::{Located, Region};
use self::Pattern::*;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Union {
pub alternatives: Vec<Ctor>,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Ctor {
pub name: TagName,
// pub tag_id: u8,
pub arity: usize,
}
#[derive(Clone, Debug, PartialEq)]
pub enum Pattern {
Anything,
Literal(Literal),
Ctor(Union, TagName, std::vec::Vec<Pattern>),
}
#[derive(Clone, Debug, PartialEq)]
pub enum Literal {
Int(i64),
Bit(bool),
Byte(u8),
Float(u64),
Str(Box<str>),
}
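/// Convert a code-gen pattern into the simplified form used only for
/// exhaustiveness checking: literals stay literals, identifiers and
/// underscores become `Anything`, and record destructures and applied tags
/// become `Ctor`s over their (possibly faked) union.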
fn simplify<'a>(pattern: &crate::expr::Pattern<'a>) -> Pattern {
use crate::expr::Pattern::*;
match pattern {
IntLiteral(v) => Literal(Literal::Int(*v)),
FloatLiteral(v) => Literal(Literal::Float(*v)),
StrLiteral(v) => Literal(Literal::Str(v.clone())),
// To make sure these are exhaustive, we have to "fake" a union here
// TODO: use the hash or some other integer to discriminate between constructors
BitLiteral {
tag_name, union, ..
} => Ctor(union.clone(), tag_name.clone(), vec![]),
EnumLiteral {
tag_name, union, ..
} => Ctor(union.clone(), tag_name.clone(), vec![]),
Underscore => Anything,
Identifier(_) => Anything,
RecordDestructure(destructures, _) => {
let union = Union {
alternatives: vec![Ctor {
name: TagName::Global("#Record".into()),
arity: destructures.len(),
}],
};
let mut patterns = std::vec::Vec::with_capacity(destructures.len());
for destruct in destructures {
match &destruct.guard {
None => patterns.push(Anything),
Some(guard) => patterns.push(simplify(guard)),
}
}
Ctor(union, TagName::Global("#Record".into()), patterns)
}
Shadowed(_region, _ident) => {
// Treat as an Anything
// code-gen will make a runtime error out of the branch
Anything
}
UnsupportedPattern(_region) => {
// Treat as an Anything
// code-gen will make a runtime error out of the branch
Anything
}
AppliedTag {
tag_name,
arguments,
union,
..
} => {
let simplified_args: std::vec::Vec<_> =
arguments.iter().map(|v| simplify(&v.0)).collect();
Ctor(union.clone(), tag_name.clone(), simplified_args)
}
}
}
/// Error
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
Incomplete(Region, Context, Vec<Pattern>),
Redundant {
overall_region: Region,
branch_region: Region,
index: Index,
},
}
#[derive(Clone, Debug, PartialEq)]
pub enum Context {
BadArg,
BadDestruct,
BadCase,
}
#[derive(Clone, Debug, PartialEq)]
pub enum Guard {
HasGuard,
NoGuard,
}
/// Check
pub fn check<'a>(
region: Region,
patterns: &[(Located<crate::expr::Pattern<'a>>, Guard)],
context: Context,
) -> Result<(), Vec<Error>> {
let mut errors = Vec::new();
check_patterns(region, context, patterns, &mut errors);
if errors.is_empty() {
Ok(())
} else {
Err(errors)
}
}
pub fn check_patterns<'a>(
region: Region,
context: Context,
patterns: &[(Located<crate::expr::Pattern<'a>>, Guard)],
errors: &mut Vec<Error>,
) {
match to_nonredundant_rows(region, patterns) {
Err(err) => errors.push(err),
Ok(matrix) => {
let bad_patterns = is_exhaustive(&matrix, 1);
if !bad_patterns.is_empty() {
// TODO I suspect this is effectively a concat in practice? The code below can panic
// if this debug_assert! ever fails, the theory is disproven
debug_assert!(
bad_patterns.iter().map(|v| v.len()).sum::<usize>() == bad_patterns.len()
);
let heads = bad_patterns.into_iter().map(|mut v| v.remove(0)).collect();
errors.push(Error::Incomplete(region, context, heads));
}
}
}
}
/// EXHAUSTIVE PATTERNS
/// INVARIANTS:
///
/// The initial rows "matrix" are all of length 1
/// The initial count of items per row "n" is also 1
/// The resulting rows are examples of missing patterns
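///
/// Informally: if the only branch pattern were `Ok _` on a two-constructor
/// union, the missing-pattern rows would include one ending in `Err _`
/// (an illustrative example, not taken from this codebase's tests).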
fn is_exhaustive(matrix: &PatternMatrix, n: usize) -> PatternMatrix {
if matrix.is_empty() {
vec![std::iter::repeat(Anything).take(n).collect()]
} else if n == 0 {
vec![]
} else {
let ctors = collect_ctors(matrix);
let num_seen = ctors.len();
if num_seen == 0 {
let new_matrix = matrix
.iter()
.filter_map(specialize_row_by_anything)
.collect();
let mut rest = is_exhaustive(&new_matrix, n - 1);
for row in rest.iter_mut() {
row.push(Anything);
}
rest
} else {
let alts = ctors.iter().next().unwrap().1;
let alt_list = &alts.alternatives;
let num_alts = alt_list.len();
if num_seen < num_alts {
let new_matrix = matrix
.iter()
.filter_map(specialize_row_by_anything)
.collect();
let rest: Vec<Vec<Pattern>> = is_exhaustive(&new_matrix, n - 1);
let last: _ = alt_list
.iter()
.filter_map(|r| is_missing(alts.clone(), ctors.clone(), r));
let mut result = Vec::new();
for last_option in last {
for mut row in rest.clone() {
row.push(last_option.clone());
result.push(row);
}
}
result
} else {
let is_alt_exhaustive = |Ctor { name, arity }| {
let new_matrix = matrix
.iter()
.filter_map(|r| specialize_row_by_ctor(&name, arity, r))
.collect();
let rest: Vec<Vec<Pattern>> = is_exhaustive(&new_matrix, arity + n - 1);
let mut result = Vec::with_capacity(rest.len());
for row in rest {
result.push(recover_ctor(alts.clone(), name.clone(), arity, row));
}
result
};
alt_list
.iter()
.cloned()
.map(is_alt_exhaustive)
.flatten()
.collect()
}
}
}
}
fn is_missing<T>(union: Union, ctors: MutMap<TagName, T>, ctor: &Ctor) -> Option<Pattern> {
let Ctor { name, arity, .. } = ctor;
if ctors.contains_key(&name) {
None
} else {
let anythings = std::iter::repeat(Anything).take(*arity).collect();
Some(Pattern::Ctor(union, name.clone(), anythings))
}
}
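/// Undo a specialization: the first `arity` patterns of the row become the
/// constructor's arguments again, wrapped back up into a single `Ctor` pattern.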
fn recover_ctor(
union: Union,
tag_name: TagName,
arity: usize,
mut patterns: Vec<Pattern>,
) -> Vec<Pattern> {
let mut rest = patterns.split_off(arity);
let args = patterns;
rest.push(Ctor(union, tag_name, args));
rest
}
/// REDUNDANT PATTERNS
/// INVARIANT: Produces a list of rows where (forall row. length row == 1)
fn to_nonredundant_rows<'a>(
overall_region: Region,
patterns: &[(Located<crate::expr::Pattern<'a>>, Guard)],
) -> Result<Vec<Vec<Pattern>>, Error> {
let mut checked_rows = Vec::with_capacity(patterns.len());
// If any of the branches has a guard, e.g.
//
// when x is
// y if y < 10 -> "foo"
// _ -> "bar"
//
// then we treat it as a pattern match on the pattern and a boolean, wrapped in the #Guard
// constructor. We can use this special constructor name to generate better error messages.
// This transformation of the pattern match only works because we only report exhaustiveness
// errors: the Pattern created in this file is not used for code gen.
//
// when x is
// #Guard y True -> "foo"
// #Guard _ _ -> "bar"
let any_has_guard = patterns.iter().any(|(_, guard)| guard == &Guard::HasGuard);
for (loc_pat, guard) in patterns {
let region = loc_pat.region;
let next_row = if any_has_guard {
let guard_pattern = match guard {
Guard::HasGuard => Pattern::Literal(Literal::Bit(true)),
Guard::NoGuard => Pattern::Anything,
};
let union = Union {
alternatives: vec![Ctor {
name: TagName::Global("#Guard".into()),
arity: 2,
}],
};
let tag_name = TagName::Global("#Guard".into());
vec![Pattern::Ctor(
union,
tag_name,
vec![simplify(&loc_pat.value), guard_pattern],
)]
} else {
vec![simplify(&loc_pat.value)]
};
if is_useful(&checked_rows, &next_row) {
checked_rows.push(next_row);
} else {
return Err(Error::Redundant {
overall_region,
branch_region: region,
index: Index::zero_based(checked_rows.len()),
});
}
}
Ok(checked_rows)
}
/// Check if a new row "vector" is useful given previous rows "matrix"
fn is_useful(matrix: &PatternMatrix, vector: &Row) -> bool {
if matrix.is_empty() {
// No rows are the same as the new vector! The vector is useful!
true
} else if vector.is_empty() {
// There is nothing left in the new vector, but we still have
// rows that match the same things. This is not a useful vector!
false
} else {
let mut vector = vector.clone();
let first_pattern = vector.remove(0);
let patterns = vector;
match first_pattern {
// keep checking rows that start with this Ctor or Anything
Ctor(_, name, args) => {
let new_matrix: Vec<_> = matrix
.iter()
.filter_map(|r| specialize_row_by_ctor(&name, args.len(), r))
.collect();
let mut new_row = Vec::new();
new_row.extend(args);
new_row.extend(patterns);
is_useful(&new_matrix, &new_row)
}
Anything => {
// check if all alts appear in matrix
match is_complete(matrix) {
Complete::No => {
// This Anything is useful because some Ctors are missing.
// But what if a previous row has an Anything?
// If so, this one is not useful.
let new_matrix: Vec<_> = matrix
.iter()
.filter_map(|r| specialize_row_by_anything(r))
.collect();
is_useful(&new_matrix, &patterns)
}
Complete::Yes(alts) => {
// All Ctors are covered, so this Anything is not needed for any
// of those. But what if some of those Ctors have subpatterns
// that make them less general? If so, this actually is useful!
let is_useful_alt = |Ctor { name, arity, .. }| {
let new_matrix = matrix
.iter()
.filter_map(|r| specialize_row_by_ctor(&name, arity, r))
.collect();
let mut new_row: Vec<Pattern> =
std::iter::repeat(Anything).take(arity).collect::<Vec<_>>();
new_row.extend(patterns.clone());
is_useful(&new_matrix, &new_row)
};
alts.iter().cloned().any(is_useful_alt)
}
}
}
Literal(literal) => {
// keep checking rows that start with this Literal or Anything
let new_matrix = matrix
.iter()
.filter_map(|r| specialize_row_by_literal(&literal, r))
.collect();
is_useful(&new_matrix, &patterns)
}
}
}
}
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
fn specialize_row_by_ctor(tag_name: &TagName, arity: usize, row: &Row) -> Option<Row> {
let mut row = row.clone();
let head = row.pop();
let patterns = row;
match head {
Some(Ctor(_, name, args)) =>
if &name == tag_name {
// TODO order!
let mut new_patterns = Vec::new();
new_patterns.extend(args);
new_patterns.extend(patterns);
Some(new_patterns)
} else {
None
}
Some(Anything) => {
// TODO order!
let new_patterns =
std::iter::repeat(Anything).take(arity).chain(patterns).collect();
Some(new_patterns)
}
Some(Literal(_)) => panic!("Compiler bug! After type checking, constructors and literals should never align in pattern match exhaustiveness checks."),
None => panic!("Compiler error! Empty matrices should not get specialized."),
}
}
/// INVARIANT: (length row == N) ==> (length result == N-1)
fn specialize_row_by_literal(literal: &Literal, row: &Row) -> Option<Row> {
let mut row = row.clone();
let head = row.pop();
let patterns = row;
match head {
Some(Literal(lit)) => if &lit == literal { Some(patterns) } else { None },
Some(Anything) => Some(patterns),
Some(Ctor(_, _, _)) => panic!("Compiler bug! After type checking, constructors and literals should never align in pattern match exhaustiveness checks."),
None => panic!("Compiler error! Empty matrices should not get specialized."),
}
}
/// INVARIANT: (length row == N) ==> (length result == N-1)
fn specialize_row_by_anything(row: &Row) -> Option<Row> {
let mut row = row.clone();
match row.pop() {
Some(Anything) => Some(row),
_ => None,
}
}
/// ALL CONSTRUCTORS ARE PRESENT?
pub enum Complete {
Yes(Vec<Ctor>),
No,
}
fn is_complete(matrix: &PatternMatrix) -> Complete {
let ctors = collect_ctors(matrix);
let mut it = ctors.values();
match it.next() {
None => Complete::No,
Some(Union { alternatives, .. }) => {
if ctors.len() == alternatives.len() {
Complete::Yes(alternatives.to_vec())
} else {
Complete::No
}
}
}
}
/// COLLECT CTORS
type RefPatternMatrix = [Vec<Pattern>];
type PatternMatrix = Vec<Vec<Pattern>>;
type Row = Vec<Pattern>;
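/// For every constructor appearing at the end of a row, record which union it
/// belongs to, keyed by tag name.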
fn collect_ctors(matrix: &RefPatternMatrix) -> MutMap<TagName, Union> {
let mut ctors = MutMap::default();
for row in matrix {
if let Some(Ctor(union, name, _)) = row.last() {
ctors.insert(name.clone(), union.clone());
}
}
ctors
}

View file

@ -0,0 +1,457 @@
extern crate bumpalo;
use self::bumpalo::Bump;
use roc_builtins::unique::uniq_stdlib;
use roc_can::constraint::Constraint;
use roc_can::env::Env;
use roc_can::expected::Expected;
use roc_can::expr::{canonicalize_expr, Expr, Output};
use roc_can::operator;
use roc_can::scope::Scope;
use roc_collections::all::{ImMap, ImSet, MutMap, SendMap, SendSet};
use roc_constrain::expr::constrain_expr;
use roc_constrain::module::{constrain_imported_values, load_builtin_aliases, Import};
use roc_module::ident::Ident;
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, Symbol};
use roc_parse::ast::{self, Attempting};
use roc_parse::blankspace::space0_before;
use roc_parse::parser::{loc, Fail, Parser, State};
use roc_problem::can::Problem;
use roc_region::all::{Located, Region};
use roc_solve::solve;
use roc_types::subs::{Content, Subs, VarStore, Variable};
use roc_types::types::Type;
use std::hash::Hash;
use std::path::{Path, PathBuf};
pub fn test_home() -> ModuleId {
ModuleIds::default().get_or_insert(&"Test".into())
}
#[allow(dead_code)]
pub fn infer_expr(
subs: Subs,
problems: &mut Vec<roc_solve::solve::TypeError>,
constraint: &Constraint,
expr_var: Variable,
) -> (Content, Subs) {
let env = solve::Env {
aliases: MutMap::default(),
vars_by_symbol: SendMap::default(),
};
let (solved, _) = solve::run(&env, problems, subs, constraint);
let content = solved.inner().get_without_compacting(expr_var).content;
(content, solved.into_inner())
}
/// Used in the with_larger_debug_stack() function, for tests that otherwise
/// run out of stack space in debug builds (but don't in --release builds)
#[allow(dead_code)]
const EXPANDED_STACK_SIZE: usize = 4 * 1024 * 1024;
/// Without this, some tests pass in `cargo test --release` but fail without
/// the --release flag because they run out of stack space. This increases
/// stack size for debug builds only, while leaving the stack space at the default
/// amount for release builds.
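///
/// Usage is just wrapping the test body, e.g. (a sketch):
/// `with_larger_debug_stack(|| { /* run the deeply recursive test here */ })`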
#[allow(dead_code)]
#[cfg(debug_assertions)]
pub fn with_larger_debug_stack<F>(run_test: F)
where
F: FnOnce() -> (),
F: Send,
F: 'static,
{
std::thread::Builder::new()
.stack_size(EXPANDED_STACK_SIZE)
.spawn(run_test)
.expect("Error while spawning expanded dev stack size thread")
.join()
.expect("Error while joining expanded dev stack size thread")
}
/// In --release builds, don't increase the stack size. Run the test normally.
/// This way, we find out if any of our tests are blowing the stack even after
/// optimizations in release builds.
#[allow(dead_code)]
#[cfg(not(debug_assertions))]
#[inline(always)]
pub fn with_larger_debug_stack<F>(run_test: F)
where
F: FnOnce() -> (),
F: Send,
F: 'static,
{
run_test()
}
#[allow(dead_code)]
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, Fail> {
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
}
#[allow(dead_code)]
pub fn parse_loc_with<'a>(arena: &'a Bump, input: &'a str) -> Result<Located<ast::Expr<'a>>, Fail> {
let state = State::new(&input, Attempting::Module);
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
let answer = parser.parse(&arena, state);
answer
.map(|(loc_expr, _)| loc_expr)
.map_err(|(fail, _)| fail)
}
#[allow(dead_code)]
pub fn can_expr(expr_str: &str) -> CanExprOut {
can_expr_with(&Bump::new(), test_home(), expr_str)
}
#[allow(dead_code)]
pub fn uniq_expr(
expr_str: &str,
) -> (
Located<Expr>,
Output,
Vec<Problem>,
Subs,
Variable,
Constraint,
ModuleId,
Interns,
) {
let declared_idents: &ImMap<Ident, (Symbol, Region)> = &ImMap::default();
uniq_expr_with(&Bump::new(), expr_str, declared_idents)
}
#[allow(dead_code)]
pub fn uniq_expr_with(
arena: &Bump,
expr_str: &str,
declared_idents: &ImMap<Ident, (Symbol, Region)>,
) -> (
Located<Expr>,
Output,
Vec<Problem>,
Subs,
Variable,
Constraint,
ModuleId,
Interns,
) {
let home = test_home();
let CanExprOut {
loc_expr,
output,
problems,
var_store: old_var_store,
var,
interns,
..
} = can_expr_with(arena, home, expr_str);
// double check
let var_store = VarStore::new(old_var_store.fresh());
let expected2 = Expected::NoExpectation(Type::Variable(var));
let constraint = roc_constrain::uniq::constrain_declaration(
home,
&var_store,
Region::zero(),
&loc_expr,
declared_idents,
expected2,
);
let stdlib = uniq_stdlib();
let types = stdlib.types;
let imports: Vec<_> = types
.iter()
.map(|(symbol, (solved_type, region))| Import {
loc_symbol: Located::at(*region, *symbol),
solved_type: solved_type,
})
.collect();
// load builtin values
// TODO what to do with those rigids?
let (_introduced_rigids, constraint) =
constrain_imported_values(imports, constraint, &var_store);
// load builtin types
let mut constraint = load_builtin_aliases(&stdlib.aliases, constraint, &var_store);
constraint.instantiate_aliases(&var_store);
let subs2 = Subs::new(var_store.into());
(
loc_expr, output, problems, subs2, var, constraint, home, interns,
)
}
pub struct CanExprOut {
pub loc_expr: Located<Expr>,
pub output: Output,
pub problems: Vec<Problem>,
pub home: ModuleId,
pub interns: Interns,
pub var_store: VarStore,
pub var: Variable,
pub constraint: Constraint,
}
#[allow(dead_code)]
pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut {
let loc_expr = parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
panic!(
"can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}",
expr_str, e
)
});
let var_store = VarStore::default();
let var = var_store.fresh();
let expected = Expected::NoExpectation(Type::Variable(var));
let module_ids = ModuleIds::default();
// Desugar operators (convert them to Apply calls, taking into account
// operator precedence and associativity rules), before doing other canonicalization.
//
// If we did this *during* canonicalization, then each time we
// visited a BinOp node we'd recursively try to apply this to each of its nested
// operators, and then again on *their* nested operators, ultimately applying the
// rules multiple times unnecessarily.
let loc_expr = operator::desugar_expr(arena, &loc_expr);
let mut scope = Scope::new(home);
let dep_idents = IdentIds::exposed_builtins(0);
let mut env = Env::new(home, dep_idents, &module_ids, IdentIds::default());
let (loc_expr, output) = canonicalize_expr(
&mut env,
&var_store,
&mut scope,
Region::zero(),
&loc_expr.value,
);
let constraint = constrain_expr(
&roc_constrain::expr::Env {
rigids: ImMap::default(),
home,
},
loc_expr.region,
&loc_expr.value,
expected,
);
let types = roc_builtins::std::types();
let imports: Vec<_> = types
.iter()
.map(|(symbol, (solved_type, region))| Import {
loc_symbol: Located::at(*region, *symbol),
solved_type: solved_type,
})
.collect();
// load builtin values
let (_introduced_rigids, constraint) =
constrain_imported_values(imports, constraint, &var_store);
// load builtin types
let mut constraint =
load_builtin_aliases(&roc_builtins::std::aliases(), constraint, &var_store);
constraint.instantiate_aliases(&var_store);
let mut all_ident_ids = MutMap::default();
// When pretty printing types, we may need the exposed builtins,
// so include them in the Interns we'll ultimately return.
for (module_id, ident_ids) in IdentIds::exposed_builtins(0) {
all_ident_ids.insert(module_id, ident_ids);
}
all_ident_ids.insert(home, env.ident_ids);
let interns = Interns {
module_ids: env.module_ids.clone(),
all_ident_ids,
};
CanExprOut {
loc_expr,
output,
problems: env.problems,
home: env.home,
var_store,
interns,
var,
constraint,
}
}
#[allow(dead_code)]
pub fn mut_map_from_pairs<K, V, I>(pairs: I) -> MutMap<K, V>
where
I: IntoIterator<Item = (K, V)>,
K: Hash + Eq,
{
let mut answer = MutMap::default();
for (key, value) in pairs {
answer.insert(key, value);
}
answer
}
#[allow(dead_code)]
pub fn im_map_from_pairs<K, V, I>(pairs: I) -> ImMap<K, V>
where
I: IntoIterator<Item = (K, V)>,
K: Hash + Eq + Clone,
V: Clone,
{
let mut answer = ImMap::default();
for (key, value) in pairs {
answer.insert(key, value);
}
answer
}
#[allow(dead_code)]
pub fn send_set_from<V, I>(elems: I) -> SendSet<V>
where
I: IntoIterator<Item = V>,
V: Hash + Eq + Clone,
{
let mut answer = SendSet::default();
for elem in elems {
answer.insert(elem);
}
answer
}
#[allow(dead_code)]
pub fn fixtures_dir<'a>() -> PathBuf {
Path::new("tests").join("fixtures").join("build")
}
#[allow(dead_code)]
pub fn builtins_dir<'a>() -> PathBuf {
PathBuf::new().join("builtins")
}
// Check constraints
//
// Keep track of the used (in types or expectations) variables, and the declared variables (in
// flex_vars or rigid_vars fields of LetConstraint). These collections should match: no duplicates
// and no variables that are used but not declared are allowed.
//
// There is one exception: the initial variable (that stores the type of the whole expression) is
// never declared, but is used.
#[allow(dead_code)]
pub fn assert_correct_variable_usage(constraint: &Constraint) {
// variables declared in constraint (flex_vars or rigid_vars)
// and variables actually used in constraints
let (declared, used) = variable_usage(constraint);
let used: ImSet<Variable> = used.clone().into();
let mut decl: ImSet<Variable> = declared.rigid_vars.clone().into();
for var in declared.flex_vars.clone() {
decl.insert(var);
}
let diff = used.clone().relative_complement(decl);
// NOTE: this checks whether we're using variables that are not declared. For recursive type
// definitions, their rigid types are declared twice, which is correct!
if !diff.is_empty() {
println!("VARIABLE USAGE PROBLEM");
println!("used: {:?}", &used);
println!("rigids: {:?}", &declared.rigid_vars);
println!("flexs: {:?}", &declared.flex_vars);
println!("difference: {:?}", &diff);
panic!("variable usage problem (see stdout for details)");
}
}
#[derive(Default)]
pub struct SeenVariables {
pub rigid_vars: Vec<Variable>,
pub flex_vars: Vec<Variable>,
}
pub fn variable_usage(con: &Constraint) -> (SeenVariables, Vec<Variable>) {
let mut declared = SeenVariables::default();
let mut used = ImSet::default();
variable_usage_help(con, &mut declared, &mut used);
used.remove(unsafe { &Variable::unsafe_test_debug_variable(1) });
used.remove(unsafe { &Variable::unsafe_test_debug_variable(2) });
used.remove(unsafe { &Variable::unsafe_test_debug_variable(3) });
let mut used_vec: Vec<Variable> = used.into_iter().collect();
used_vec.sort();
declared.rigid_vars.sort();
declared.flex_vars.sort();
(declared, used_vec)
}
fn variable_usage_help(con: &Constraint, declared: &mut SeenVariables, used: &mut ImSet<Variable>) {
use Constraint::*;
match con {
True | SaveTheEnvironment => (),
Eq(tipe, expectation, _, _) => {
for v in tipe.variables() {
used.insert(v);
}
for v in expectation.get_type_ref().variables() {
used.insert(v);
}
}
Lookup(_, expectation, _) => {
for v in expectation.get_type_ref().variables() {
used.insert(v);
}
}
Pattern(_, _, tipe, pexpectation) => {
for v in tipe.variables() {
used.insert(v);
}
for v in pexpectation.get_type_ref().variables() {
used.insert(v);
}
}
Let(letcon) => {
declared.rigid_vars.extend(letcon.rigid_vars.clone());
declared.flex_vars.extend(letcon.flex_vars.clone());
variable_usage_help(&letcon.defs_constraint, declared, used);
variable_usage_help(&letcon.ret_constraint, declared, used);
}
And(constraints) => {
for sub in constraints {
variable_usage_help(sub, declared, used);
}
}
}
}

View file

@ -0,0 +1,524 @@
#[macro_use]
extern crate pretty_assertions;
// #[macro_use]
// extern crate indoc;
extern crate bumpalo;
extern crate roc_mono;
mod helpers;
// Test monomorphization
#[cfg(test)]
mod test_mono {
use crate::helpers::{can_expr, infer_expr, test_home, CanExprOut};
use bumpalo::Bump;
use roc_module::symbol::{Interns, Symbol};
use roc_mono::expr::Expr::{self, *};
use roc_mono::expr::Procs;
use roc_mono::layout;
use roc_mono::layout::{Builtin, Layout};
use roc_types::subs::Subs;
// HELPERS
fn compiles_to(src: &str, expected: Expr<'_>) {
compiles_to_with_interns(src, |_| expected)
}
fn compiles_to_with_interns<'a, F>(src: &str, get_expected: F)
where
F: FnOnce(Interns) -> Expr<'a>,
{
let arena = Bump::new();
let CanExprOut {
loc_expr,
var_store,
var,
constraint,
home,
mut interns,
..
} = can_expr(src);
let subs = Subs::new(var_store.into());
let mut unify_problems = Vec::new();
let (_content, mut subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
// Compile and add all the Procs before adding main
let mut procs = Procs::default();
let mut ident_ids = interns.all_ident_ids.remove(&home).unwrap();
// assume 64-bit pointers
let pointer_size = std::mem::size_of::<u64>() as u32;
// Populate Procs and Subs, and get the low-level Expr from the canonical Expr
let mut mono_problems = Vec::new();
let mono_expr = Expr::new(
&arena,
&mut subs,
&mut mono_problems,
loc_expr.value,
&mut procs,
home,
&mut ident_ids,
pointer_size,
);
// Put this module's ident_ids back in the interns
interns.all_ident_ids.insert(home, ident_ids);
assert_eq!(mono_expr, get_expected(interns));
}
#[test]
fn int_literal() {
compiles_to("5", Int(5));
}
#[test]
fn float_literal() {
compiles_to("0.5", Float(0.5));
}
#[test]
fn float_addition() {
compiles_to(
"3.0 + 4",
CallByName(
Symbol::FLOAT_ADD,
&[
(Float(3.0), Layout::Builtin(Builtin::Float64)),
(Float(4.0), Layout::Builtin(Builtin::Float64)),
],
),
);
}
#[test]
fn int_addition() {
compiles_to(
"0xDEADBEEF + 4",
CallByName(
Symbol::INT_ADD,
&[
(Int(3735928559), Layout::Builtin(Builtin::Int64)),
(Int(4), Layout::Builtin(Builtin::Int64)),
],
),
);
}
#[test]
fn num_addition() {
// Default to Int for `Num *`
compiles_to(
"3 + 5",
CallByName(
Symbol::INT_ADD,
&[
(Int(3), Layout::Builtin(Builtin::Int64)),
(Int(5), Layout::Builtin(Builtin::Int64)),
],
),
);
}
#[test]
fn specialize_closure() {
compiles_to(
r#"
f = \x -> x + 5
{ y: f 3.14, x: f 0x4 }
"#,
{
use self::Builtin::*;
use Layout::Builtin;
let home = test_home();
let gen_symbol_3 = Interns::from_index(home, 3);
let gen_symbol_4 = Interns::from_index(home, 4);
Struct(&[
(
CallByName(gen_symbol_3, &[(Int(4), Builtin(Int64))]),
Builtin(Int64),
),
(
CallByName(gen_symbol_4, &[(Float(3.14), Builtin(Float64))]),
Builtin(Float64),
),
])
},
)
}
#[test]
fn if_expression() {
compiles_to(
r#"
if True then "bar" else "foo"
"#,
{
use self::Builtin::*;
use Layout::Builtin;
let home = test_home();
let gen_symbol_0 = Interns::from_index(home, 0);
Store(
&[(
gen_symbol_0,
Layout::Builtin(layout::Builtin::Bool),
Expr::Bool(true),
)],
&Cond {
cond_symbol: gen_symbol_0,
branch_symbol: gen_symbol_0,
cond_layout: Builtin(Bool),
pass: (&[] as &[_], &Expr::Str("bar")),
fail: (&[] as &[_], &Expr::Str("foo")),
ret_layout: Builtin(Str),
},
)
},
)
}
#[test]
fn multiway_if_expression() {
compiles_to(
r#"
if True then
"bar"
else if False then
"foo"
else
"baz"
"#,
{
use self::Builtin::*;
use Layout::Builtin;
let home = test_home();
let gen_symbol_0 = Interns::from_index(home, 1);
let gen_symbol_1 = Interns::from_index(home, 0);
Store(
&[(
gen_symbol_0,
Layout::Builtin(layout::Builtin::Bool),
Expr::Bool(true),
)],
&Cond {
cond_symbol: gen_symbol_0,
branch_symbol: gen_symbol_0,
cond_layout: Builtin(Bool),
pass: (&[] as &[_], &Expr::Str("bar")),
fail: (
&[] as &[_],
&Store(
&[(
gen_symbol_1,
Layout::Builtin(layout::Builtin::Bool),
Expr::Bool(false),
)],
&Cond {
cond_symbol: gen_symbol_1,
branch_symbol: gen_symbol_1,
cond_layout: Builtin(Bool),
pass: (&[] as &[_], &Expr::Str("foo")),
fail: (&[] as &[_], &Expr::Str("baz")),
ret_layout: Builtin(Str),
},
),
),
ret_layout: Builtin(Str),
},
)
},
)
}
#[test]
fn annotated_if_expression() {
// an if with an annotation gets constrained differently. Make sure the result is still correct.
compiles_to(
r#"
x : Str
x = if True then "bar" else "foo"
x
"#,
{
use self::Builtin::*;
use Layout::Builtin;
let home = test_home();
let gen_symbol_0 = Interns::from_index(home, 1);
let symbol_x = Interns::from_index(home, 0);
Store(
&[(
symbol_x,
Builtin(Str),
Store(
&[(
gen_symbol_0,
Layout::Builtin(layout::Builtin::Bool),
Expr::Bool(true),
)],
&Cond {
cond_symbol: gen_symbol_0,
branch_symbol: gen_symbol_0,
cond_layout: Builtin(Bool),
pass: (&[] as &[_], &Expr::Str("bar")),
fail: (&[] as &[_], &Expr::Str("foo")),
ret_layout: Builtin(Str),
},
),
)],
&Load(symbol_x),
)
},
)
}
// #[test]
// fn record_pattern() {
// compiles_to(
// r#"
// \{ x } -> x + 0x5
// "#,
// { Float(3.45) },
// )
// }
//
// #[test]
// fn tag_pattern() {
// compiles_to(
// r#"
// \Foo x -> x + 0x5
// "#,
// { Float(3.45) },
// )
// }
#[test]
fn polymorphic_identity() {
compiles_to(
r#"
id = \x -> x
id { x: id 0x4 }
"#,
{
use self::Builtin::*;
use Layout::Builtin;
let home = test_home();
let gen_symbol_3 = Interns::from_index(home, 3);
let gen_symbol_4 = Interns::from_index(home, 4);
CallByName(
gen_symbol_3,
&[(
Struct(&[(
CallByName(gen_symbol_4, &[(Int(4), Builtin(Int64))]),
Builtin(Int64),
)]),
Layout::Struct(&[Builtin(Int64)]),
)],
)
},
)
}
// needs LetRec to be converted to mono
// #[test]
// fn polymorphic_recursive() {
// compiles_to(
// r#"
// f = \x ->
// when x < 10 is
// True -> f (x + 1)
// False -> x
//
// { x: f 0x4, y: f 3.14 }
// "#,
// {
// use self::Builtin::*;
// use Layout::Builtin;
// let home = test_home();
//
// let gen_symbol_3 = Interns::from_index(home, 3);
// let gen_symbol_4 = Interns::from_index(home, 4);
//
// Float(3.4)
//
// },
// )
// }
// needs layout for non-empty tag union
// #[test]
// fn is_nil() {
// let arena = Bump::new();
//
// compiles_to_with_interns(
// r#"
// LinkedList a : [ Cons a (LinkedList a), Nil ]
//
// isNil : LinkedList a -> Bool
// isNil = \list ->
// when list is
// Nil -> True
// Cons _ _ -> False
//
// listInt : LinkedList Int
// listInt = Nil
//
// isNil listInt
// "#,
// |interns| {
// let home = test_home();
// let var_is_nil = interns.symbol(home, "isNil".into());
// },
// );
// }
#[test]
fn bool_literal() {
let arena = Bump::new();
compiles_to_with_interns(
r#"
x : Bool
x = True
x
"#,
|interns| {
let home = test_home();
let var_x = interns.symbol(home, "x".into());
let stores = [(var_x, Layout::Builtin(Builtin::Bool), Bool(true))];
let load = Load(var_x);
Store(arena.alloc(stores), arena.alloc(load))
},
);
}
#[test]
fn two_element_enum() {
let arena = Bump::new();
compiles_to_with_interns(
r#"
x : [ Yes, No ]
x = No
x
"#,
|interns| {
let home = test_home();
let var_x = interns.symbol(home, "x".into());
let stores = [(var_x, Layout::Builtin(Builtin::Bool), Bool(false))];
let load = Load(var_x);
Store(arena.alloc(stores), arena.alloc(load))
},
);
}
#[test]
fn three_element_enum() {
let arena = Bump::new();
compiles_to_with_interns(
r#"
# this test is brought to you by fruits.com!
x : [ Apple, Orange, Banana ]
x = Orange
x
"#,
|interns| {
let home = test_home();
let var_x = interns.symbol(home, "x".into());
// orange gets index (and therefore tag_id) 2
let stores = [(var_x, Layout::Builtin(Builtin::Byte), Byte(2))];
let load = Load(var_x);
Store(arena.alloc(stores), arena.alloc(load))
},
);
}
#[test]
fn set_unique_int_list() {
compiles_to("List.getUnsafe (List.set [ 12, 9, 7, 3 ] 1 42) 1", {
CallByName(
Symbol::LIST_GET_UNSAFE,
&vec![
(
CallByName(
Symbol::LIST_SET,
&vec![
(
Array {
elem_layout: Layout::Builtin(Builtin::Int64),
elems: &vec![Int(12), Int(9), Int(7), Int(3)],
},
Layout::Builtin(Builtin::List(&Layout::Builtin(
Builtin::Int64,
))),
),
(Int(1), Layout::Builtin(Builtin::Int64)),
(Int(42), Layout::Builtin(Builtin::Int64)),
],
),
Layout::Builtin(Builtin::List(&Layout::Builtin(Builtin::Int64))),
),
(Int(1), Layout::Builtin(Builtin::Int64)),
],
)
});
}
// #[test]
// fn when_on_result() {
// compiles_to(
// r#"
// when 1 is
// 1 -> 12
// _ -> 34
// "#,
// {
// use self::Builtin::*;
// use Layout::Builtin;
// let home = test_home();
//
// let gen_symbol_3 = Interns::from_index(home, 3);
// let gen_symbol_4 = Interns::from_index(home, 4);
//
// CallByName(
// gen_symbol_3,
// &[(
// Struct(&[(
// CallByName(gen_symbol_4, &[(Int(4), Builtin(Int64))]),
// Builtin(Int64),
// )]),
// Layout::Struct(&[("x".into(), Builtin(Int64))]),
// )],
// )
// },
// )
// }
}

View file

@ -0,0 +1,268 @@
#[macro_use]
extern crate pretty_assertions;
// #[macro_use]
// extern crate indoc;
extern crate bumpalo;
extern crate roc_mono;
mod helpers;
// Test optimizations
#[cfg(test)]
mod test_opt {
use crate::helpers::{infer_expr, uniq_expr};
use bumpalo::Bump;
use roc_module::symbol::Symbol;
use roc_mono::expr::Expr::{self, *};
use roc_mono::expr::Procs;
use roc_mono::layout::{Builtin, Layout};
// HELPERS
#[derive(Debug, Default, PartialEq, Eq)]
struct CallProblems {
missing: Vec<Symbol>,
unexpected: Vec<Symbol>,
}
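/// Monomorphize `src` and assert that exactly the given `CallByName` symbols
/// appear in the result: symbols we expected but never saw end up in
/// `missing`, and calls we saw but did not expect end up in `unexpected`.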
fn contains_named_calls(src: &str, mut calls: Vec<Symbol>) {
let arena = Bump::new();
let (loc_expr, _, _problems, subs, var, constraint, home, mut interns) = uniq_expr(src);
let mut unify_problems = Vec::new();
let (_content, mut subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
// Compile and add all the Procs before adding main
let mut procs = Procs::default();
let mut ident_ids = interns.all_ident_ids.remove(&home).unwrap();
// assume 64-bit pointers
let pointer_size = std::mem::size_of::<u64>() as u32;
// Populate Procs and Subs, and get the low-level Expr from the canonical Expr
let mut mono_problems = Vec::new();
let mono_expr = Expr::new(
&arena,
&mut subs,
&mut mono_problems,
loc_expr.value,
&mut procs,
home,
&mut ident_ids,
pointer_size,
);
let unexpected_calls = extract_named_calls(&mono_expr, &mut calls);
let expected = CallProblems::default();
let actual = CallProblems {
missing: calls,
unexpected: unexpected_calls,
};
assert_eq!(expected, actual);
}
fn extract_named_calls(expr: &Expr<'_>, calls: &mut Vec<Symbol>) -> Vec<Symbol> {
let mut unexpected_calls = Vec::new();
// The calls must be sorted so we can binary_search them for matches.
calls.sort();
extract_named_calls_help(expr, calls, &mut unexpected_calls);
unexpected_calls
}
fn extract_named_calls_help(
expr: &Expr<'_>,
calls: &mut Vec<Symbol>,
unexpected_calls: &mut Vec<Symbol>,
) {
match expr {
Int(_) | Float(_) | Str(_) | Bool(_) | Byte(_) | Load(_) | FunctionPointer(_)
| RuntimeError(_) => (),
Store(paths, sub_expr) => {
for (_, _, path_expr) in paths.iter() {
extract_named_calls_help(path_expr, calls, unexpected_calls);
}
extract_named_calls_help(sub_expr, calls, unexpected_calls);
}
CallByPointer(sub_expr, args, _) => {
extract_named_calls_help(sub_expr, calls, unexpected_calls);
for arg in args.iter() {
extract_named_calls_help(arg, calls, unexpected_calls);
}
}
CallByName(symbol, args) => {
// Search for the symbol. If we found it, check it off the list.
// If we didn't find it, add it to the list of unexpected calls.
match calls.binary_search(symbol) {
Ok(index) => {
calls.remove(index);
}
Err(_) => {
unexpected_calls.push(*symbol);
}
}
for (arg, _) in args.iter() {
extract_named_calls_help(arg, calls, unexpected_calls);
}
}
Cond {
cond_symbol: _,
branch_symbol: _,
cond_layout: _,
pass,
fail,
ret_layout: _,
} => {
extract_named_calls_help(pass.1, calls, unexpected_calls);
extract_named_calls_help(fail.1, calls, unexpected_calls);
}
Switch {
cond,
cond_layout: _,
branches,
default_branch,
ret_layout: _,
} => {
extract_named_calls_help(cond, calls, unexpected_calls);
extract_named_calls_help(default_branch.1, calls, unexpected_calls);
for (_, _, branch_expr) in branches.iter() {
extract_named_calls_help(branch_expr, calls, unexpected_calls);
}
}
Tag {
tag_layout: _,
tag_name: _,
tag_id: _,
union_size: _,
arguments,
} => {
for (tag_expr, _) in arguments.iter() {
extract_named_calls_help(tag_expr, calls, unexpected_calls);
}
}
Struct(fields) => {
for (field, _) in fields.iter() {
extract_named_calls_help(field, calls, unexpected_calls);
}
}
AccessAtIndex {
index: _,
field_layouts: _,
expr: sub_expr,
is_unwrapped: _,
} => {
extract_named_calls_help(sub_expr, calls, unexpected_calls);
}
Array {
elem_layout: _,
elems,
} => {
for elem in elems.iter() {
extract_named_calls_help(elem, calls, unexpected_calls);
}
}
}
}
fn compiles_to(src: &str, expected: Expr<'_>) {
let arena = Bump::new();
let (loc_expr, _, _problems, subs, var, constraint, home, mut interns) = uniq_expr(src);
let mut unify_problems = Vec::new();
let (_content, mut subs) = infer_expr(subs, &mut unify_problems, &constraint, var);
// Compile and add all the Procs before adding main
let mut procs = Procs::default();
let mut ident_ids = interns.all_ident_ids.remove(&home).unwrap();
// assume 64-bit pointers
let pointer_size = std::mem::size_of::<u64>() as u32;
// Populate Procs and Subs, and get the low-level Expr from the canonical Expr
let mut mono_problems = Vec::new();
let mono_expr = Expr::new(
&arena,
&mut subs,
&mut mono_problems,
loc_expr.value,
&mut procs,
home,
&mut ident_ids,
pointer_size,
);
assert_eq!(mono_expr, expected);
}
#[test]
fn int_literal() {
compiles_to("5", Int(5));
}
#[test]
fn float_literal() {
compiles_to("0.5", Float(0.5));
}
#[test]
fn set_unique_int_list() {
// This should optimize List.set to List.set_in_place
compiles_to(
"List.getUnsafe (List.set [ 12, 9, 7, 3 ] 1 42) 1",
CallByName(
Symbol::LIST_GET_UNSAFE,
&vec![
(
CallByName(
Symbol::LIST_SET_IN_PLACE,
&vec![
(
Array {
elem_layout: Layout::Builtin(Builtin::Int64),
elems: &vec![Int(12), Int(9), Int(7), Int(3)],
},
Layout::Builtin(Builtin::List(&Layout::Builtin(
Builtin::Int64,
))),
),
(Int(1), Layout::Builtin(Builtin::Int64)),
(Int(42), Layout::Builtin(Builtin::Int64)),
],
),
Layout::Builtin(Builtin::List(&Layout::Builtin(Builtin::Int64))),
),
(Int(1), Layout::Builtin(Builtin::Int64)),
],
),
);
}
#[test]
fn set_shared_int_list() {
// This should *NOT* optimize List.set to List.set_in_place
contains_named_calls(
r#"
shared = [ 2, 4 ]
# This should not mutate the original
x = List.set shared 1 0
{ x, y: List.getUnsafe shared 1 }
"#,
vec![Symbol::LIST_SET, Symbol::LIST_GET_UNSAFE],
);
}
}

View file

@ -93,7 +93,7 @@ pub struct WhenPattern<'a> {
pub enum Expr<'a> { pub enum Expr<'a> {
// Number Literals // Number Literals
Float(&'a str), Float(&'a str),
Int(&'a str), Num(&'a str),
NonBase10Int { NonBase10Int {
string: &'a str, string: &'a str,
base: Base, base: Base,
@ -165,7 +165,7 @@ pub enum Expr<'a> {
MalformedClosure, MalformedClosure,
// Both operators were non-associative, e.g. (True == False == False). // Both operators were non-associative, e.g. (True == False == False).
// We should tell the author to disambiguate by grouping them with parens. // We should tell the author to disambiguate by grouping them with parens.
PrecedenceConflict(Loc<BinOp>, Loc<BinOp>, &'a Loc<Expr<'a>>), PrecedenceConflict(Region, Loc<BinOp>, Loc<BinOp>, &'a Loc<Expr<'a>>),
} }
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
@ -324,7 +324,7 @@ pub enum Pattern<'a> {
Nested(&'a Pattern<'a>), Nested(&'a Pattern<'a>),
// Literal // Literal
IntLiteral(&'a str), NumLiteral(&'a str),
NonBase10Literal { NonBase10Literal {
string: &'a str, string: &'a str,
base: Base, base: Base,
@ -425,7 +425,7 @@ impl<'a> Pattern<'a> {
(Nested(x), Nested(y)) => x.equivalent(y), (Nested(x), Nested(y)) => x.equivalent(y),
// Literal // Literal
(IntLiteral(x), IntLiteral(y)) => x == y, (NumLiteral(x), NumLiteral(y)) => x == y,
( (
NonBase10Literal { NonBase10Literal {
string: string_x, string: string_x,

View file

@ -299,7 +299,7 @@ fn expr_to_pattern<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<'a>,
} }
Expr::Float(string) => Ok(Pattern::FloatLiteral(string)), Expr::Float(string) => Ok(Pattern::FloatLiteral(string)),
Expr::Int(string) => Ok(Pattern::IntLiteral(string)), Expr::Num(string) => Ok(Pattern::NumLiteral(string)),
Expr::NonBase10Int { Expr::NonBase10Int {
string, string,
base, base,
@ -323,7 +323,7 @@ fn expr_to_pattern<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<'a>,
| Expr::If(_, _, _) | Expr::If(_, _, _)
| Expr::When(_, _) | Expr::When(_, _)
| Expr::MalformedClosure | Expr::MalformedClosure
| Expr::PrecedenceConflict(_, _, _) | Expr::PrecedenceConflict(_, _, _, _)
| Expr::Record { | Expr::Record {
update: Some(_), .. update: Some(_), ..
} }
@ -551,7 +551,7 @@ fn annotation_or_alias<'a>(
QualifiedIdentifier { .. } => { QualifiedIdentifier { .. } => {
panic!("TODO gracefully handle trying to annotate a qualified identifier, e.g. `Foo.bar : ...`"); panic!("TODO gracefully handle trying to annotate a qualified identifier, e.g. `Foo.bar : ...`");
} }
IntLiteral(_) NumLiteral(_)
| NonBase10Literal { .. } | NonBase10Literal { .. }
| FloatLiteral(_) | FloatLiteral(_)
| StrLiteral(_) | StrLiteral(_)
@ -1310,8 +1310,11 @@ pub fn ident_etc<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>> {
region: loc_arg.region, region: loc_arg.region,
}); });
} }
Err(_malformed) => { Err(malformed) => {
panic!("TODO early return malformed pattern"); panic!(
"TODO early return malformed pattern {:?}",
malformed
);
} }
} }
} }

View file

@ -36,7 +36,7 @@ where
{ {
use self::LiteralType::*; use self::LiteralType::*;
let mut typ = Int; let mut typ = Num;
// We already parsed 1 character (which may have been a minus sign). // We already parsed 1 character (which may have been a minus sign).
let mut bytes_parsed = 1; let mut bytes_parsed = 1;
@ -71,8 +71,8 @@ where
} else { } else {
return err_unexpected(); return err_unexpected();
} }
} else if next_ch == 'b' && typ == Int { } else if next_ch == 'b' && typ == Num {
// We have to check for typ == Int because otherwise we get a false // We have to check for typ == Num because otherwise we get a false
// positive here when parsing a hex literal that happens to have // positive here when parsing a hex literal that happens to have
// a 'b' in it, e.g. 0xbbbb // a 'b' in it, e.g. 0xbbbb
if is_potentially_non_base10() { if is_potentially_non_base10() {
@ -129,7 +129,7 @@ where
// If the number is malformed (outside the supported range), // If the number is malformed (outside the supported range),
// we'll succeed with an appropriate Expr which records that. // we'll succeed with an appropriate Expr which records that.
let expr = match typ { let expr = match typ {
Int => Expr::Int(&state.input[0..bytes_parsed]), Num => Expr::Num(&state.input[0..bytes_parsed]),
Float => Expr::Float(&state.input[0..bytes_parsed]), Float => Expr::Float(&state.input[0..bytes_parsed]),
// For these we trim off the 0x/0o/0b part // For these we trim off the 0x/0o/0b part
Hex => from_base(Base::Hex), Hex => from_base(Base::Hex),
@ -144,7 +144,7 @@ where
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
enum LiteralType { enum LiteralType {
Int, Num,
Float, Float,
Hex, Hex,
Octal, Octal,

View file

@ -1,5 +1,6 @@
use self::BinOp::*; use self::BinOp::*;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CalledVia { pub enum CalledVia {
@ -109,3 +110,29 @@ impl Ord for BinOp {
self.precedence().cmp(&other.precedence()) self.precedence().cmp(&other.precedence())
} }
} }
impl std::fmt::Display for BinOp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let as_str = match self {
Caret => "^",
Star => "*",
Slash => "/",
DoubleSlash => "//",
Percent => "%",
DoublePercent => "%%",
Plus => "+",
Minus => "-",
Equals => "==",
NotEquals => "!=",
LessThan => "<",
GreaterThan => ">",
LessThanOrEq => "<=",
GreaterThanOrEq => ">=",
And => "&&",
Or => "||",
Pizza => "|>",
};
write!(f, "{}", as_str)
}
}

View file

@ -147,26 +147,26 @@ mod test_parse {
#[test] #[test]
fn zero_int() { fn zero_int() {
assert_parses_to("0", Int("0")); assert_parses_to("0", Num("0"));
} }
#[test] #[test]
fn positive_int() { fn positive_int() {
assert_parses_to("1", Int("1")); assert_parses_to("1", Num("1"));
assert_parses_to("42", Int("42")); assert_parses_to("42", Num("42"));
} }
#[test] #[test]
fn negative_int() { fn negative_int() {
assert_parses_to("-1", Int("-1")); assert_parses_to("-1", Num("-1"));
assert_parses_to("-42", Int("-42")); assert_parses_to("-42", Num("-42"));
} }
#[test] #[test]
fn highest_int() { fn highest_int() {
assert_parses_to( assert_parses_to(
i64::MAX.to_string().as_str(), i64::MAX.to_string().as_str(),
Int(i64::MAX.to_string().as_str()), Num(i64::MAX.to_string().as_str()),
); );
} }
@ -174,24 +174,24 @@ mod test_parse {
fn lowest_int() { fn lowest_int() {
assert_parses_to( assert_parses_to(
i64::MIN.to_string().as_str(), i64::MIN.to_string().as_str(),
Int(i64::MIN.to_string().as_str()), Num(i64::MIN.to_string().as_str()),
); );
} }
#[test] #[test]
fn int_with_underscore() { fn int_with_underscore() {
assert_parses_to("1_2_34_567", Int("1_2_34_567")); assert_parses_to("1_2_34_567", Num("1_2_34_567"));
assert_parses_to("-1_2_34_567", Int("-1_2_34_567")); assert_parses_to("-1_2_34_567", Num("-1_2_34_567"));
// The following cases are silly. They aren't supported on purpose, // The following cases are silly. They aren't supported on purpose,
// but there would be a performance cost to explicitly disallowing them, // but there would be a performance cost to explicitly disallowing them,
// which doesn't seem like it would benefit anyone. // which doesn't seem like it would benefit anyone.
assert_parses_to("1_", Int("1_")); assert_parses_to("1_", Num("1_"));
assert_parses_to("1__23", Int("1__23")); assert_parses_to("1__23", Num("1__23"));
} }
#[quickcheck] #[quickcheck]
fn all_i64_values_parse(num: i64) { fn all_i64_values_parse(num: i64) {
assert_parses_to(num.to_string().as_str(), Int(num.to_string().as_str())); assert_parses_to(num.to_string().as_str(), Num(num.to_string().as_str()));
} }
// FLOAT LITERALS // FLOAT LITERALS
@ -262,12 +262,12 @@ mod test_parse {
let label1 = LabeledValue( let label1 = LabeledValue(
Located::new(0, 0, 16, 17, "x"), Located::new(0, 0, 16, 17, "x"),
&[], &[],
arena.alloc(Located::new(0, 0, 19, 20, Int("5"))), arena.alloc(Located::new(0, 0, 19, 20, Num("5"))),
); );
let label2 = LabeledValue( let label2 = LabeledValue(
Located::new(0, 0, 22, 23, "y"), Located::new(0, 0, 22, 23, "y"),
&[], &[],
arena.alloc(Located::new(0, 0, 25, 26, Int("0"))), arena.alloc(Located::new(0, 0, 25, 26, Num("0"))),
); );
let fields = bumpalo::vec![in &arena; let fields = bumpalo::vec![in &arena;
Located::new(0, 0, 16, 20, label1), Located::new(0, 0, 16, 20, label1),
@ -293,9 +293,9 @@ mod test_parse {
fn one_plus_two() { fn one_plus_two() {
let arena = Bump::new(); let arena = Bump::new();
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, Int("1")), Located::new(0, 0, 0, 1, Num("1")),
Located::new(0, 0, 1, 2, Plus), Located::new(0, 0, 1, 2, Plus),
Located::new(0, 0, 2, 3, Int("2")), Located::new(0, 0, 2, 3, Num("2")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "1+2"); let actual = parse_with(&arena, "1+2");
@ -307,9 +307,9 @@ mod test_parse {
fn one_minus_two() { fn one_minus_two() {
let arena = Bump::new(); let arena = Bump::new();
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, Int("1")), Located::new(0, 0, 0, 1, Num("1")),
Located::new(0, 0, 1, 2, Minus), Located::new(0, 0, 1, 2, Minus),
Located::new(0, 0, 2, 3, Int("2")), Located::new(0, 0, 2, 3, Num("2")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "1-2"); let actual = parse_with(&arena, "1-2");
@ -321,9 +321,9 @@ mod test_parse {
fn add_with_spaces() { fn add_with_spaces() {
let arena = Bump::new(); let arena = Bump::new();
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, Int("1")), Located::new(0, 0, 0, 1, Num("1")),
Located::new(0, 0, 3, 4, Plus), Located::new(0, 0, 3, 4, Plus),
Located::new(0, 0, 7, 8, Int("2")), Located::new(0, 0, 7, 8, Num("2")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "1 + 2"); let actual = parse_with(&arena, "1 + 2");
@ -335,9 +335,9 @@ mod test_parse {
fn sub_with_spaces() { fn sub_with_spaces() {
let arena = Bump::new(); let arena = Bump::new();
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, Int("1")), Located::new(0, 0, 0, 1, Num("1")),
Located::new(0, 0, 3, 4, Minus), Located::new(0, 0, 3, 4, Minus),
Located::new(0, 0, 7, 8, Int("2")), Located::new(0, 0, 7, 8, Num("2")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "1 - 2"); let actual = parse_with(&arena, "1 - 2");
@ -360,7 +360,7 @@ mod test_parse {
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, var), Located::new(0, 0, 0, 1, var),
Located::new(0, 0, 2, 3, Plus), Located::new(0, 0, 2, 3, Plus),
Located::new(0, 0, 4, 5, Int("2")), Located::new(0, 0, 4, 5, Num("2")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "x + 2"); let actual = parse_with(&arena, "x + 2");
@ -382,7 +382,7 @@ mod test_parse {
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, var), Located::new(0, 0, 0, 1, var),
Located::new(0, 0, 2, 3, Minus), Located::new(0, 0, 2, 3, Minus),
Located::new(0, 0, 4, 5, Int("2")), Located::new(0, 0, 4, 5, Num("2")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "x - 2"); let actual = parse_with(&arena, "x - 2");
@ -394,13 +394,13 @@ mod test_parse {
fn newline_before_add() { fn newline_before_add() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = Expr::SpaceAfter( let spaced_int = Expr::SpaceAfter(
arena.alloc(Int("3")), arena.alloc(Num("3")),
bumpalo::vec![in &arena; Newline].into_bump_slice(), bumpalo::vec![in &arena; Newline].into_bump_slice(),
); );
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, spaced_int), Located::new(0, 0, 0, 1, spaced_int),
Located::new(1, 1, 0, 1, Plus), Located::new(1, 1, 0, 1, Plus),
Located::new(1, 1, 2, 3, Int("4")), Located::new(1, 1, 2, 3, Num("4")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "3 \n+ 4"); let actual = parse_with(&arena, "3 \n+ 4");
@ -412,13 +412,13 @@ mod test_parse {
fn newline_before_sub() { fn newline_before_sub() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = Expr::SpaceAfter( let spaced_int = Expr::SpaceAfter(
arena.alloc(Int("3")), arena.alloc(Num("3")),
bumpalo::vec![in &arena; Newline].into_bump_slice(), bumpalo::vec![in &arena; Newline].into_bump_slice(),
); );
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, spaced_int), Located::new(0, 0, 0, 1, spaced_int),
Located::new(1, 1, 0, 1, Minus), Located::new(1, 1, 0, 1, Minus),
Located::new(1, 1, 2, 3, Int("4")), Located::new(1, 1, 2, 3, Num("4")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "3 \n- 4"); let actual = parse_with(&arena, "3 \n- 4");
@ -430,10 +430,10 @@ mod test_parse {
fn newline_after_mul() { fn newline_after_mul() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = arena let spaced_int = arena
.alloc(Int("4")) .alloc(Num("4"))
.before(bumpalo::vec![in &arena; Newline].into_bump_slice()); .before(bumpalo::vec![in &arena; Newline].into_bump_slice());
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, Int("3")), Located::new(0, 0, 0, 1, Num("3")),
Located::new(0, 0, 3, 4, Star), Located::new(0, 0, 3, 4, Star),
Located::new(1, 1, 2, 3, spaced_int), Located::new(1, 1, 2, 3, spaced_int),
)); ));
@ -447,10 +447,10 @@ mod test_parse {
fn newline_after_sub() { fn newline_after_sub() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = arena let spaced_int = arena
.alloc(Int("4")) .alloc(Num("4"))
.before(bumpalo::vec![in &arena; Newline].into_bump_slice()); .before(bumpalo::vec![in &arena; Newline].into_bump_slice());
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, Int("3")), Located::new(0, 0, 0, 1, Num("3")),
Located::new(0, 0, 3, 4, Minus), Located::new(0, 0, 3, 4, Minus),
Located::new(1, 1, 2, 3, spaced_int), Located::new(1, 1, 2, 3, spaced_int),
)); ));
@ -464,12 +464,12 @@ mod test_parse {
fn comment_with_unicode() { fn comment_with_unicode() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = arena let spaced_int = arena
.alloc(Int("3")) .alloc(Num("3"))
.after(bumpalo::vec![in &arena; LineComment(" 2 × 2")].into_bump_slice()); .after(bumpalo::vec![in &arena; LineComment(" 2 × 2")].into_bump_slice());
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, spaced_int), Located::new(0, 0, 0, 1, spaced_int),
Located::new(1, 1, 0, 1, Plus), Located::new(1, 1, 0, 1, Plus),
Located::new(1, 1, 2, 3, Int("4")), Located::new(1, 1, 2, 3, Num("4")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "3 # 2 × 2\n+ 4"); let actual = parse_with(&arena, "3 # 2 × 2\n+ 4");
@ -481,12 +481,12 @@ mod test_parse {
fn comment_before_op() { fn comment_before_op() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = arena let spaced_int = arena
.alloc(Int("3")) .alloc(Num("3"))
.after(bumpalo::vec![in &arena; LineComment(" test!")].into_bump_slice()); .after(bumpalo::vec![in &arena; LineComment(" test!")].into_bump_slice());
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, spaced_int), Located::new(0, 0, 0, 1, spaced_int),
Located::new(1, 1, 0, 1, Plus), Located::new(1, 1, 0, 1, Plus),
Located::new(1, 1, 2, 3, Int("4")), Located::new(1, 1, 2, 3, Num("4")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "3 # test!\n+ 4"); let actual = parse_with(&arena, "3 # test!\n+ 4");
@ -498,10 +498,10 @@ mod test_parse {
fn comment_after_op() { fn comment_after_op() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int = arena let spaced_int = arena
.alloc(Int("92")) .alloc(Num("92"))
.before(bumpalo::vec![in &arena; LineComment(" test!")].into_bump_slice()); .before(bumpalo::vec![in &arena; LineComment(" test!")].into_bump_slice());
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 2, Int("12")), Located::new(0, 0, 0, 2, Num("12")),
Located::new(0, 0, 4, 5, Star), Located::new(0, 0, 4, 5, Star),
Located::new(1, 1, 1, 3, spaced_int), Located::new(1, 1, 1, 3, spaced_int),
)); ));
@ -515,10 +515,10 @@ mod test_parse {
fn ops_with_newlines() { fn ops_with_newlines() {
let arena = Bump::new(); let arena = Bump::new();
let spaced_int1 = arena let spaced_int1 = arena
.alloc(Int("3")) .alloc(Num("3"))
.after(bumpalo::vec![in &arena; Newline].into_bump_slice()); .after(bumpalo::vec![in &arena; Newline].into_bump_slice());
let spaced_int2 = arena let spaced_int2 = arena
.alloc(Int("4")) .alloc(Num("4"))
.before(bumpalo::vec![in &arena; Newline, Newline].into_bump_slice()); .before(bumpalo::vec![in &arena; Newline, Newline].into_bump_slice());
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 1, spaced_int1), Located::new(0, 0, 0, 1, spaced_int1),
@ -559,9 +559,9 @@ mod test_parse {
fn minus_twelve_minus_five() { fn minus_twelve_minus_five() {
let arena = Bump::new(); let arena = Bump::new();
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 3, Int("-12")), Located::new(0, 0, 0, 3, Num("-12")),
Located::new(0, 0, 3, 4, Minus), Located::new(0, 0, 3, 4, Minus),
Located::new(0, 0, 4, 5, Int("5")), Located::new(0, 0, 4, 5, Num("5")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "-12-5"); let actual = parse_with(&arena, "-12-5");
@ -573,9 +573,9 @@ mod test_parse {
fn ten_times_eleven() { fn ten_times_eleven() {
let arena = Bump::new(); let arena = Bump::new();
let tuple = arena.alloc(( let tuple = arena.alloc((
Located::new(0, 0, 0, 2, Int("10")), Located::new(0, 0, 0, 2, Num("10")),
Located::new(0, 0, 2, 3, Star), Located::new(0, 0, 2, 3, Star),
Located::new(0, 0, 3, 5, Int("11")), Located::new(0, 0, 3, 5, Num("11")),
)); ));
let expected = BinOp(tuple); let expected = BinOp(tuple);
let actual = parse_with(&arena, "10*11"); let actual = parse_with(&arena, "10*11");
@ -587,12 +587,12 @@ mod test_parse {
fn multiple_operators() { fn multiple_operators() {
let arena = Bump::new(); let arena = Bump::new();
let inner = arena.alloc(( let inner = arena.alloc((
Located::new(0, 0, 3, 5, Int("42")), Located::new(0, 0, 3, 5, Num("42")),
Located::new(0, 0, 5, 6, Plus), Located::new(0, 0, 5, 6, Plus),
Located::new(0, 0, 6, 9, Int("534")), Located::new(0, 0, 6, 9, Num("534")),
)); ));
let outer = arena.alloc(( let outer = arena.alloc((
Located::new(0, 0, 0, 2, Int("31")), Located::new(0, 0, 0, 2, Num("31")),
Located::new(0, 0, 2, 3, Star), Located::new(0, 0, 2, 3, Star),
Located::new(0, 0, 3, 9, BinOp(inner)), Located::new(0, 0, 3, 9, BinOp(inner)),
)); ));
@ -707,8 +707,8 @@ mod test_parse {
#[test] #[test]
fn apply_private_tag() { fn apply_private_tag() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Num("12")));
let arg2 = arena.alloc(Located::new(0, 0, 9, 11, Int("34"))); let arg2 = arena.alloc(Located::new(0, 0, 9, 11, Num("34")));
let args = bumpalo::vec![in &arena; &*arg1, &*arg2]; let args = bumpalo::vec![in &arena; &*arg1, &*arg2];
let expected = Expr::Apply( let expected = Expr::Apply(
arena.alloc(Located::new(0, 0, 0, 5, Expr::PrivateTag("@Whee"))), arena.alloc(Located::new(0, 0, 0, 5, Expr::PrivateTag("@Whee"))),
@ -723,8 +723,8 @@ mod test_parse {
#[test] #[test]
fn apply_global_tag() { fn apply_global_tag() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 5, 7, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 5, 7, Num("12")));
let arg2 = arena.alloc(Located::new(0, 0, 8, 10, Int("34"))); let arg2 = arena.alloc(Located::new(0, 0, 8, 10, Num("34")));
let args = bumpalo::vec![in &arena; &*arg1, &*arg2]; let args = bumpalo::vec![in &arena; &*arg1, &*arg2];
let expected = Expr::Apply( let expected = Expr::Apply(
arena.alloc(Located::new(0, 0, 0, 4, Expr::GlobalTag("Whee"))), arena.alloc(Located::new(0, 0, 0, 4, Expr::GlobalTag("Whee"))),
@ -739,8 +739,8 @@ mod test_parse {
#[test] #[test]
fn apply_parenthetical_global_tag_args() { fn apply_parenthetical_global_tag_args() {
let arena = Bump::new(); let arena = Bump::new();
let int1 = ParensAround(arena.alloc(Int("12"))); let int1 = ParensAround(arena.alloc(Num("12")));
let int2 = ParensAround(arena.alloc(Int("34"))); let int2 = ParensAround(arena.alloc(Num("34")));
let arg1 = arena.alloc(Located::new(0, 0, 6, 8, int1)); let arg1 = arena.alloc(Located::new(0, 0, 6, 8, int1));
let arg2 = arena.alloc(Located::new(0, 0, 11, 13, int2)); let arg2 = arena.alloc(Located::new(0, 0, 11, 13, int2));
let args = bumpalo::vec![in &arena; &*arg1, &*arg2]; let args = bumpalo::vec![in &arena; &*arg1, &*arg2];
@ -780,7 +780,7 @@ mod test_parse {
let patterns = bumpalo::vec![in &arena; pattern]; let patterns = bumpalo::vec![in &arena; pattern];
let expected = Closure( let expected = Closure(
arena.alloc(patterns), arena.alloc(patterns),
arena.alloc(Located::new(0, 0, 10, 12, Int("42"))), arena.alloc(Located::new(0, 0, 10, 12, Num("42"))),
); );
let actual = parse_with(&arena, "\\Thing -> 42"); let actual = parse_with(&arena, "\\Thing -> 42");
@ -822,7 +822,7 @@ mod test_parse {
#[test] #[test]
fn packed_singleton_list() { fn packed_singleton_list() {
let arena = Bump::new(); let arena = Bump::new();
let elems = bumpalo::vec![in &arena; &*arena.alloc(Located::new(0, 0, 1, 2, Int("1")))]; let elems = bumpalo::vec![in &arena; &*arena.alloc(Located::new(0, 0, 1, 2, Num("1")))];
let expected = List(elems); let expected = List(elems);
let actual = parse_with(&arena, "[1]"); let actual = parse_with(&arena, "[1]");
@ -832,7 +832,7 @@ mod test_parse {
#[test] #[test]
fn spaced_singleton_list() { fn spaced_singleton_list() {
let arena = Bump::new(); let arena = Bump::new();
let elems = bumpalo::vec![in &arena; &*arena.alloc(Located::new(0, 0, 2, 3, Int("1")))]; let elems = bumpalo::vec![in &arena; &*arena.alloc(Located::new(0, 0, 2, 3, Num("1")))];
let expected = List(elems); let expected = List(elems);
let actual = parse_with(&arena, "[ 1 ]"); let actual = parse_with(&arena, "[ 1 ]");
@ -917,7 +917,7 @@ mod test_parse {
#[test] #[test]
fn basic_apply() { fn basic_apply() {
let arena = Bump::new(); let arena = Bump::new();
let arg = arena.alloc(Located::new(0, 0, 5, 6, Int("1"))); let arg = arena.alloc(Located::new(0, 0, 5, 6, Num("1")));
let args = bumpalo::vec![in &arena; &*arg]; let args = bumpalo::vec![in &arena; &*arg];
let expected = Expr::Apply( let expected = Expr::Apply(
arena.alloc(Located::new( arena.alloc(Located::new(
@ -941,8 +941,8 @@ mod test_parse {
#[test] #[test]
fn apply_two_args() { fn apply_two_args() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Num("12")));
let arg2 = arena.alloc(Located::new(0, 0, 10, 12, Int("34"))); let arg2 = arena.alloc(Located::new(0, 0, 10, 12, Num("34")));
let args = bumpalo::vec![in &arena; &*arg1, &*arg2]; let args = bumpalo::vec![in &arena; &*arg1, &*arg2];
let expected = Expr::Apply( let expected = Expr::Apply(
arena.alloc(Located::new( arena.alloc(Located::new(
@ -1019,7 +1019,7 @@ mod test_parse {
#[test] #[test]
fn parenthetical_apply() { fn parenthetical_apply() {
let arena = Bump::new(); let arena = Bump::new();
let arg = arena.alloc(Located::new(0, 0, 7, 8, Int("1"))); let arg = arena.alloc(Located::new(0, 0, 7, 8, Num("1")));
let args = bumpalo::vec![in &arena; &*arg]; let args = bumpalo::vec![in &arena; &*arg];
let parens_var = Expr::ParensAround(arena.alloc(Var { let parens_var = Expr::ParensAround(arena.alloc(Var {
module_name: "", module_name: "",
@ -1080,7 +1080,7 @@ mod test_parse {
#[test] #[test]
fn apply_unary_negation() { fn apply_unary_negation() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 7, 9, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 7, 9, Num("12")));
let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Negate); let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Negate);
let arg2 = arena.alloc(Located::new( let arg2 = arena.alloc(Located::new(
0, 0,
@ -1116,7 +1116,7 @@ mod test_parse {
#[test] #[test]
fn apply_unary_not() { fn apply_unary_not() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 7, 9, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 7, 9, Num("12")));
let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Not); let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Not);
let arg2 = arena.alloc(Located::new( let arg2 = arena.alloc(Located::new(
0, 0,
@ -1152,7 +1152,7 @@ mod test_parse {
#[test] #[test]
fn unary_negation_with_parens() { fn unary_negation_with_parens() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 8, 10, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 8, 10, Num("12")));
let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Negate); let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Negate);
let arg2 = arena.alloc(Located::new( let arg2 = arena.alloc(Located::new(
0, 0,
@ -1188,7 +1188,7 @@ mod test_parse {
#[test] #[test]
fn unary_not_with_parens() { fn unary_not_with_parens() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 8, 10, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 8, 10, Num("12")));
let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Not); let loc_op = Located::new(0, 0, 0, 1, UnaryOp::Not);
let arg2 = arena.alloc(Located::new( let arg2 = arena.alloc(Located::new(
0, 0,
@ -1224,7 +1224,7 @@ mod test_parse {
#[test] #[test]
fn unary_negation_arg() { fn unary_negation_arg() {
let arena = Bump::new(); let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Int("12"))); let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Num("12")));
let loc_op = Located::new(0, 0, 9, 10, UnaryOp::Negate); let loc_op = Located::new(0, 0, 9, 10, UnaryOp::Negate);
let var1 = Var { let var1 = Var {
module_name: "", module_name: "",
@ -1257,7 +1257,7 @@ mod test_parse {
let patterns = bumpalo::vec![in &arena; pattern]; let patterns = bumpalo::vec![in &arena; pattern];
let expected = Closure( let expected = Closure(
arena.alloc(patterns), arena.alloc(patterns),
arena.alloc(Located::new(0, 0, 6, 8, Int("42"))), arena.alloc(Located::new(0, 0, 6, 8, Num("42"))),
); );
let actual = parse_with(&arena, "\\a -> 42"); let actual = parse_with(&arena, "\\a -> 42");
@ -1271,7 +1271,7 @@ mod test_parse {
let patterns = bumpalo::vec![in &arena; pattern]; let patterns = bumpalo::vec![in &arena; pattern];
let expected = Closure( let expected = Closure(
arena.alloc(patterns), arena.alloc(patterns),
arena.alloc(Located::new(0, 0, 6, 8, Int("42"))), arena.alloc(Located::new(0, 0, 6, 8, Num("42"))),
); );
let actual = parse_with(&arena, "\\_ -> 42"); let actual = parse_with(&arena, "\\_ -> 42");
@ -1297,7 +1297,7 @@ mod test_parse {
let patterns = bumpalo::vec![in &arena; arg1, arg2]; let patterns = bumpalo::vec![in &arena; arg1, arg2];
let expected = Closure( let expected = Closure(
arena.alloc(patterns), arena.alloc(patterns),
arena.alloc(Located::new(0, 0, 9, 11, Int("42"))), arena.alloc(Located::new(0, 0, 9, 11, Num("42"))),
); );
let actual = parse_with(&arena, "\\a, b -> 42"); let actual = parse_with(&arena, "\\a, b -> 42");
@ -1313,7 +1313,7 @@ mod test_parse {
let patterns = bumpalo::vec![in &arena; arg1, arg2, arg3]; let patterns = bumpalo::vec![in &arena; arg1, arg2, arg3];
let expected = Closure( let expected = Closure(
arena.alloc(patterns), arena.alloc(patterns),
arena.alloc(Located::new(0, 0, 12, 14, Int("42"))), arena.alloc(Located::new(0, 0, 12, 14, Num("42"))),
); );
let actual = parse_with(&arena, "\\a, b, c -> 42"); let actual = parse_with(&arena, "\\a, b, c -> 42");
@ -1328,7 +1328,7 @@ mod test_parse {
let patterns = bumpalo::vec![in &arena; underscore1, underscore2]; let patterns = bumpalo::vec![in &arena; underscore1, underscore2];
let expected = Closure( let expected = Closure(
arena.alloc(patterns), arena.alloc(patterns),
arena.alloc(Located::new(0, 0, 9, 11, Int("42"))), arena.alloc(Located::new(0, 0, 9, 11, Num("42"))),
); );
let actual = parse_with(&arena, "\\_, _ -> 42"); let actual = parse_with(&arena, "\\_, _ -> 42");
@ -1343,11 +1343,11 @@ mod test_parse {
let newlines = bumpalo::vec![in &arena; Newline, Newline]; let newlines = bumpalo::vec![in &arena; Newline, Newline];
let def = Def::Body( let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 1, Identifier("x"))), arena.alloc(Located::new(1, 1, 0, 1, Identifier("x"))),
arena.alloc(Located::new(1, 1, 2, 3, Int("5"))), arena.alloc(Located::new(1, 1, 2, 3, Num("5"))),
); );
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 1, def)); let loc_def = &*arena.alloc(Located::new(1, 1, 0, 1, def));
let defs = bumpalo::vec![in &arena; loc_def]; let defs = bumpalo::vec![in &arena; loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")]; let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")];
let expected = Expr::SpaceBefore( let expected = Expr::SpaceBefore(
@ -1373,11 +1373,11 @@ mod test_parse {
let newlines = bumpalo::vec![in &arena; Newline, Newline]; let newlines = bumpalo::vec![in &arena; Newline, Newline];
let def = Def::Body( let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 1, Identifier("x"))), arena.alloc(Located::new(1, 1, 0, 1, Identifier("x"))),
arena.alloc(Located::new(1, 1, 4, 5, Int("5"))), arena.alloc(Located::new(1, 1, 4, 5, Num("5"))),
); );
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 1, def)); let loc_def = &*arena.alloc(Located::new(1, 1, 0, 1, def));
let defs = bumpalo::vec![in &arena; loc_def]; let defs = bumpalo::vec![in &arena; loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")]; let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")];
let expected = Expr::SpaceBefore( let expected = Expr::SpaceBefore(
@ -1404,13 +1404,13 @@ mod test_parse {
let newline = bumpalo::vec![in &arena; Newline]; let newline = bumpalo::vec![in &arena; Newline];
let def1 = Def::Body( let def1 = Def::Body(
arena.alloc(Located::new(1, 1, 0, 1, Identifier("x"))), arena.alloc(Located::new(1, 1, 0, 1, Identifier("x"))),
arena.alloc(Located::new(1, 1, 4, 5, Int("5"))), arena.alloc(Located::new(1, 1, 4, 5, Num("5"))),
); );
let loc_def1 = &*arena.alloc(Located::new(1, 1, 0, 1, def1)); let loc_def1 = &*arena.alloc(Located::new(1, 1, 0, 1, def1));
let def2 = Def::SpaceBefore( let def2 = Def::SpaceBefore(
&*arena.alloc(Def::Body( &*arena.alloc(Def::Body(
arena.alloc(Located::new(2, 2, 0, 1, Identifier("y"))), arena.alloc(Located::new(2, 2, 0, 1, Identifier("y"))),
arena.alloc(Located::new(2, 2, 4, 5, Int("6"))), arena.alloc(Located::new(2, 2, 4, 5, Num("6"))),
)), )),
newline.into_bump_slice(), newline.into_bump_slice(),
); );
@ -1419,7 +1419,7 @@ mod test_parse {
// gets added by .push(), since that's more efficient and since // gets added by .push(), since that's more efficient and since
// canonicalization is going to re-sort these all anyway.) // canonicalization is going to re-sort these all anyway.)
let defs = bumpalo::vec![in &arena; loc_def2, loc_def1]; let defs = bumpalo::vec![in &arena; loc_def2, loc_def1];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(4, 4, 0, 2, ret); let loc_ret = Located::new(4, 4, 0, 2, ret);
let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")]; let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")];
let expected = Expr::SpaceBefore( let expected = Expr::SpaceBefore(
@ -1457,13 +1457,13 @@ mod test_parse {
8, 8,
RecordDestructure(fields.into_bump_slice()), RecordDestructure(fields.into_bump_slice()),
)), )),
arena.alloc(Located::new(1, 1, 11, 12, Int("5"))), arena.alloc(Located::new(1, 1, 11, 12, Num("5"))),
); );
let loc_def1 = &*arena.alloc(Located::new(1, 1, 1, 8, def1)); let loc_def1 = &*arena.alloc(Located::new(1, 1, 1, 8, def1));
let def2 = Def::SpaceBefore( let def2 = Def::SpaceBefore(
&*arena.alloc(Def::Body( &*arena.alloc(Def::Body(
arena.alloc(Located::new(2, 2, 0, 1, Identifier("y"))), arena.alloc(Located::new(2, 2, 0, 1, Identifier("y"))),
arena.alloc(Located::new(2, 2, 4, 5, Int("6"))), arena.alloc(Located::new(2, 2, 4, 5, Num("6"))),
)), )),
newline.into_bump_slice(), newline.into_bump_slice(),
); );
@ -1472,7 +1472,7 @@ mod test_parse {
// gets added by .push(), since that's more efficient and since // gets added by .push(), since that's more efficient and since
// canonicalization is going to re-sort these all anyway.) // canonicalization is going to re-sort these all anyway.)
let defs = bumpalo::vec![in &arena; loc_def2, loc_def1]; let defs = bumpalo::vec![in &arena; loc_def2, loc_def1];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(4, 4, 0, 2, ret); let loc_ret = Located::new(4, 4, 0, 2, ret);
let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")]; let reset_indentation = bumpalo::vec![in &arena; LineComment(" leading comment")];
let expected = Expr::SpaceBefore( let expected = Expr::SpaceBefore(
@ -1505,14 +1505,14 @@ mod test_parse {
); );
let def = Def::Body( let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))), arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 7, Int("4"))), arena.alloc(Located::new(1, 1, 6, 7, Num("4"))),
); );
let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice()); let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 7, spaced_def)); let loc_def = &*arena.alloc(Located::new(1, 1, 0, 7, spaced_def));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def]; let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1552,7 +1552,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann]; let defs = bumpalo::vec![in &arena; loc_ann];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(2, 2, 0, 2, ret); let loc_ret = Located::new(2, 2, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1588,7 +1588,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 4, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 4, signature));
let defs = bumpalo::vec![in &arena; loc_ann]; let defs = bumpalo::vec![in &arena; loc_ann];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(2, 2, 0, 2, ret); let loc_ret = Located::new(2, 2, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1630,7 +1630,7 @@ mod test_parse {
Located::new(1,1,7,8, Identifier("x")), Located::new(1,1,7,8, Identifier("x")),
Located::new(1,1,10,11, Underscore) Located::new(1,1,10,11, Underscore)
]; ];
let body = Located::new(1, 1, 15, 17, Int("42")); let body = Located::new(1, 1, 15, 17, Num("42"));
let closure = Expr::Closure(&args, &body); let closure = Expr::Closure(&args, &body);
@ -1643,7 +1643,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def]; let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1698,7 +1698,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def]; let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1751,7 +1751,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def]; let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1805,7 +1805,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def]; let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1858,7 +1858,7 @@ mod test_parse {
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature)); let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def]; let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice()); let ret = Expr::SpaceBefore(arena.alloc(Num("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret); let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret)); let expected = Defs(defs, arena.alloc(loc_ret));
@ -1884,7 +1884,7 @@ mod test_parse {
let pattern1 = let pattern1 =
Pattern::SpaceBefore(arena.alloc(StrLiteral("blah")), newlines.into_bump_slice()); Pattern::SpaceBefore(arena.alloc(StrLiteral("blah")), newlines.into_bump_slice());
let loc_pattern1 = Located::new(1, 1, 1, 7, pattern1); let loc_pattern1 = Located::new(1, 1, 1, 7, pattern1);
let expr1 = Int("1"); let expr1 = Num("1");
let loc_expr1 = Located::new(1, 1, 11, 12, expr1); let loc_expr1 = Located::new(1, 1, 11, 12, expr1);
let branch1 = &*arena.alloc(WhenBranch { let branch1 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern1], patterns: bumpalo::vec![in &arena;loc_pattern1],
@ -1895,7 +1895,7 @@ mod test_parse {
let pattern2 = let pattern2 =
Pattern::SpaceBefore(arena.alloc(StrLiteral("mise")), newlines.into_bump_slice()); Pattern::SpaceBefore(arena.alloc(StrLiteral("mise")), newlines.into_bump_slice());
let loc_pattern2 = Located::new(2, 2, 1, 7, pattern2); let loc_pattern2 = Located::new(2, 2, 1, 7, pattern2);
let expr2 = Int("2"); let expr2 = Num("2");
let loc_expr2 = Located::new(2, 2, 11, 12, expr2); let loc_expr2 = Located::new(2, 2, 11, 12, expr2);
let branch2 = &*arena.alloc(WhenBranch { let branch2 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern2 ], patterns: bumpalo::vec![in &arena;loc_pattern2 ],
@ -1928,9 +1928,9 @@ mod test_parse {
let arena = Bump::new(); let arena = Bump::new();
let newlines = bumpalo::vec![in &arena; Newline]; let newlines = bumpalo::vec![in &arena; Newline];
let pattern1 = let pattern1 =
Pattern::SpaceBefore(arena.alloc(IntLiteral("1")), newlines.into_bump_slice()); Pattern::SpaceBefore(arena.alloc(NumLiteral("1")), newlines.into_bump_slice());
let loc_pattern1 = Located::new(1, 1, 1, 2, pattern1); let loc_pattern1 = Located::new(1, 1, 1, 2, pattern1);
let expr1 = Int("2"); let expr1 = Num("2");
let loc_expr1 = Located::new(1, 1, 6, 7, expr1); let loc_expr1 = Located::new(1, 1, 6, 7, expr1);
let branch1 = &*arena.alloc(WhenBranch { let branch1 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern1], patterns: bumpalo::vec![in &arena;loc_pattern1],
@ -1939,9 +1939,9 @@ mod test_parse {
}); });
let newlines = bumpalo::vec![in &arena; Newline]; let newlines = bumpalo::vec![in &arena; Newline];
let pattern2 = let pattern2 =
Pattern::SpaceBefore(arena.alloc(IntLiteral("3")), newlines.into_bump_slice()); Pattern::SpaceBefore(arena.alloc(NumLiteral("3")), newlines.into_bump_slice());
let loc_pattern2 = Located::new(2, 2, 1, 2, pattern2); let loc_pattern2 = Located::new(2, 2, 1, 2, pattern2);
let expr2 = Int("4"); let expr2 = Num("4");
let loc_expr2 = Located::new(2, 2, 6, 7, expr2); let loc_expr2 = Located::new(2, 2, 6, 7, expr2);
let branch2 = &*arena.alloc(WhenBranch { let branch2 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern2], patterns: bumpalo::vec![in &arena;loc_pattern2],
@ -1979,7 +1979,7 @@ mod test_parse {
newlines.into_bump_slice(), newlines.into_bump_slice(),
); );
let loc_pattern1 = Located::new(1, 1, 1, 6, pattern1); let loc_pattern1 = Located::new(1, 1, 1, 6, pattern1);
let expr1 = Int("2"); let expr1 = Num("2");
let loc_expr1 = Located::new(1, 1, 10, 11, expr1); let loc_expr1 = Located::new(1, 1, 10, 11, expr1);
let branch1 = &*arena.alloc(WhenBranch { let branch1 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern1 ], patterns: bumpalo::vec![in &arena;loc_pattern1 ],
@ -1993,7 +1993,7 @@ mod test_parse {
newlines.into_bump_slice(), newlines.into_bump_slice(),
); );
let loc_pattern2 = Located::new(2, 2, 1, 9, pattern2); let loc_pattern2 = Located::new(2, 2, 1, 9, pattern2);
let expr2 = Int("4"); let expr2 = Num("4");
let loc_expr2 = Located::new(2, 2, 13, 14, expr2); let loc_expr2 = Located::new(2, 2, 13, 14, expr2);
let branch2 = &*arena.alloc(WhenBranch { let branch2 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern2 ], patterns: bumpalo::vec![in &arena;loc_pattern2 ],
@ -2030,7 +2030,7 @@ mod test_parse {
let pattern1_alt = StrLiteral("blop"); let pattern1_alt = StrLiteral("blop");
let loc_pattern1 = Located::new(1, 1, 1, 7, pattern1); let loc_pattern1 = Located::new(1, 1, 1, 7, pattern1);
let loc_pattern1_alt = Located::new(1, 1, 10, 16, pattern1_alt); let loc_pattern1_alt = Located::new(1, 1, 10, 16, pattern1_alt);
let expr1 = Int("1"); let expr1 = Num("1");
let loc_expr1 = Located::new(1, 1, 20, 21, expr1); let loc_expr1 = Located::new(1, 1, 20, 21, expr1);
let branch1 = &*arena.alloc(WhenBranch { let branch1 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern1, loc_pattern1_alt], patterns: bumpalo::vec![in &arena;loc_pattern1, loc_pattern1_alt],
@ -2045,7 +2045,7 @@ mod test_parse {
Pattern::SpaceBefore(arena.alloc(StrLiteral("bar")), newlines.into_bump_slice()); Pattern::SpaceBefore(arena.alloc(StrLiteral("bar")), newlines.into_bump_slice());
let loc_pattern2 = Located::new(2, 2, 1, 6, pattern2); let loc_pattern2 = Located::new(2, 2, 1, 6, pattern2);
let loc_pattern2_alt = Located::new(3, 3, 1, 6, pattern2_alt); let loc_pattern2_alt = Located::new(3, 3, 1, 6, pattern2_alt);
let expr2 = Int("2"); let expr2 = Num("2");
let loc_expr2 = Located::new(3, 3, 10, 11, expr2); let loc_expr2 = Located::new(3, 3, 10, 11, expr2);
let branch2 = &*arena.alloc(WhenBranch { let branch2 = &*arena.alloc(WhenBranch {
patterns: bumpalo::vec![in &arena;loc_pattern2, loc_pattern2_alt], patterns: bumpalo::vec![in &arena;loc_pattern2, loc_pattern2_alt],
@ -2148,7 +2148,7 @@ mod test_parse {
let def1 = SpaceAfter( let def1 = SpaceAfter(
arena.alloc(Body( arena.alloc(Body(
arena.alloc(Located::new(0, 0, 0, 3, pattern1)), arena.alloc(Located::new(0, 0, 0, 3, pattern1)),
arena.alloc(Located::new(0, 0, 6, 7, Int("1"))), arena.alloc(Located::new(0, 0, 6, 7, Num("1"))),
)), )),
newlines1.into_bump_slice(), newlines1.into_bump_slice(),
); );


@ -1,4 +1,5 @@
use inlinable_string::InlinableString; use inlinable_string::InlinableString;
use roc_collections::all::MutSet;
use roc_module::ident::Ident; use roc_module::ident::Ident;
use roc_module::symbol::{ModuleId, Symbol}; use roc_module::symbol::{ModuleId, Symbol};
use roc_parse::operator::BinOp; use roc_parse::operator::BinOp;
@ -10,7 +11,9 @@ use roc_region::all::{Located, Region};
pub enum Problem { pub enum Problem {
UnusedDef(Symbol, Region), UnusedDef(Symbol, Region),
UnusedImport(ModuleId, Region), UnusedImport(ModuleId, Region),
UnusedArgument(Symbol, Region), /// First symbol is the name of the closure with that argument
/// Second symbol is the name of the argument that is unused
UnusedArgument(Symbol, Symbol, Region),
PrecedenceProblem(PrecedenceProblem), PrecedenceProblem(PrecedenceProblem),
// Example: (5 = 1 + 2) is an unsupported pattern in an assignment; Int patterns aren't allowed in assignments! // Example: (5 = 1 + 2) is an unsupported pattern in an assignment; Int patterns aren't allowed in assignments!
UnsupportedPattern(PatternType, Region), UnsupportedPattern(PatternType, Region),
@ -23,7 +26,7 @@ pub enum Problem {
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub enum PrecedenceProblem { pub enum PrecedenceProblem {
BothNonAssociative(Located<BinOp>, Located<BinOp>), BothNonAssociative(Region, Located<BinOp>, Located<BinOp>),
} }
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
@ -32,8 +35,10 @@ pub enum RuntimeError {
original_region: Region, original_region: Region,
shadow: Located<Ident>, shadow: Located<Ident>,
}, },
// Example: (5 = 1 + 2) is an unsupported pattern in an assignment; Int patterns aren't allowed in assignments!
UnsupportedPattern(Region),
UnrecognizedFunctionName(Located<InlinableString>), UnrecognizedFunctionName(Located<InlinableString>),
LookupNotInScope(Located<InlinableString>), LookupNotInScope(Located<InlinableString>, MutSet<Box<str>>),
ValueNotExposed { ValueNotExposed {
module_name: InlinableString, module_name: InlinableString,
ident: InlinableString, ident: InlinableString,
@ -45,6 +50,8 @@ pub enum RuntimeError {
region: Region, region: Region,
}, },
InvalidPrecedence(PrecedenceProblem, Region), InvalidPrecedence(PrecedenceProblem, Region),
MalformedIdentifier(Box<str>, Region),
MalformedClosure(Region),
FloatOutsideRange(Box<str>), FloatOutsideRange(Box<str>),
IntOutsideRange(Box<str>), IntOutsideRange(Box<str>),
InvalidHex(std::num::ParseIntError, Box<str>), InvalidHex(std::num::ParseIntError, Box<str>),


@ -30,6 +30,23 @@ impl Region {
} }
} }
pub fn contains(&self, other: &Self) -> bool {
use std::cmp::Ordering::*;
match self.start_line.cmp(&other.start_line) {
Greater => false,
Equal => match self.end_line.cmp(&other.end_line) {
Less => false,
Equal => self.start_col <= other.start_col && self.end_col >= other.end_col,
Greater => self.start_col >= other.start_col,
},
Less => match self.end_line.cmp(&other.end_line) {
Less => false,
Equal => self.end_col >= other.end_col,
Greater => true,
},
}
}
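The new `contains` helper compares two regions first by line and then by column. A minimal usage sketch, assuming `Region`'s fields stay public as this diff uses them (the positions are made up):

```
#[test]
fn outer_region_contains_inner() {
    // A region covering lines 1-3 contains one confined to line 2, but not vice versa.
    let outer = Region { start_line: 1, start_col: 0, end_line: 3, end_col: 10 };
    let inner = Region { start_line: 2, start_col: 4, end_line: 2, end_col: 8 };

    assert!(outer.contains(&inner));
    assert!(!inner.contains(&outer));
}
```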
pub fn span_across(start: &Region, end: &Region) -> Self { pub fn span_across(start: &Region, end: &Region) -> Self {
Region { Region {
start_line: start.start_line, start_line: start.start_line,
@ -38,6 +55,25 @@ impl Region {
end_col: end.end_col, end_col: end.end_col,
} }
} }
pub fn across_all<'a, I>(regions: I) -> Self
where
I: IntoIterator<Item = &'a Region>,
{
let mut it = regions.into_iter();
if let Some(first) = it.next() {
let mut result = *first;
for r in it {
result = Self::span_across(&result, r);
}
result
} else {
Self::zero()
}
}
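`across_all` folds `span_across` over whatever iterator of regions it is given, and falls back to `Region::zero()` when the iterator is empty. A sketch under the same public-fields assumption:

```
#[test]
fn across_all_spans_first_to_last() {
    let first = Region { start_line: 0, start_col: 4, end_line: 0, end_col: 9 };
    let second = Region { start_line: 2, start_col: 0, end_line: 2, end_col: 3 };

    // The combined region starts where `first` starts and ends where `second` ends.
    let all = Region::across_all(&[first, second]);
    assert_eq!(all, Region { start_line: 0, start_col: 4, end_line: 2, end_col: 3 });

    // No regions at all collapses to Region::zero().
    let none: [Region; 0] = [];
    assert_eq!(Region::across_all(&none), Region::zero());
}
```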
} }
#[test] #[test]
@ -89,6 +125,11 @@ impl<T> Located<T> {
pub fn at(region: Region, value: T) -> Located<T> { pub fn at(region: Region, value: T) -> Located<T> {
Located { value, region } Located { value, region }
} }
pub fn at_zero(value: T) -> Located<T> {
let region = Region::zero();
Located { value, region }
}
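`at_zero` is a small convenience for wrapping a value that has no meaningful source location; for illustration:

```
let loc = Located::at_zero("x");
assert_eq!(loc.region, Region::zero());
assert_eq!(loc.value, "x");
```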
} }
impl<T> Located<T> { impl<T> Located<T> {


@ -8,10 +8,25 @@ edition = "2018"
roc_collections = { path = "../collections" } roc_collections = { path = "../collections" }
roc_region = { path = "../region" } roc_region = { path = "../region" }
roc_module = { path = "../module" } roc_module = { path = "../module" }
roc_parse = { path = "../parse" }
roc_problem = { path = "../problem" } roc_problem = { path = "../problem" }
roc_types = { path = "../types" } roc_types = { path = "../types" }
roc_load = { path = "../load" }
roc_can = { path = "../can" }
roc_solve = { path = "../solve" }
roc_mono = { path = "../mono" }
ven_pretty = { path = "../../vendor/pretty" }
inlinable_string = "0.1.0"
im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version!
distance = "0.4.0"
bumpalo = { version = "3.2", features = ["collections"] }
[dev-dependencies] [dev-dependencies]
roc_constrain = { path = "../constrain" }
roc_builtins = { path = "../builtins" }
roc_problem = { path = "../problem" }
roc_parse = { path = "../parse" }
pretty_assertions = "0.5.1 " pretty_assertions = "0.5.1 "
maplit = "1.0.1" maplit = "1.0.1"
indoc = "0.3.3" indoc = "0.3.3"


@ -12,3 +12,4 @@
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod report; pub mod report;
pub mod type_error;

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,352 @@
extern crate bumpalo;
use self::bumpalo::Bump;
use roc_builtins::unique::uniq_stdlib;
use roc_can::constraint::Constraint;
use roc_can::env::Env;
use roc_can::expected::Expected;
use roc_can::expr::{canonicalize_expr, Expr, Output};
use roc_can::operator;
use roc_can::scope::Scope;
use roc_collections::all::{ImMap, MutMap, SendMap, SendSet};
use roc_constrain::expr::constrain_expr;
use roc_constrain::module::{constrain_imported_values, load_builtin_aliases, Import};
use roc_module::ident::Ident;
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, Symbol};
use roc_parse::ast::{self, Attempting};
use roc_parse::blankspace::space0_before;
use roc_parse::parser::{loc, Fail, Parser, State};
use roc_problem::can::Problem;
use roc_region::all::{Located, Region};
use roc_solve::solve;
use roc_types::subs::{Content, Subs, VarStore, Variable};
use roc_types::types::Type;
use std::hash::Hash;
use std::path::{Path, PathBuf};
pub fn test_home() -> ModuleId {
ModuleIds::default().get_or_insert(&"Test".into())
}
#[allow(dead_code)]
pub fn infer_expr(
subs: Subs,
problems: &mut Vec<solve::TypeError>,
constraint: &Constraint,
expr_var: Variable,
) -> (Content, Subs) {
let env = solve::Env {
aliases: MutMap::default(),
vars_by_symbol: SendMap::default(),
};
let (solved, _) = solve::run(&env, problems, subs, constraint);
let content = solved.inner().get_without_compacting(expr_var).content;
(content, solved.into_inner())
}
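A sketch of how these helpers are meant to be combined in a test, mirroring the `infer_eq_help` function further down in this diff. The `crate::helpers` module path and the expected `"Float"` rendering are assumptions, not part of this commit:

```
use crate::helpers::{can_expr, infer_expr, CanExprOut};
use roc_types::pretty_print::content_to_string;
use roc_types::subs::Subs;

#[test]
fn infer_a_float_literal() {
    let CanExprOut { var, var_store, constraint, home, interns, .. } = can_expr("0.5");
    let subs = Subs::new(var_store.into());
    let mut type_problems = Vec::new();

    let (content, mut subs) = infer_expr(subs, &mut type_problems, &constraint, var);
    assert!(type_problems.is_empty());

    // Assumes a bare float literal still pretty-prints as `Float`.
    let inferred = content_to_string(content, &mut subs, home, &interns);
    assert_eq!(inferred, "Float");
}
```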
/// Used in the with_larger_debug_stack() function, for tests that otherwise
/// run out of stack space in debug builds (but don't in --release builds)
#[allow(dead_code)]
const EXPANDED_STACK_SIZE: usize = 4 * 1024 * 1024;
/// Without this, some tests pass in `cargo test --release` but fail without
/// the --release flag because they run out of stack space. This increases
/// stack size for debug builds only, while leaving the stack space at the default
/// amount for release builds.
#[allow(dead_code)]
#[cfg(debug_assertions)]
pub fn with_larger_debug_stack<F>(run_test: F)
where
F: FnOnce() -> (),
F: Send,
F: 'static,
{
std::thread::Builder::new()
.stack_size(EXPANDED_STACK_SIZE)
.spawn(run_test)
.expect("Error while spawning expanded dev stack size thread")
.join()
.expect("Error while joining expanded dev stack size thread")
}
/// In --release builds, don't increase the stack size. Run the test normally.
/// This way, we find out if any of our tests are blowing the stack even after
/// optimizations in release builds.
#[allow(dead_code)]
#[cfg(not(debug_assertions))]
#[inline(always)]
pub fn with_larger_debug_stack<F>(run_test: F)
where
F: FnOnce() -> (),
F: Send,
F: 'static,
{
run_test()
}
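Both variants take the same closure, so a stack-hungry test only needs to wrap its body; a hypothetical call site:

```
#[test]
fn deeply_nested_when() {
    with_larger_debug_stack(|| {
        // Runs on the larger 4 MB stack in debug builds, and directly in release builds.
        let arena = Bump::new();
        let _ = parse_with(&arena, "x + 2");
    });
}
```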
#[allow(dead_code)]
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, Fail> {
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
}
#[allow(dead_code)]
pub fn parse_loc_with<'a>(arena: &'a Bump, input: &'a str) -> Result<Located<ast::Expr<'a>>, Fail> {
let state = State::new(&input, Attempting::Module);
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
let answer = parser.parse(&arena, state);
answer
.map(|(loc_expr, _)| loc_expr)
.map_err(|(fail, _)| fail)
}
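These are the same parsing entry points the parser tests earlier in this diff rely on; a minimal, hypothetical call:

```
let arena = Bump::new();
let expr = parse_with(&arena, "1 - 2").expect("the expression should parse");
// `expr` is a bump-allocated ast::Expr, e.g. BinOp(..) for the input above.
```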
#[allow(dead_code)]
pub fn can_expr(expr_str: &str) -> CanExprOut {
can_expr_with(&Bump::new(), test_home(), expr_str)
}
#[allow(dead_code)]
pub fn uniq_expr(
expr_str: &str,
) -> (
Located<Expr>,
Output,
Vec<Problem>,
Subs,
Variable,
Constraint,
ModuleId,
Interns,
) {
let declared_idents: &ImMap<Ident, (Symbol, Region)> = &ImMap::default();
uniq_expr_with(&Bump::new(), expr_str, declared_idents)
}
#[allow(dead_code)]
pub fn uniq_expr_with(
arena: &Bump,
expr_str: &str,
declared_idents: &ImMap<Ident, (Symbol, Region)>,
) -> (
Located<Expr>,
Output,
Vec<Problem>,
Subs,
Variable,
Constraint,
ModuleId,
Interns,
) {
let home = test_home();
let CanExprOut {
loc_expr,
output,
problems,
var_store: old_var_store,
var,
interns,
..
} = can_expr_with(arena, home, expr_str);
// double check
let var_store = VarStore::new(old_var_store.fresh());
let expected2 = Expected::NoExpectation(Type::Variable(var));
let constraint = roc_constrain::uniq::constrain_declaration(
home,
&var_store,
Region::zero(),
&loc_expr,
declared_idents,
expected2,
);
let stdlib = uniq_stdlib();
let types = stdlib.types;
let imports: Vec<_> = types
.iter()
.map(|(symbol, (solved_type, region))| Import {
loc_symbol: Located::at(*region, *symbol),
solved_type: solved_type,
})
.collect();
// load builtin values
// TODO what to do with those rigids?
let (_introduced_rigids, constraint) =
constrain_imported_values(imports, constraint, &var_store);
// load builtin types
let mut constraint = load_builtin_aliases(&stdlib.aliases, constraint, &var_store);
constraint.instantiate_aliases(&var_store);
let subs2 = Subs::new(var_store.into());
(
loc_expr, output, problems, subs2, var, constraint, home, interns,
)
}
pub struct CanExprOut {
pub loc_expr: Located<Expr>,
pub output: Output,
pub problems: Vec<Problem>,
pub home: ModuleId,
pub interns: Interns,
pub var_store: VarStore,
pub var: Variable,
pub constraint: Constraint,
}
#[allow(dead_code)]
pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut {
let loc_expr = parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
panic!(
"can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}",
expr_str, e
)
});
let var_store = VarStore::default();
let var = var_store.fresh();
let expected = Expected::NoExpectation(Type::Variable(var));
let module_ids = ModuleIds::default();
// Desugar operators (convert them to Apply calls, taking into account
// operator precedence and associativity rules), before doing other canonicalization.
//
// If we did this *during* canonicalization, then each time we
// visited a BinOp node we'd recursively try to apply this to each of its nested
// operators, and then again on *their* nested operators, ultimately applying the
// rules multiple times unnecessarily.
let loc_expr = operator::desugar_expr(arena, &loc_expr);
let mut scope = Scope::new(home);
let dep_idents = IdentIds::exposed_builtins(0);
let mut env = Env::new(home, dep_idents, &module_ids, IdentIds::default());
let (loc_expr, output) = canonicalize_expr(
&mut env,
&var_store,
&mut scope,
loc_expr.region,
&loc_expr.value,
);
let constraint = constrain_expr(
&roc_constrain::expr::Env {
rigids: ImMap::default(),
home,
},
loc_expr.region,
&loc_expr.value,
expected,
);
let types = roc_builtins::std::types();
let imports: Vec<_> = types
.iter()
.map(|(symbol, (solved_type, region))| Import {
loc_symbol: Located::at(*region, *symbol),
solved_type: solved_type,
})
.collect();
//load builtin values
let (_introduced_rigids, constraint) =
constrain_imported_values(imports, constraint, &var_store);
//load builtin types
let mut constraint =
load_builtin_aliases(&roc_builtins::std::aliases(), constraint, &var_store);
constraint.instantiate_aliases(&var_store);
let mut all_ident_ids = MutMap::default();
// When pretty printing types, we may need the exposed builtins,
// so include them in the Interns we'll ultimately return.
for (module_id, ident_ids) in IdentIds::exposed_builtins(0) {
all_ident_ids.insert(module_id, ident_ids);
}
all_ident_ids.insert(home, env.ident_ids);
let interns = Interns {
module_ids: env.module_ids.clone(),
all_ident_ids,
};
CanExprOut {
loc_expr,
output,
problems: env.problems,
home: env.home,
var_store,
interns,
var,
constraint,
}
}
#[allow(dead_code)]
pub fn mut_map_from_pairs<K, V, I>(pairs: I) -> MutMap<K, V>
where
I: IntoIterator<Item = (K, V)>,
K: Hash + Eq,
{
let mut answer = MutMap::default();
for (key, value) in pairs {
answer.insert(key, value);
}
answer
}
#[allow(dead_code)]
pub fn im_map_from_pairs<K, V, I>(pairs: I) -> ImMap<K, V>
where
I: IntoIterator<Item = (K, V)>,
K: Hash + Eq + Clone,
V: Clone,
{
let mut answer = ImMap::default();
for (key, value) in pairs {
answer.insert(key, value);
}
answer
}
#[allow(dead_code)]
pub fn send_set_from<V, I>(elems: I) -> SendSet<V>
where
I: IntoIterator<Item = V>,
V: Hash + Eq + Clone,
{
let mut answer = SendSet::default();
for elem in elems {
answer.insert(elem);
}
answer
}
#[allow(dead_code)]
pub fn fixtures_dir<'a>() -> PathBuf {
Path::new("tests").join("fixtures").join("build")
}
#[allow(dead_code)]
pub fn builtins_dir<'a>() -> PathBuf {
PathBuf::new().join("builtins")
}

File diff suppressed because it is too large


@ -1,19 +1,28 @@
use roc_can::constraint::Constraint::{self, *}; use roc_can::constraint::Constraint::{self, *};
use roc_can::expected::{Expected, PExpected};
use roc_collections::all::{ImMap, MutMap, SendMap}; use roc_collections::all::{ImMap, MutMap, SendMap};
use roc_module::ident::TagName; use roc_module::ident::TagName;
use roc_module::symbol::{ModuleId, Symbol}; use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::Located; use roc_region::all::{Located, Region};
use roc_types::boolean_algebra::{self, Atom}; use roc_types::boolean_algebra::{self, Atom};
use roc_types::solved_types::{Solved, SolvedType}; use roc_types::solved_types::{Solved, SolvedType};
use roc_types::subs::{Content, Descriptor, FlatType, Mark, OptVariable, Rank, Subs, Variable}; use roc_types::subs::{Content, Descriptor, FlatType, Mark, OptVariable, Rank, Subs, Variable};
use roc_types::types::Type::{self, *}; use roc_types::types::Type::{self, *};
use roc_types::types::{Alias, Problem}; use roc_types::types::{Alias, Category, ErrorType, PatternCategory};
use roc_unify::unify::{unify, Unified}; use roc_unify::unify::unify;
use roc_unify::unify::Unified::*;
// Type checking system adapted from Elm by Evan Czaplicki, BSD-3-Clause Licensed // Type checking system adapted from Elm by Evan Czaplicki, BSD-3-Clause Licensed
// https://github.com/elm/compiler // https://github.com/elm/compiler
// Thank you, Evan! // Thank you, Evan!
#[derive(PartialEq, Debug, Clone)]
pub enum TypeError {
BadExpr(Region, Category, ErrorType, Expected<ErrorType>),
BadPattern(Region, PatternCategory, ErrorType, PExpected<ErrorType>),
CircularType(Region, Symbol, ErrorType),
}
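Every `TypeError` variant carries the `Region` it refers to as its first field, so reporting code can recover the location with a plain match; a small sketch (the helper name is made up):

```
// Hypothetical helper, not part of this commit.
fn region_of(error: &TypeError) -> Region {
    match error {
        TypeError::BadExpr(region, _, _, _) => *region,
        TypeError::BadPattern(region, _, _, _) => *region,
        TypeError::CircularType(region, _, _) => *region,
    }
}
```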
pub type SubsByModule = MutMap<ModuleId, ExposedModuleTypes>; pub type SubsByModule = MutMap<ModuleId, ExposedModuleTypes>;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -85,7 +94,7 @@ struct State {
pub fn run( pub fn run(
env: &Env, env: &Env,
problems: &mut Vec<Problem>, problems: &mut Vec<TypeError>,
mut subs: Subs, mut subs: Subs,
constraint: &Constraint, constraint: &Constraint,
) -> (Solved<Subs>, Env) { ) -> (Solved<Subs>, Env) {
@ -115,7 +124,7 @@ fn solve(
state: State, state: State,
rank: Rank, rank: Rank,
pools: &mut Pools, pools: &mut Pools,
problems: &mut Vec<Problem>, problems: &mut Vec<TypeError>,
cached_aliases: &mut MutMap<Symbol, Variable>, cached_aliases: &mut MutMap<Symbol, Variable>,
subs: &mut Subs, subs: &mut Subs,
constraint: &Constraint, constraint: &Constraint,
@ -129,28 +138,42 @@ fn solve(
copy copy
} }
Eq(typ, expected_type, _region) => { Eq(typ, expectation, category, region) => {
let actual = type_to_var(subs, rank, pools, cached_aliases, typ); let actual = type_to_var(subs, rank, pools, cached_aliases, typ);
let expected = type_to_var( let expected = type_to_var(
subs, subs,
rank, rank,
pools, pools,
cached_aliases, cached_aliases,
expected_type.get_type_ref(), expectation.get_type_ref(),
); );
let Unified { vars, mismatches } = unify(subs, actual, expected);
// TODO use region when reporting a problem
problems.extend(mismatches);
match unify(subs, actual, expected) {
Success(vars) => {
introduce(subs, rank, pools, &vars); introduce(subs, rank, pools, &vars);
state state
} }
Lookup(symbol, expected_type, _region) => { Failure(vars, actual_type, expected_type) => {
introduce(subs, rank, pools, &vars);
let problem = TypeError::BadExpr(
*region,
category.clone(),
actual_type,
expectation.clone().replace(expected_type),
);
problems.push(problem);
state
}
}
}
Lookup(symbol, expectation, region) => {
let var = *env.vars_by_symbol.get(&symbol).unwrap_or_else(|| { let var = *env.vars_by_symbol.get(&symbol).unwrap_or_else(|| {
// TODO Instead of panicking, solve this as True and record // TODO Instead of panicking, solve this as True and record
// a Problem ("module Foo does not expose `bar`") for later. // a TypeError ("module Foo does not expose `bar`") for later.
panic!( panic!(
"Could not find symbol {:?} in vars_by_symbol {:?}", "Could not find symbol {:?} in vars_by_symbol {:?}",
symbol, env.vars_by_symbol symbol, env.vars_by_symbol
@ -184,17 +207,31 @@ fn solve(
rank, rank,
pools, pools,
cached_aliases, cached_aliases,
expected_type.get_type_ref(), expectation.get_type_ref(),
); );
let Unified { vars, mismatches } = unify(subs, actual, expected); match unify(subs, actual, expected) {
Success(vars) => {
// TODO use region when reporting a problem
problems.extend(mismatches);
introduce(subs, rank, pools, &vars); introduce(subs, rank, pools, &vars);
state state
} }
Failure(vars, actual_type, expected_type) => {
introduce(subs, rank, pools, &vars);
let problem = TypeError::BadExpr(
*region,
Category::Lookup(*symbol),
actual_type,
expectation.clone().replace(expected_type),
);
problems.push(problem);
state
}
}
}
And(sub_constraints) => { And(sub_constraints) => {
let mut state = state; let mut state = state;
@ -213,18 +250,38 @@ fn solve(
state state
} }
Pattern(_region, _category, typ, expected) => { Pattern(region, category, typ, expectation) => {
let actual = type_to_var(subs, rank, pools, cached_aliases, typ); let actual = type_to_var(subs, rank, pools, cached_aliases, typ);
let expected = type_to_var(subs, rank, pools, cached_aliases, expected.get_type_ref()); let expected = type_to_var(
let Unified { vars, mismatches } = unify(subs, actual, expected); subs,
rank,
// TODO use region when reporting a problem pools,
problems.extend(mismatches); cached_aliases,
expectation.get_type_ref(),
);
match unify(subs, actual, expected) {
Success(vars) => {
introduce(subs, rank, pools, &vars); introduce(subs, rank, pools, &vars);
state state
} }
Failure(vars, actual_type, expected_type) => {
introduce(subs, rank, pools, &vars);
let problem = TypeError::BadPattern(
*region,
category.clone(),
actual_type,
expectation.clone().replace(expected_type),
);
problems.push(problem);
state
}
}
}
Let(let_con) => { Let(let_con) => {
match &let_con.ret_constraint { match &let_con.ret_constraint {
True if let_con.rigid_vars.is_empty() => { True if let_con.rigid_vars.is_empty() => {
@ -504,8 +561,17 @@ fn type_to_variable(
); );
} }
let ext_var = type_to_variable(subs, rank, pools, cached, ext); let temp_ext_var = type_to_variable(subs, rank, pools, cached, ext);
let content = Content::Structure(FlatType::Record(field_vars, ext_var)); let new_ext_var = match roc_types::pretty_print::chase_ext_record(
subs,
temp_ext_var,
&mut field_vars,
) {
Ok(()) => Variable::EMPTY_RECORD,
Err((new, _)) => new,
};
let content = Content::Structure(FlatType::Record(field_vars, new_ext_var));
register(subs, rank, pools, content) register(subs, rank, pools, content)
} }
@ -522,8 +588,19 @@ fn type_to_variable(
tag_vars.insert(tag.clone(), tag_argument_vars); tag_vars.insert(tag.clone(), tag_argument_vars);
} }
let ext_var = type_to_variable(subs, rank, pools, cached, ext); let temp_ext_var = type_to_variable(subs, rank, pools, cached, ext);
let content = Content::Structure(FlatType::TagUnion(tag_vars, ext_var)); let mut ext_tag_vec = Vec::new();
let new_ext_var = match roc_types::pretty_print::chase_ext_tag_union(
subs,
temp_ext_var,
&mut ext_tag_vec,
) {
Ok(()) => Variable::EMPTY_TAG_UNION,
Err((new, _)) => new,
};
tag_vars.extend(ext_tag_vec.into_iter());
let content = Content::Structure(FlatType::TagUnion(tag_vars, new_ext_var));
register(subs, rank, pools, content) register(subs, rank, pools, content)
} }
@ -540,12 +617,24 @@ fn type_to_variable(
tag_vars.insert(tag.clone(), tag_argument_vars); tag_vars.insert(tag.clone(), tag_argument_vars);
} }
let ext_var = type_to_variable(subs, rank, pools, cached, ext); let temp_ext_var = type_to_variable(subs, rank, pools, cached, ext);
let mut ext_tag_vec = Vec::new();
let new_ext_var = match roc_types::pretty_print::chase_ext_tag_union(
subs,
temp_ext_var,
&mut ext_tag_vec,
) {
Ok(()) => Variable::EMPTY_TAG_UNION,
Err((new, _)) => new,
};
tag_vars.extend(ext_tag_vec.into_iter());
let content = let content =
Content::Structure(FlatType::RecursiveTagUnion(*rec_var, tag_vars, ext_var)); Content::Structure(FlatType::RecursiveTagUnion(*rec_var, tag_vars, new_ext_var));
register(subs, rank, pools, content) register(subs, rank, pools, content)
} }
Alias(Symbol::BOOL_BOOL, _, _) => Variable::BOOL,
Alias(symbol, args, alias_type) => { Alias(symbol, args, alias_type) => {
// Cache aliases without type arguments. Commonly used aliases like `Int` would otherwise get O(n) // Cache aliases without type arguments. Commonly used aliases like `Int` would otherwise get O(n)
// different variables (once for each occurence). The recursion restriction is required // different variables (once for each occurence). The recursion restriction is required
@ -560,6 +649,7 @@ fn type_to_variable(
// //
// This `u` variable can be different between lists, so giving just one variable to // This `u` variable can be different between lists, so giving just one variable to
// this type is incorrect. // this type is incorrect.
// TODO does caching work at all with uniqueness types? even Int then hides a uniqueness variable
let is_recursive = alias_type.is_recursive(); let is_recursive = alias_type.is_recursive();
let no_args = args.is_empty(); let no_args = args.is_empty();
if no_args && !is_recursive { if no_args && !is_recursive {
@ -599,7 +689,7 @@ fn type_to_variable(
fn check_for_infinite_type( fn check_for_infinite_type(
subs: &mut Subs, subs: &mut Subs,
problems: &mut Vec<Problem>, problems: &mut Vec<TypeError>,
symbol: Symbol, symbol: Symbol,
loc_var: Located<Variable>, loc_var: Located<Variable>,
) { ) {
@ -734,13 +824,13 @@ fn correct_recursive_attr(
fn circular_error( fn circular_error(
subs: &mut Subs, subs: &mut Subs,
problems: &mut Vec<Problem>, problems: &mut Vec<TypeError>,
symbol: Symbol, symbol: Symbol,
loc_var: &Located<Variable>, loc_var: &Located<Variable>,
) { ) {
let var = loc_var.value; let var = loc_var.value;
let error_type = subs.var_to_error_type(var); let error_type = subs.var_to_error_type(var);
let problem = Problem::CircularType(symbol, error_type, loc_var.region); let problem = TypeError::CircularType(loc_var.region, symbol, error_type);
subs.set_content(var, Content::Error); subs.set_content(var, Content::Error);


@ -31,7 +31,7 @@ pub fn test_home() -> ModuleId {
#[allow(dead_code)] #[allow(dead_code)]
pub fn infer_expr( pub fn infer_expr(
subs: Subs, subs: Subs,
problems: &mut Vec<roc_types::types::Problem>, problems: &mut Vec<solve::TypeError>,
constraint: &Constraint, constraint: &Constraint,
expr_var: Variable, expr_var: Variable,
) -> (Content, Subs) { ) -> (Content, Subs) {
@ -49,7 +49,7 @@ pub fn infer_expr(
/// Used in the with_larger_debug_stack() function, for tests that otherwise /// Used in the with_larger_debug_stack() function, for tests that otherwise
/// run out of stack space in debug builds (but don't in --release builds) /// run out of stack space in debug builds (but don't in --release builds)
#[allow(dead_code)] #[allow(dead_code)]
const EXPANDED_STACK_SIZE: usize = 4 * 1024 * 1024; const EXPANDED_STACK_SIZE: usize = 8 * 1024 * 1024;
/// Without this, some tests pass in `cargo test --release` but fail without /// Without this, some tests pass in `cargo test --release` but fail without
/// the --release flag because they run out of stack space. This increases /// the --release flag because they run out of stack space. This increases
@ -400,9 +400,10 @@ pub fn variable_usage(con: &Constraint) -> (SeenVariables, Vec<Variable>) {
let mut used = ImSet::default(); let mut used = ImSet::default();
variable_usage_help(con, &mut declared, &mut used); variable_usage_help(con, &mut declared, &mut used);
used.remove(unsafe { &Variable::unsafe_test_debug_variable(1) }); // ..= because there is an extra undeclared variable that contains the type of the full expression
used.remove(unsafe { &Variable::unsafe_test_debug_variable(2) }); for i in 0..=Variable::RESERVED {
used.remove(unsafe { &Variable::unsafe_test_debug_variable(3) }); used.remove(unsafe { &Variable::unsafe_test_debug_variable(i as u32) });
}
let mut used_vec: Vec<Variable> = used.into_iter().collect(); let mut used_vec: Vec<Variable> = used.into_iter().collect();
used_vec.sort(); used_vec.sort();
@ -418,7 +419,7 @@ fn variable_usage_help(con: &Constraint, declared: &mut SeenVariables, used: &mu
match con { match con {
True | SaveTheEnvironment => (), True | SaveTheEnvironment => (),
Eq(tipe, expectation, _) => { Eq(tipe, expectation, _, _) => {
for v in tipe.variables() { for v in tipe.variables() {
used.insert(v); used.insert(v);
} }

View file

@ -9,7 +9,9 @@ mod helpers;
#[cfg(test)] #[cfg(test)]
mod test_solve { mod test_solve {
use crate::helpers::{assert_correct_variable_usage, can_expr, infer_expr, CanExprOut}; use crate::helpers::{
assert_correct_variable_usage, can_expr, infer_expr, with_larger_debug_stack, CanExprOut,
};
use roc_types::pretty_print::{content_to_string, name_all_type_vars}; use roc_types::pretty_print::{content_to_string, name_all_type_vars};
use roc_types::subs::Subs; use roc_types::subs::Subs;
@ -18,7 +20,7 @@ mod test_solve {
fn infer_eq_help( fn infer_eq_help(
src: &str, src: &str,
) -> ( ) -> (
Vec<roc_types::types::Problem>, Vec<roc_solve::solve::TypeError>,
Vec<roc_problem::can::Problem>, Vec<roc_problem::can::Problem>,
String, String,
) { ) {
@ -29,7 +31,7 @@ mod test_solve {
constraint, constraint,
home, home,
interns, interns,
problems: can_problems, problems: mut can_problems,
.. ..
} = can_expr(src); } = can_expr(src);
let mut subs = Subs::new(var_store.into()); let mut subs = Subs::new(var_store.into());
@ -47,17 +49,28 @@ mod test_solve {
let actual_str = content_to_string(content, &mut subs, home, &interns); let actual_str = content_to_string(content, &mut subs, home, &interns);
// Disregard UnusedDef problems, because those are unavoidable when
// returning a function from the test expression.
can_problems.retain(|prob| match prob {
roc_problem::can::Problem::UnusedDef(_, _) => false,
_ => true,
});
(unify_problems, can_problems, actual_str) (unify_problems, can_problems, actual_str)
} }
fn infer_eq(src: &str, expected: &str) { fn infer_eq(src: &str, expected: &str) {
let (_, _can_problems, actual) = infer_eq_help(src); let (_, can_problems, actual) = infer_eq_help(src);
assert_eq!(can_problems, Vec::new(), "Canonicalization problems: ");
assert_eq!(actual, expected.to_string()); assert_eq!(actual, expected.to_string());
} }
fn infer_eq_without_problem(src: &str, expected: &str) { fn infer_eq_without_problem(src: &str, expected: &str) {
let (type_problems, _, actual) = infer_eq_help(src); let (type_problems, can_problems, actual) = infer_eq_help(src);
assert_eq!(can_problems, Vec::new(), "Canonicalization problems: ");
if !type_problems.is_empty() { if !type_problems.is_empty() {
// fail with an assert, but print the problems normally so rust doesn't try to diff // fail with an assert, but print the problems normally so rust doesn't try to diff
@ -69,7 +82,7 @@ mod test_solve {
#[test] #[test]
fn int_literal() { fn int_literal() {
infer_eq("5", "Int"); infer_eq("5", "Num *");
} }
#[test] #[test]
@ -188,7 +201,7 @@ mod test_solve {
[42] [42]
"# "#
), ),
"List Int", "List (Num *)",
); );
} }
@ -200,7 +213,7 @@ mod test_solve {
[[[ 5 ]]] [[[ 5 ]]]
"# "#
), ),
"List (List (List Int))", "List (List (List (Num *)))",
); );
} }
@ -212,7 +225,7 @@ mod test_solve {
[ 1, 2, 3 ] [ 1, 2, 3 ]
"# "#
), ),
"List Int", "List (Num *)",
); );
} }
@ -224,7 +237,7 @@ mod test_solve {
[ [ 1 ], [ 2, 3 ] ] [ [ 1 ], [ 2, 3 ] ]
"# "#
), ),
"List (List Int)", "List (List (Num *))",
); );
} }
@ -340,7 +353,7 @@ mod test_solve {
\_, _ -> 42 \_, _ -> 42
"# "#
), ),
"*, * -> Int", "*, * -> Num *",
); );
} }
@ -410,7 +423,7 @@ mod test_solve {
func func
"# "#
), ),
"*, * -> Int", "*, * -> Num *",
); );
} }
@ -474,7 +487,7 @@ mod test_solve {
c c
"# "#
), ),
"Int", "Num *",
); );
} }
@ -509,7 +522,7 @@ mod test_solve {
alwaysFive "stuff" alwaysFive "stuff"
"# "#
), ),
"Int", "Num *",
); );
} }
@ -556,7 +569,7 @@ mod test_solve {
x x
"# "#
), ),
"Int", "Num *",
); );
} }
@ -570,7 +583,7 @@ mod test_solve {
enlist 5 enlist 5
"# "#
), ),
"List Int", "List (Num *)",
); );
} }
@ -597,7 +610,7 @@ mod test_solve {
1 |> (\a -> a) 1 |> (\a -> a)
"# "#
), ),
"Int", "Num *",
); );
} }
@ -606,12 +619,12 @@ mod test_solve {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
always = \a, b -> a always2 = \a, _ -> a
1 |> always "foo" 1 |> always2 "foo"
"# "#
), ),
"Int", "Num *",
); );
} }
@ -676,7 +689,7 @@ mod test_solve {
apply identity 5 apply identity 5
"# "#
), ),
"Int", "Num *",
); );
} }
@ -705,7 +718,7 @@ mod test_solve {
// flip neverendingInt // flip neverendingInt
// "# // "#
// ), // ),
// "(Int, (a -> a)) -> Int", // "(Num *, (a -> a)) -> Num *",
// ); // );
// } // }
@ -779,7 +792,7 @@ mod test_solve {
// 1 // 2 // 1 // 2
// "# // "#
// ), // ),
// "Int", // "Num *",
// ); // );
// } // }
@ -791,7 +804,7 @@ mod test_solve {
// 1 + 2 // 1 + 2
// "# // "#
// ), // ),
// "Int", // "Num *",
// ); // );
// } // }
@ -840,7 +853,7 @@ mod test_solve {
[ alwaysFive "foo", alwaysFive [] ] [ alwaysFive "foo", alwaysFive [] ]
"# "#
), ),
"List Int", "List (Num *)",
); );
} }
@ -855,7 +868,7 @@ mod test_solve {
24 24
"# "#
), ),
"Int", "Num *",
); );
} }
@ -869,7 +882,7 @@ mod test_solve {
3 -> 4 3 -> 4
"# "#
), ),
"Int", "Num *",
); );
} }
@ -882,17 +895,17 @@ mod test_solve {
#[test] #[test]
fn one_field_record() { fn one_field_record() {
infer_eq("{ x: 5 }", "{ x : Int }"); infer_eq("{ x: 5 }", "{ x : Num * }");
} }
#[test] #[test]
fn two_field_record() { fn two_field_record() {
infer_eq("{ x: 5, y : 3.14 }", "{ x : Int, y : Float }"); infer_eq("{ x: 5, y : 3.14 }", "{ x : Num *, y : Float }");
} }
#[test] #[test]
fn record_literal_accessor() { fn record_literal_accessor() {
infer_eq("{ x: 5, y : 3.14 }.x", "Int"); infer_eq("{ x: 5, y : 3.14 }.x", "Num *");
} }
#[test] #[test]
@ -951,7 +964,7 @@ mod test_solve {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
foo : Int -> custom foo : Num * -> custom
foo 2 foo 2
"# "#
@ -986,13 +999,15 @@ mod test_solve {
r#" r#"
# technically, an empty record can be destructured # technically, an empty record can be destructured
{} = {} {} = {}
bar = \{} -> 42 thunk = \{} -> 42
when foo is xEmpty = if thunk {} == 42 then { x: {} } else { x: {} }
{ x: {} } -> x
when xEmpty is
{ x: {} } -> {}
"# "#
), ),
"{}*", "{}",
); );
} }
@ -1030,7 +1045,8 @@ mod test_solve {
fn bare_tag() { fn bare_tag() {
infer_eq( infer_eq(
indoc!( indoc!(
r#"Foo r#"
Foo
"# "#
), ),
"[ Foo ]*", "[ Foo ]*",
@ -1041,10 +1057,11 @@ mod test_solve {
fn single_tag_pattern() { fn single_tag_pattern() {
infer_eq( infer_eq(
indoc!( indoc!(
r#"\Foo -> 42 r#"
\Foo -> 42
"# "#
), ),
"[ Foo ]* -> Int", "[ Foo ]* -> Num *",
); );
} }
@ -1052,10 +1069,11 @@ mod test_solve {
fn single_private_tag_pattern() { fn single_private_tag_pattern() {
infer_eq( infer_eq(
indoc!( indoc!(
r#"\@Foo -> 42 r#"
\@Foo -> 42
"# "#
), ),
"[ @Foo ]* -> Int", "[ @Foo ]* -> Num *",
); );
} }
@ -1063,13 +1081,14 @@ mod test_solve {
fn two_tag_pattern() { fn two_tag_pattern() {
infer_eq( infer_eq(
indoc!( indoc!(
r#"\x -> r#"
\x ->
when x is when x is
True -> 1 True -> 1
False -> 0 False -> 0
"# "#
), ),
"[ False, True ]* -> Int", "[ False, True ]* -> Num *",
); );
} }
@ -1077,10 +1096,11 @@ mod test_solve {
fn tag_application() { fn tag_application() {
infer_eq( infer_eq(
indoc!( indoc!(
r#"Foo "happy" 2020 r#"
Foo "happy" 2020
"# "#
), ),
"[ Foo Str Int ]*", "[ Foo Str (Num *) ]*",
); );
} }
@ -1088,10 +1108,11 @@ mod test_solve {
fn private_tag_application() { fn private_tag_application() {
infer_eq( infer_eq(
indoc!( indoc!(
r#"@Foo "happy" 2020 r#"
@Foo "happy" 2020
"# "#
), ),
"[ @Foo Str Int ]*", "[ @Foo Str (Num *) ]*",
); );
} }
@ -1116,11 +1137,11 @@ mod test_solve {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
when foo is when { x: 5 } is
{ x: 4 } -> x { x: 4 } -> 4
"# "#
), ),
"Int", "Num *",
); );
} }
@ -1466,7 +1487,7 @@ mod test_solve {
{ numIdentity, x : numIdentity 42, y } { numIdentity, x : numIdentity 42, y }
"# "#
), ),
"{ numIdentity : Num a -> Num a, x : Int, y : Float }", "{ numIdentity : Num a -> Num a, x : Num a, y : Float }",
); );
} }
@ -1502,7 +1523,7 @@ mod test_solve {
f f
"# "#
), ),
"Int -> Int", "Num * -> Num *",
); );
} }
@ -1527,7 +1548,6 @@ mod test_solve {
infer_eq_without_problem( infer_eq_without_problem(
indoc!( indoc!(
r#" r#"
# toBit : [ False, True ] -> Num.Num Int.Integer
toBit = \bool -> toBit = \bool ->
when bool is when bool is
True -> 1 True -> 1
@ -1536,7 +1556,7 @@ mod test_solve {
toBit toBit
"# "#
), ),
"[ False, True ]* -> Int", "[ False, True ]* -> Num *",
); );
} }
@ -1573,7 +1593,7 @@ mod test_solve {
fromBit fromBit
"# "#
), ),
"Int -> [ False, True ]*", "Num * -> [ False, True ]*",
); );
} }
@ -1625,7 +1645,7 @@ mod test_solve {
foo { x: 5 } foo { x: 5 }
"# "#
), ),
"Int", "Num *",
); );
} }
@ -1904,24 +1924,6 @@ mod test_solve {
// ); // );
// } // }
#[test]
fn manual_attr() {
infer_eq(
indoc!(
r#"
r = Attr unknown "bar"
s = Attr unknown2 { left : Attr Shared "foo" }
when True is
_ -> { x : ((\Attr _ val -> val) s).left, y : r }
_ -> { x : ((\Attr _ val -> val) s).left, y : ((\Attr _ val -> val) s).left }
"#
),
"{ x : [ Attr [ Shared ]* Str ]*, y : [ Attr [ Shared ]* Str ]* }",
);
}
#[test] #[test]
fn peano_map_alias() { fn peano_map_alias() {
infer_eq( infer_eq(
@ -2215,6 +2217,7 @@ mod test_solve {
#[test] #[test]
fn quicksort_partition() { fn quicksort_partition() {
with_larger_debug_stack(|| {
infer_eq_without_problem( infer_eq_without_problem(
indoc!( indoc!(
r#" r#"
@ -2259,6 +2262,7 @@ mod test_solve {
), ),
"Int, Int, List Int -> [ Pair Int (List Int) ]", "Int, Int, List Int -> [ Pair Int (List Int) ]",
); );
});
} }
#[test] #[test]
@ -2288,7 +2292,7 @@ mod test_solve {
List.get [ 10, 9, 8, 7 ] 1 List.get [ 10, 9, 8, 7 ] 1
"# "#
), ),
"Result Int [ IndexOutOfBounds ]*", "Result (Num *) [ IndexOutOfBounds ]*",
); );
} }
@ -2370,7 +2374,7 @@ mod test_solve {
f f
"# "#
), ),
"{ p : *, q : * }* -> Int", "{ p : *, q : * }* -> Num *",
); );
} }
@ -2408,4 +2412,140 @@ mod test_solve {
"Model position -> Result position [ KeyNotFound ]*", "Model position -> Result position [ KeyNotFound ]*",
); );
} }
#[test]
fn when_with_or_pattern_and_guard() {
infer_eq_without_problem(
indoc!(
r#"
\x ->
when x is
2 | 3 -> 0
a if a < 20 -> 1
3 | 4 if False -> 2
_ -> 3
"#
),
"Num * -> Num *",
);
}
#[test]
fn sorting() {
// based on https://github.com/elm/compiler/issues/2057
// Roc seems to do this correctly, tracking to make sure it stays that way
infer_eq_without_problem(
indoc!(
r#"
sort : ConsList cm -> ConsList cm
sort =
\xs ->
f : cm, cm -> Order
f = \_, _ -> LT
sortWith f xs
sortBy : (x -> cmpl), ConsList x -> ConsList x
sortBy =
\_, list ->
cmp : x, x -> Order
cmp = \_, _ -> LT
sortWith cmp list
always = \x, _ -> x
sortWith : (foobar, foobar -> Order), ConsList foobar -> ConsList foobar
sortWith =
\_, list ->
f = \arg ->
g arg
g = \bs ->
when bs is
bx -> f bx
_ -> Nil
always Nil (f list)
Order : [ LT, GT, EQ ]
ConsList a : [ Nil, Cons a (ConsList a) ]
{ x: sortWith, y: sort, z: sortBy }
"#
),
"{ x : (foobar, foobar -> Order), ConsList foobar -> ConsList foobar, y : ConsList cm -> ConsList cm, z : (x -> cmpl), ConsList x -> ConsList x }"
);
}
#[test]
fn wrapper() {
// based on https://github.com/elm/compiler/issues/1964
// Roc seems to do this correctly, tracking to make sure it stays that way
infer_eq_without_problem(
indoc!(
r#"
Type a : [ TypeCtor (Type (Wrapper a)) ]
Wrapper a : [ Wrapper a ]
Opaque : [ Opaque ]
encodeType1 : Type a -> Opaque
encodeType1 = \thing ->
when thing is
TypeCtor v0 ->
encodeType1 v0
encodeType1
"#
),
"Type a -> Opaque",
);
}
#[test]
fn rigids() {
// I was slightly surprised this works
infer_eq_without_problem(
indoc!(
r#"
f : List a -> List a
f = \input ->
x : List b
x = []
v = List.getUnsafe input 0
List.push x v
f
"#
),
"List a -> List a",
);
}
#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn rigid_record_quantification() {
// the ext here is qualified on the outside (because we have rank 1 types, not rank 2).
// That means e.g. `f : { bar : String, foo : Int } -> Bool` is a valid argument, but
// that function could not be applied to the `{ foo : Int }` record. Therefore, this function
// is not allowed.
//
// should hit a debug_assert! in debug mode, and produce a type error in release mode
infer_eq_without_problem(
indoc!(
r#"
test : ({ foo : Int }ext -> Bool), { foo : Int } -> Bool
test = \fn, a -> fn a
test
"#
),
"should fail",
);
}
} }

View file

@ -16,10 +16,19 @@ mod test_uniq_solve {
// HELPERS // HELPERS
fn infer_eq_help(src: &str) -> (Vec<roc_types::types::Problem>, String) { fn infer_eq_help(src: &str) -> (Vec<roc_solve::solve::TypeError>, String) {
let (_loc_expr, output, _problems, mut subs, variable, constraint, home, interns) = let (_loc_expr, output, mut can_problems, mut subs, variable, constraint, home, interns) =
uniq_expr(src); uniq_expr(src);
// Disregard UnusedDef problems, because those are unavoidable when
// returning a function from the test expression.
can_problems.retain(|prob| match prob {
roc_problem::can::Problem::UnusedDef(_, _) => false,
_ => true,
});
assert_eq!(can_problems, Vec::new(), "Canonicalization problems");
assert_correct_variable_usage(&constraint); assert_correct_variable_usage(&constraint);
for (var, name) in output.introduced_variables.name_by_var { for (var, name) in output.introduced_variables.name_by_var {
@ -47,6 +56,7 @@ mod test_uniq_solve {
if !problems.is_empty() { if !problems.is_empty() {
panic!("expected:\n{:?}\ninferred:\n{:?}", expected, actual); panic!("expected:\n{:?}\ninferred:\n{:?}", expected, actual);
} }
assert_eq!(actual, expected.to_string()); assert_eq!(actual, expected.to_string());
} }
@ -57,7 +67,7 @@ mod test_uniq_solve {
#[test] #[test]
fn int_literal() { fn int_literal() {
infer_eq("5", "Attr * Int"); infer_eq("5", "Attr * (Num (Attr * *))");
} }
#[test] #[test]
@ -176,7 +186,7 @@ mod test_uniq_solve {
[42] [42]
"# "#
), ),
"Attr * (List (Attr * Int))", "Attr * (List (Attr * (Num (Attr * *))))",
); );
} }
@ -188,7 +198,7 @@ mod test_uniq_solve {
[[[ 5 ]]] [[[ 5 ]]]
"# "#
), ),
"Attr * (List (Attr * (List (Attr * (List (Attr * Int))))))", "Attr * (List (Attr * (List (Attr * (List (Attr * (Num (Attr * *))))))))",
); );
} }
@ -200,7 +210,7 @@ mod test_uniq_solve {
[ 1, 2, 3 ] [ 1, 2, 3 ]
"# "#
), ),
"Attr * (List (Attr * Int))", "Attr * (List (Attr * (Num (Attr * *))))",
); );
} }
@ -212,7 +222,7 @@ mod test_uniq_solve {
[ [ 1 ], [ 2, 3 ] ] [ [ 1 ], [ 2, 3 ] ]
"# "#
), ),
"Attr * (List (Attr * (List (Attr * Int))))", "Attr * (List (Attr * (List (Attr * (Num (Attr * *))))))",
); );
} }
@ -328,7 +338,7 @@ mod test_uniq_solve {
\_, _ -> 42 \_, _ -> 42
"# "#
), ),
"Attr * (*, * -> Attr * Int)", "Attr * (*, * -> Attr * (Num (Attr * *)))",
); );
} }
@ -398,7 +408,7 @@ mod test_uniq_solve {
func func
"# "#
), ),
"Attr * (*, * -> Attr * Int)", "Attr * (*, * -> Attr * (Num (Attr * *)))",
); );
} }
@ -462,7 +472,7 @@ mod test_uniq_solve {
c c
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -498,7 +508,7 @@ mod test_uniq_solve {
alwaysFive "stuff" alwaysFive "stuff"
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -546,7 +556,7 @@ mod test_uniq_solve {
), ),
// TODO investigate why this is not shared // TODO investigate why this is not shared
// maybe because y is not used, it is dropped? // maybe because y is not used, it is dropped?
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -560,7 +570,7 @@ mod test_uniq_solve {
enlist 5 enlist 5
"# "#
), ),
"Attr * (List (Attr * Int))", "Attr * (List (Attr * (Num (Attr * *))))",
); );
} }
@ -587,7 +597,7 @@ mod test_uniq_solve {
1 |> (\a -> a) 1 |> (\a -> a)
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -599,7 +609,7 @@ mod test_uniq_solve {
(\a -> a) 1 (\a -> a) 1
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -608,12 +618,12 @@ mod test_uniq_solve {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
always = \a, b -> a always2 = \a, _ -> a
1 |> always "foo" 1 |> always2 "foo"
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -679,7 +689,7 @@ mod test_uniq_solve {
apply identity 5 apply identity 5
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -702,13 +712,13 @@ mod test_uniq_solve {
// indoc!( // indoc!(
// r#" // r#"
// flip = \f -> (\a b -> f b a) // flip = \f -> (\a b -> f b a)
// neverendingInt = \f int -> f int // neverendingNum = \f int -> f int
// x = neverendingInt (\a -> a) 5 // x = neverendingNum (\a -> a) 5
// flip neverendingInt // flip neverendingNum
// "# // "#
// ), // ),
// "(Int, (a -> a)) -> Int", // "((Num (Attr * *)), (a -> a)) -> (Num (Attr * *))",
// ); // );
// } // }
@ -782,7 +792,7 @@ mod test_uniq_solve {
// 1 // 2 // 1 // 2
// "# // "#
// ), // ),
// "Int", // "(Num (Attr * *))",
// ); // );
// } // }
@ -794,7 +804,7 @@ mod test_uniq_solve {
// 1 + 2 // 1 + 2
// "# // "#
// ), // ),
// "Int", // "(Num (Attr * *))",
// ); // );
// } // }
@ -843,7 +853,7 @@ mod test_uniq_solve {
[ alwaysFive "foo", alwaysFive [] ] [ alwaysFive "foo", alwaysFive [] ]
"# "#
), ),
"Attr * (List (Attr * Int))", "Attr * (List (Attr * (Num (Attr * *))))",
); );
} }
@ -858,7 +868,7 @@ mod test_uniq_solve {
24 24
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -872,18 +882,18 @@ mod test_uniq_solve {
3 -> 4 3 -> 4
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
#[test] #[test]
fn record() { fn record() {
infer_eq("{ foo: 42 }", "Attr * { foo : (Attr * Int) }"); infer_eq("{ foo: 42 }", "Attr * { foo : (Attr * (Num (Attr * *))) }");
} }
#[test] #[test]
fn record_access() { fn record_access() {
infer_eq("{ foo: 42 }.foo", "Attr * Int"); infer_eq("{ foo: 42 }.foo", "Attr * (Num (Attr * *))");
} }
#[test] #[test]
@ -893,13 +903,15 @@ mod test_uniq_solve {
r#" r#"
# technically, an empty record can be destructured # technically, an empty record can be destructured
{} = {} {} = {}
bar = \{} -> 42 thunk = \{} -> 42
when foo is xEmpty = if thunk {} == 42 then { x: {} } else { x: {} }
{ x: {} } -> x
when xEmpty is
{ x: {} } -> {}
"# "#
), ),
"Attr * {}*", "Attr * {}",
); );
} }
@ -937,7 +949,7 @@ mod test_uniq_solve {
\Foo -> 42 \Foo -> 42
"# "#
), ),
"Attr * (Attr * [ Foo ]* -> Attr * Int)", "Attr * (Attr * [ Foo ]* -> Attr * (Num (Attr * *)))",
); );
} }
@ -949,7 +961,7 @@ mod test_uniq_solve {
\@Foo -> 42 \@Foo -> 42
"# "#
), ),
"Attr * (Attr * [ @Foo ]* -> Attr * Int)", "Attr * (Attr * [ @Foo ]* -> Attr * (Num (Attr * *)))",
); );
} }
@ -964,7 +976,7 @@ mod test_uniq_solve {
False -> 0 False -> 0
"# "#
), ),
"Attr * (Attr * [ False, True ]* -> Attr * Int)", "Attr * (Attr * [ False, True ]* -> Attr * (Num (Attr * *)))",
); );
} }
@ -976,7 +988,7 @@ mod test_uniq_solve {
Foo "happy" 2020 Foo "happy" 2020
"# "#
), ),
"Attr * [ Foo (Attr * Str) (Attr * Int) ]*", "Attr * [ Foo (Attr * Str) (Attr * (Num (Attr * *))) ]*",
); );
} }
@ -988,7 +1000,7 @@ mod test_uniq_solve {
@Foo "happy" 2020 @Foo "happy" 2020
"# "#
), ),
"Attr * [ @Foo (Attr * Str) (Attr * Int) ]*", "Attr * [ @Foo (Attr * Str) (Attr * (Num (Attr * *))) ]*",
); );
} }
@ -1033,11 +1045,11 @@ mod test_uniq_solve {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
when foo is when { x: 5 } is
{ x: 4 } -> x { x: 4 } -> 4
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -1079,7 +1091,7 @@ mod test_uniq_solve {
Foo x -> x Foo x -> x
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -1092,7 +1104,7 @@ mod test_uniq_solve {
@Foo x -> x @Foo x -> x
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -1194,7 +1206,7 @@ mod test_uniq_solve {
{ numIdentity, p, q } { numIdentity, p, q }
"# "#
), ),
"Attr * { numIdentity : (Attr Shared (Attr a (Num (Attr b p)) -> Attr a (Num (Attr b p)))), p : (Attr * Int), q : (Attr * Float) }" "Attr * { numIdentity : (Attr Shared (Attr a (Num (Attr b p)) -> Attr a (Num (Attr b p)))), p : (Attr * (Num (Attr * p))), q : (Attr * Float) }"
); );
} }
@ -1380,7 +1392,7 @@ mod test_uniq_solve {
factorial factorial
"# "#
), ),
"Attr Shared (Attr * Int -> Attr * Int)", "Attr Shared (Attr * (Num (Attr * *)) -> Attr * (Num (Attr * *)))",
); );
} }
@ -1408,6 +1420,7 @@ mod test_uniq_solve {
#[test] #[test]
fn quicksort() { fn quicksort() {
with_larger_debug_stack(|| {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
@ -1462,6 +1475,7 @@ mod test_uniq_solve {
), ),
"Attr Shared (Attr b (List (Attr Shared (Num (Attr c a)))), Attr Shared Int, Attr Shared Int -> Attr b (List (Attr Shared (Num (Attr c a)))))" "Attr Shared (Attr b (List (Attr Shared (Num (Attr c a)))), Attr Shared Int, Attr Shared Int -> Attr b (List (Attr Shared (Num (Attr c a)))))"
); );
})
} }
#[test] #[test]
@ -1479,7 +1493,7 @@ mod test_uniq_solve {
s.left s.left
"# "#
), ),
"Attr Shared Int", "Attr Shared (Num (Attr * *))",
); );
} }
@ -1498,7 +1512,7 @@ mod test_uniq_solve {
{ y: s.left } { y: s.left }
"# "#
), ),
"Attr * { y : (Attr Shared Int) }", "Attr * { y : (Attr Shared (Num (Attr * *))) }",
); );
} }
@ -1549,7 +1563,7 @@ mod test_uniq_solve {
"# "#
), ),
// it's fine that the inner fields are not shared: only shared extraction is possible // it's fine that the inner fields are not shared: only shared extraction is possible
"Attr * { left : (Attr Shared { left : (Attr * Int), right : (Attr * Int) }), right : (Attr Shared { left : (Attr * Int), right : (Attr * Int) }) }", "Attr * { left : (Attr Shared { left : (Attr * (Num (Attr * *))), right : (Attr * (Num (Attr * *))) }), right : (Attr Shared { left : (Attr * (Num (Attr * *))), right : (Attr * (Num (Attr * *))) }) }",
); );
} }
@ -1869,7 +1883,7 @@ mod test_uniq_solve {
4 + 4 4 + 4
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -1882,7 +1896,7 @@ mod test_uniq_solve {
|> List.get 2 |> List.get 2
"# "#
), ),
"Attr * (Result (Attr * Int) (Attr * [ IndexOutOfBounds ]*))", "Attr * (Result (Attr * (Num (Attr * *))) (Attr * [ IndexOutOfBounds ]*))",
); );
} }
@ -1988,7 +2002,7 @@ mod test_uniq_solve {
list list
"# "#
), ),
"Attr * (Attr a (List (Attr Shared Int)) -> Attr a (List (Attr Shared Int)))", "Attr * (Attr a (List (Attr Shared (Num (Attr b c)))) -> Attr a (List (Attr Shared (Num (Attr b c)))))",
); );
} }
@ -2004,7 +2018,7 @@ mod test_uniq_solve {
List.set list 0 42 List.set list 0 42
"# "#
), ),
"Attr * (Attr (a | b) (List (Attr b Int)) -> Attr (a | b) (List (Attr b Int)))", "Attr * (Attr (a | b) (List (Attr b (Num (Attr c d)))) -> Attr (a | b) (List (Attr b (Num (Attr c d)))))",
); );
} }
@ -2047,7 +2061,7 @@ mod test_uniq_solve {
sum sum
"# "#
), ),
"Attr * (Attr * (List (Attr * Int)) -> Attr * Int)", "Attr * (Attr * (List (Attr * (Num (Attr a b)))) -> Attr * (Num (Attr a b)))",
); );
} }
@ -2083,7 +2097,8 @@ mod test_uniq_solve {
reverse reverse
"# "#
), ),
"Attr * (Attr * (List (Attr (a | b) c)) -> Attr (* | a | b) (List (Attr a c)))", "Attr * (Attr * (List (Attr (a | b) c)) -> Attr (* | a | b) (List (Attr b c)))",
//"Attr * (Attr * (List (Attr (a | b) c)) -> Attr (* | a | b) (List (Attr a c)))",
); );
} }
@ -2095,7 +2110,7 @@ mod test_uniq_solve {
List.getUnsafe (List.set [ 12, 9, 7, 3 ] 1 42) 1 List.getUnsafe (List.set [ 12, 9, 7, 3 ] 1 42) 1
"# "#
), ),
"Attr * Int", "Attr * (Num (Attr * *))",
); );
} }
@ -2113,7 +2128,8 @@ mod test_uniq_solve {
f f
"# "#
), ),
"Attr * (Attr (* | a | b) { p : (Attr b *), q : (Attr a *) }* -> Attr * Int)", //"Attr * (Attr (* | a | b) { p : (Attr a *), q : (Attr b *) }* -> Attr * (Num (Attr * *)))",
"Attr * (Attr (* | a | b) { p : (Attr b *), q : (Attr a *) }* -> Attr * (Num (Attr * *)))"
); );
} }
@ -2140,6 +2156,7 @@ mod test_uniq_solve {
#[test] #[test]
fn cheapest_open() { fn cheapest_open() {
with_larger_debug_stack(|| {
infer_eq( infer_eq(
indoc!( indoc!(
r#" r#"
@ -2176,7 +2193,8 @@ mod test_uniq_solve {
"# "#
), ),
"Attr * (Attr * (Attr Shared position -> Attr Shared Float), Attr * (Model (Attr Shared position)) -> Attr * (Result (Attr Shared position) (Attr * [ KeyNotFound ]*)))" "Attr * (Attr * (Attr Shared position -> Attr Shared Float), Attr * (Model (Attr Shared position)) -> Attr * (Result (Attr Shared position) (Attr * [ KeyNotFound ]*)))"
); )
});
} }
#[test] #[test]
@ -2206,7 +2224,7 @@ mod test_uniq_solve {
newCosts = Map.insert model.costs neighbour distanceTo newCosts = Map.insert model.costs neighbour distanceTo
distanceTo = reconstructPath newCameFrom neighbour distanceTo = reconstructPath newCameFrom neighbour
|> List.length |> List.len
|> Num.toFloat |> Num.toFloat
newModel = { model & costs : newCosts , cameFrom : newCameFrom } newModel = { model & costs : newCosts , cameFrom : newCameFrom }
@ -2270,11 +2288,11 @@ mod test_uniq_solve {
else else
Ok smallestSoFar Ok smallestSoFar
Set.foldl model.openSet folder (Err KeyNotFound) Set.foldl model.openSet folder (Err KeyNotFound)
|> Result.map (\x -> x.position) |> Result.map (\x -> x.position)
reconstructPath : Map position position, position -> List position reconstructPath : Map position position, position -> List position
reconstructPath = \cameFrom, goal -> reconstructPath = \cameFrom, goal ->
when Map.get cameFrom goal is when Map.get cameFrom goal is
@ -2284,14 +2302,16 @@ mod test_uniq_solve {
Ok next -> Ok next ->
List.push (reconstructPath cameFrom next) goal List.push (reconstructPath cameFrom next) goal
updateCost : position, position, Model position -> Model position updateCost : position, position, Model position -> Model position
updateCost = \current, neighbour, model -> updateCost = \current, neighbour, model ->
newCameFrom = Map.insert model.cameFrom neighbour current newCameFrom = Map.insert model.cameFrom neighbour current
newCosts = Map.insert model.costs neighbour distanceTo newCosts = Map.insert model.costs neighbour distanceTo
distanceTo = reconstructPath newCameFrom neighbour distanceTo =
|> List.length reconstructPath newCameFrom neighbour
|> List.len
|> Num.toFloat |> Num.toFloat
newModel = { model & costs : newCosts , cameFrom : newCameFrom } newModel = { model & costs : newCosts , cameFrom : newCameFrom }
@ -2324,7 +2344,6 @@ mod test_uniq_solve {
Ok (reconstructPath model.cameFrom goal) Ok (reconstructPath model.cameFrom goal)
else else
modelPopped = { model & openSet : Set.remove model.openSet current, evaluated : Set.insert model.evaluated current } modelPopped = { model & openSet : Set.remove model.openSet current, evaluated : Set.insert model.evaluated current }
neighbours = moveFn current neighbours = moveFn current
@ -2345,6 +2364,18 @@ mod test_uniq_solve {
}); });
} }
#[test]
fn equals() {
infer_eq(
indoc!(
r#"
\a, b -> a == b
"#
),
"Attr * (a, a -> Attr * Bool)",
);
}
#[test] #[test]
fn instantiated_alias() { fn instantiated_alias() {
infer_eq( infer_eq(
@ -2362,4 +2393,21 @@ mod test_uniq_solve {
"Attr * (Attr * position -> Attr * (Model (Attr * Int)))", "Attr * (Attr * position -> Attr * (Model (Attr * Int)))",
); );
} }
#[test]
fn when_with_or_pattern_and_guard() {
infer_eq(
indoc!(
r#"
\x ->
when x is
2 | 3 -> 0
a if a < 20 -> 1
3 | 4 if False -> 2
_ -> 3
"#
),
"Attr * (Attr Shared (Num (Attr * *)) -> Attr * (Num (Attr * *)))",
);
}
} }

View file

@ -94,6 +94,10 @@ impl Bool {
Atom::Zero => Err(Atom::Zero), Atom::Zero => Err(Atom::Zero),
Atom::One => Err(Atom::One), Atom::One => Err(Atom::One),
Atom::Variable(var) => { Atom::Variable(var) => {
// The var may still point to Zero or One!
match subs.get_without_compacting(var).content {
Content::Structure(FlatType::Boolean(nested)) => nested.simplify(subs),
_ => {
let mut result = Vec::new(); let mut result = Vec::new();
result.push(var); result.push(var);
@ -101,7 +105,8 @@ impl Bool {
match atom { match atom {
Atom::Zero => {} Atom::Zero => {}
Atom::One => return Err(Atom::One), Atom::One => return Err(Atom::One),
Atom::Variable(v) => match subs.get_without_compacting(*v).content { Atom::Variable(v) => {
match subs.get_without_compacting(*v).content {
Content::Structure(FlatType::Boolean(nested)) => { Content::Structure(FlatType::Boolean(nested)) => {
match nested.simplify(subs) { match nested.simplify(subs) {
Ok(variables) => { Ok(variables) => {
@ -111,13 +116,16 @@ impl Bool {
} }
Err(Atom::Zero) => {} Err(Atom::Zero) => {}
Err(Atom::One) => return Err(Atom::One), Err(Atom::One) => return Err(Atom::One),
Err(Atom::Variable(_)) => panic!("TODO nested variable"), Err(Atom::Variable(_)) => {
panic!("TODO nested variable")
}
} }
} }
_ => { _ => {
result.push(*v); result.push(*v);
} }
}, }
}
} }
} }
@ -125,6 +133,8 @@ impl Bool {
} }
} }
} }
}
}
pub fn map_variables<F>(&self, f: &mut F) -> Self pub fn map_variables<F>(&self, f: &mut F) -> Self
where where

View file

@ -24,7 +24,7 @@ static EMPTY_TAG_UNION: &str = "[]";
/// ///
/// Otherwise, parens are unnecessary. /// Otherwise, parens are unnecessary.
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
enum Parens { pub enum Parens {
InFn, InFn,
InTypeParam, InTypeParam,
Unnecessary, Unnecessary,
@ -78,7 +78,9 @@ fn find_names_needed(
use crate::subs::FlatType::*; use crate::subs::FlatType::*;
while let Some((recursive, _)) = subs.occurs(variable) { while let Some((recursive, _)) = subs.occurs(variable) {
if let Content::Structure(FlatType::TagUnion(tags, ext_var)) = subs.get(recursive).content { if let Content::Structure(FlatType::TagUnion(tags, ext_var)) =
subs.get_without_compacting(recursive).content
{
let rec_var = subs.fresh_unnamed_flex_var(); let rec_var = subs.fresh_unnamed_flex_var();
let mut new_tags = MutMap::default(); let mut new_tags = MutMap::default();
@ -100,9 +102,9 @@ fn find_names_needed(
} }
} }
match subs.get(variable).content { match subs.get_without_compacting(variable).content {
FlexVar(None) => { FlexVar(None) => {
let root = subs.get_root_key(variable); let root = subs.get_root_key_without_compacting(variable);
// If this var is *not* its own root, then the // If this var is *not* its own root, then the
// root var necessarily appears in multiple places. // root var necessarily appears in multiple places.
@ -232,7 +234,7 @@ fn name_root(
fn set_root_name(root: Variable, name: &Lowercase, subs: &mut Subs) { fn set_root_name(root: Variable, name: &Lowercase, subs: &mut Subs) {
use crate::subs::Content::*; use crate::subs::Content::*;
let mut descriptor = subs.get(root); let mut descriptor = subs.get_without_compacting(root);
match descriptor.content { match descriptor.content {
FlexVar(None) => { FlexVar(None) => {
@ -250,7 +252,7 @@ fn set_root_name(root: Variable, name: &Lowercase, subs: &mut Subs) {
pub fn content_to_string( pub fn content_to_string(
content: Content, content: Content,
subs: &mut Subs, subs: &Subs,
home: ModuleId, home: ModuleId,
interns: &Interns, interns: &Interns,
) -> String { ) -> String {
@ -262,7 +264,7 @@ pub fn content_to_string(
buf buf
} }
fn write_content(env: &Env, content: Content, subs: &mut Subs, buf: &mut String, parens: Parens) { fn write_content(env: &Env, content: Content, subs: &Subs, buf: &mut String, parens: Parens) {
use crate::subs::Content::*; use crate::subs::Content::*;
match content { match content {
@ -279,7 +281,7 @@ fn write_content(env: &Env, content: Content, subs: &mut Subs, buf: &mut String,
let (_, arg_var) = args let (_, arg_var) = args
.get(0) .get(0)
.expect("Num was not applied to a type argument!"); .expect("Num was not applied to a type argument!");
let content = subs.get(*arg_var).content; let content = subs.get_without_compacting(*arg_var).content;
match &content { match &content {
Alias(nested, _, _) => match *nested { Alias(nested, _, _) => match *nested {
@ -293,7 +295,7 @@ fn write_content(env: &Env, content: Content, subs: &mut Subs, buf: &mut String,
}, },
Structure(FlatType::Apply(Symbol::ATTR_ATTR, nested_args)) => { Structure(FlatType::Apply(Symbol::ATTR_ATTR, nested_args)) => {
let attr_content = subs.get(nested_args[1]).content; let attr_content = subs.get_without_compacting(nested_args[1]).content;
match &attr_content { match &attr_content {
Alias(nested, _, _) => match *nested { Alias(nested, _, _) => match *nested {
Symbol::INT_INTEGER => buf.push_str("Int"), Symbol::INT_INTEGER => buf.push_str("Int"),
@ -322,7 +324,13 @@ fn write_content(env: &Env, content: Content, subs: &mut Subs, buf: &mut String,
for (_, var) in args { for (_, var) in args {
buf.push(' '); buf.push(' ');
write_content(env, subs.get(var).content, subs, buf, Parens::InTypeParam); write_content(
env,
subs.get_without_compacting(var).content,
subs,
buf,
Parens::InTypeParam,
);
} }
}), }),
} }
@ -331,13 +339,7 @@ fn write_content(env: &Env, content: Content, subs: &mut Subs, buf: &mut String,
} }
} }
fn write_flat_type( fn write_flat_type(env: &Env, flat_type: FlatType, subs: &Subs, buf: &mut String, parens: Parens) {
env: &Env,
flat_type: FlatType,
subs: &mut Subs,
buf: &mut String,
parens: Parens,
) {
use crate::subs::FlatType::*; use crate::subs::FlatType::*;
match flat_type { match flat_type {
@ -374,13 +376,19 @@ fn write_flat_type(
buf.push_str(label.as_str()); buf.push_str(label.as_str());
buf.push_str(" : "); buf.push_str(" : ");
write_content(env, subs.get(field_var).content, subs, buf, parens); write_content(
env,
subs.get_without_compacting(field_var).content,
subs,
buf,
parens,
);
} }
buf.push_str(" }"); buf.push_str(" }");
} }
match subs.get(ext_var).content { match subs.get_without_compacting(ext_var).content {
Content::Structure(EmptyRecord) => { Content::Structure(EmptyRecord) => {
// This is a closed record. We're done! // This is a closed record. We're done!
} }
@ -430,13 +438,19 @@ fn write_flat_type(
for var in vars { for var in vars {
buf.push(' '); buf.push(' ');
write_content(env, subs.get(var).content, subs, buf, Parens::InTypeParam); write_content(
env,
subs.get_without_compacting(var).content,
subs,
buf,
Parens::InTypeParam,
);
} }
} }
buf.push_str(" ]"); buf.push_str(" ]");
if let Some(content) = ext_content { if let Err((_, content)) = ext_content {
// This is an open tag union, so print the variable // This is an open tag union, so print the variable
// right after the ']' // right after the ']'
// //
@ -477,13 +491,19 @@ fn write_flat_type(
for var in vars { for var in vars {
buf.push(' '); buf.push(' ');
write_content(env, subs.get(var).content, subs, buf, Parens::InTypeParam); write_content(
env,
subs.get_without_compacting(var).content,
subs,
buf,
Parens::InTypeParam,
);
} }
} }
buf.push_str(" ]"); buf.push_str(" ]");
if let Some(content) = ext_content { if let Err((_, content)) = ext_content {
// This is an open tag union, so print the variable // This is an open tag union, so print the variable
// right after the ']' // right after the ']'
// //
@ -493,7 +513,13 @@ fn write_flat_type(
} }
buf.push_str(" as "); buf.push_str(" as ");
write_content(env, subs.get(rec_var).content, subs, buf, parens) write_content(
env,
subs.get_without_compacting(rec_var).content,
subs,
buf,
parens,
)
} }
Boolean(b) => { Boolean(b) => {
write_boolean(env, b, subs, buf, Parens::InTypeParam); write_boolean(env, b, subs, buf, Parens::InTypeParam);
@ -504,14 +530,14 @@ fn write_flat_type(
} }
} }
fn chase_ext_tag_union( pub fn chase_ext_tag_union(
subs: &mut Subs, subs: &Subs,
var: Variable, var: Variable,
fields: &mut Vec<(TagName, Vec<Variable>)>, fields: &mut Vec<(TagName, Vec<Variable>)>,
) -> Option<Content> { ) -> Result<(), (Variable, Content)> {
use FlatType::*; use FlatType::*;
match subs.get(var).content { match subs.get_without_compacting(var).content {
Content::Structure(EmptyTagUnion) => None, Content::Structure(EmptyTagUnion) => Ok(()),
Content::Structure(TagUnion(tags, ext_var)) Content::Structure(TagUnion(tags, ext_var))
| Content::Structure(RecursiveTagUnion(_, tags, ext_var)) => { | Content::Structure(RecursiveTagUnion(_, tags, ext_var)) => {
for (label, vars) in tags { for (label, vars) in tags {
@ -520,12 +546,45 @@ fn chase_ext_tag_union(
chase_ext_tag_union(subs, ext_var, fields) chase_ext_tag_union(subs, ext_var, fields)
} }
Content::Structure(Apply(Symbol::ATTR_ATTR, arguments)) => {
debug_assert!(arguments.len() == 2);
chase_ext_tag_union(subs, arguments[1], fields)
}
Content::Alias(_, _, var) => chase_ext_tag_union(subs, var, fields),
content => Some(content), content => Err((var, content)),
} }
} }
fn write_boolean(env: &Env, boolean: Bool, subs: &mut Subs, buf: &mut String, parens: Parens) { pub fn chase_ext_record(
subs: &Subs,
var: Variable,
fields: &mut MutMap<Lowercase, Variable>,
) -> Result<(), (Variable, Content)> {
use crate::subs::Content::*;
use crate::subs::FlatType::*;
match subs.get_without_compacting(var).content {
Structure(Record(sub_fields, sub_ext)) => {
fields.extend(sub_fields.into_iter());
chase_ext_record(subs, sub_ext, fields)
}
Structure(EmptyRecord) => Ok(()),
Content::Structure(Apply(Symbol::ATTR_ATTR, arguments)) => {
debug_assert!(arguments.len() == 2);
chase_ext_record(subs, arguments[1], fields)
}
Alias(_, _, var) => chase_ext_record(subs, var, fields),
content => Err((var, content)),
}
}
fn write_boolean(env: &Env, boolean: Bool, subs: &Subs, buf: &mut String, parens: Parens) {
match boolean.simplify(subs) { match boolean.simplify(subs) {
Err(atom) => write_boolean_atom(env, atom, subs, buf, parens), Err(atom) => write_boolean_atom(env, atom, subs, buf, parens),
Ok(mut variables) => { Ok(mut variables) => {
@ -534,7 +593,13 @@ fn write_boolean(env: &Env, boolean: Bool, subs: &mut Subs, buf: &mut String, pa
for v in variables { for v in variables {
let mut inner_buf: String = "".to_string(); let mut inner_buf: String = "".to_string();
write_content(env, subs.get(v).content, subs, &mut inner_buf, parens); write_content(
env,
subs.get_without_compacting(v).content,
subs,
&mut inner_buf,
parens,
);
buffers_set.insert(inner_buf); buffers_set.insert(inner_buf);
} }
@ -556,9 +621,15 @@ fn write_boolean(env: &Env, boolean: Bool, subs: &mut Subs, buf: &mut String, pa
} }
} }
fn write_boolean_atom(env: &Env, atom: Atom, subs: &mut Subs, buf: &mut String, parens: Parens) { fn write_boolean_atom(env: &Env, atom: Atom, subs: &Subs, buf: &mut String, parens: Parens) {
match atom { match atom {
Atom::Variable(var) => write_content(env, subs.get(var).content, subs, buf, parens), Atom::Variable(var) => write_content(
env,
subs.get_without_compacting(var).content,
subs,
buf,
parens,
),
Atom::Zero => { Atom::Zero => {
buf.push_str("Shared"); buf.push_str("Shared");
} }
@ -572,7 +643,7 @@ fn write_apply(
env: &Env, env: &Env,
symbol: Symbol, symbol: Symbol,
args: Vec<Variable>, args: Vec<Variable>,
subs: &mut Subs, subs: &Subs,
buf: &mut String, buf: &mut String,
parens: Parens, parens: Parens,
) { ) {
@ -588,7 +659,7 @@ fn write_apply(
.into_iter() .into_iter()
.next() .next()
.unwrap_or_else(|| panic!("Num did not have any type parameters somehow.")); .unwrap_or_else(|| panic!("Num did not have any type parameters somehow."));
let arg_content = subs.get(arg).content; let arg_content = subs.get_without_compacting(arg).content;
let mut arg_param = String::new(); let mut arg_param = String::new();
let mut default_case = |subs, content| { let mut default_case = |subs, content| {
@ -646,7 +717,13 @@ fn write_apply(
for arg in args { for arg in args {
buf.push_str(" "); buf.push_str(" ");
write_content(env, subs.get(arg).content, subs, buf, Parens::InTypeParam); write_content(
env,
subs.get_without_compacting(arg).content,
subs,
buf,
Parens::InTypeParam,
);
} }
if write_parens { if write_parens {
@ -660,7 +737,7 @@ fn write_fn(
env: &Env, env: &Env,
args: Vec<Variable>, args: Vec<Variable>,
ret: Variable, ret: Variable,
subs: &mut Subs, subs: &Subs,
buf: &mut String, buf: &mut String,
parens: Parens, parens: Parens,
) { ) {
@ -678,11 +755,23 @@ fn write_fn(
needs_comma = true; needs_comma = true;
} }
write_content(env, subs.get(arg).content, subs, buf, Parens::InFn); write_content(
env,
subs.get_without_compacting(arg).content,
subs,
buf,
Parens::InFn,
);
} }
buf.push_str(" -> "); buf.push_str(" -> ");
write_content(env, subs.get(ret).content, subs, buf, Parens::InFn); write_content(
env,
subs.get_without_compacting(ret).content,
subs,
buf,
Parens::InFn,
);
if use_parens { if use_parens {
buf.push_str(")"); buf.push_str(")");
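The `chase_ext_tag_union` and `chase_ext_record` helpers made public in this file return `Ok(())` when the extension chain ends in an empty tag union/record, and `Err((var, content))` when it stops at some other content. As a hedged illustration — a hypothetical helper, not part of this commit, mirroring the pattern used in the solve.rs hunk earlier in this diff — a caller could flatten an open tag union like this:

```rust
use roc_module::ident::TagName;
use roc_types::pretty_print::chase_ext_tag_union;
use roc_types::subs::{Subs, Variable};

/// Hypothetical sketch: collect all tags reachable through a tag union's
/// extension chain, and report which variable the chain bottomed out at.
fn flatten_tag_union(subs: &Subs, var: Variable) -> (Vec<(TagName, Vec<Variable>)>, Variable) {
    let mut tags = Vec::new();

    let ext_var = match chase_ext_tag_union(subs, var, &mut tags) {
        // The chain ended in an empty tag union, so the union is closed.
        Ok(()) => Variable::EMPTY_TAG_UNION,
        // The chain stopped at e.g. a flex/rigid variable, so the union stays open.
        Err((stopped_at, _content)) => stopped_at,
    };

    (tags, ext_var)
}
```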

View file

@ -6,7 +6,7 @@ use roc_module::symbol::Symbol;
use std::fmt; use std::fmt;
use std::iter::{once, Iterator}; use std::iter::{once, Iterator};
use std::sync::atomic::{AtomicU32, Ordering}; use std::sync::atomic::{AtomicU32, Ordering};
use ven_ena::unify::{InPlace, UnificationTable, UnifyKey}; use ven_ena::unify::{InPlace, Snapshot, UnificationTable, UnifyKey};
#[derive(Clone, Copy, Hash, PartialEq, Eq)] #[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub struct Mark(i32); pub struct Mark(i32);
@ -42,7 +42,7 @@ struct NameState {
normals: u32, normals: u32,
} }
#[derive(Default)] #[derive(Default, Clone)]
pub struct Subs { pub struct Subs {
utable: UnificationTable<InPlace<Variable>>, utable: UnificationTable<InPlace<Variable>>,
} }
@ -151,9 +151,11 @@ impl Variable {
pub const EMPTY_RECORD: Variable = Variable(1); pub const EMPTY_RECORD: Variable = Variable(1);
pub const EMPTY_TAG_UNION: Variable = Variable(2); pub const EMPTY_TAG_UNION: Variable = Variable(2);
const BOOL_ENUM: Variable = Variable(3);
pub const BOOL: Variable = Variable(4);
pub const RESERVED: usize = 5;
// variables 1 and 2 are reserved for EmptyRecord and EmptyTagUnion const FIRST_USER_SPACE_VAR: Variable = Variable(Self::RESERVED as u32);
const FIRST_USER_SPACE_VAR: Variable = Variable(3);
/// # Safety /// # Safety
/// ///
@ -228,8 +230,26 @@ impl Subs {
subs.utable.new_key(flex_var_descriptor()); subs.utable.new_key(flex_var_descriptor());
} }
subs.set_content(Variable(1), Content::Structure(FlatType::EmptyRecord)); subs.set_content(
subs.set_content(Variable(2), Content::Structure(FlatType::EmptyTagUnion)); Variable::EMPTY_RECORD,
Content::Structure(FlatType::EmptyRecord),
);
subs.set_content(
Variable::EMPTY_TAG_UNION,
Content::Structure(FlatType::EmptyTagUnion),
);
subs.set_content(Variable::BOOL_ENUM, {
let mut tags = MutMap::default();
tags.insert(TagName::Global("False".into()), vec![]);
tags.insert(TagName::Global("True".into()), vec![]);
Content::Structure(FlatType::TagUnion(tags, Variable::EMPTY_TAG_UNION))
});
subs.set_content(Variable::BOOL, {
Content::Alias(Symbol::BOOL_BOOL, vec![], Variable::BOOL_ENUM)
});
subs subs
} }
@ -326,7 +346,7 @@ impl Subs {
self.utable.is_redirect(var) self.utable.is_redirect(var)
} }
pub fn occurs(&mut self, var: Variable) -> Option<(Variable, Vec<Variable>)> { pub fn occurs(&self, var: Variable) -> Option<(Variable, Vec<Variable>)> {
occurs(self, &ImSet::default(), var) occurs(self, &ImSet::default(), var)
} }
@ -382,6 +402,18 @@ impl Subs {
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.utable.is_empty() self.utable.is_empty()
} }
pub fn snapshot(&mut self) -> Snapshot<InPlace<Variable>> {
self.utable.snapshot()
}
pub fn rollback_to(&mut self, snapshot: Snapshot<InPlace<Variable>>) {
self.utable.rollback_to(snapshot)
}
pub fn commit_snapshot(&mut self, snapshot: Snapshot<InPlace<Variable>>) {
self.utable.commit(snapshot)
}
} }
#[inline(always)] #[inline(always)]
@ -522,6 +554,250 @@ pub enum FlatType {
Boolean(boolean_algebra::Bool), Boolean(boolean_algebra::Bool),
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Copy)]
pub struct ContentHash(u64);
impl ContentHash {
pub fn from_var(var: Variable, subs: &mut Subs) -> Self {
use std::hash::Hasher;
let mut hasher = std::collections::hash_map::DefaultHasher::new();
Self::from_var_help(var, subs, &mut hasher);
ContentHash(hasher.finish())
}
pub fn from_var_help<T>(var: Variable, subs: &mut Subs, hasher: &mut T)
where
T: std::hash::Hasher,
{
Self::from_content_help(var, &subs.get_without_compacting(var).content, subs, hasher)
}
pub fn from_content_help<T>(var: Variable, content: &Content, subs: &mut Subs, hasher: &mut T)
where
T: std::hash::Hasher,
{
match content {
Content::Alias(_, _, actual) => {
// ensure an alias has the same hash as just the body of the alias
Self::from_var_help(*actual, subs, hasher)
}
Content::Structure(flat_type) => {
hasher.write_u8(0x10);
Self::from_flat_type_help(var, flat_type, subs, hasher)
}
Content::FlexVar(_) | Content::RigidVar(_) => {
hasher.write_u8(0x11);
}
Content::Error => {
hasher.write_u8(0x12);
}
}
}
pub fn from_flat_type_help<T>(
flat_type_var: Variable,
flat_type: &FlatType,
subs: &mut Subs,
hasher: &mut T,
) where
T: std::hash::Hasher,
{
use std::hash::Hash;
match flat_type {
FlatType::Func(arguments, ret) => {
hasher.write_u8(0);
for var in arguments {
Self::from_var_help(*var, subs, hasher);
}
Self::from_var_help(*ret, subs, hasher);
}
FlatType::Apply(symbol, arguments) => {
hasher.write_u8(1);
symbol.hash(hasher);
for var in arguments {
Self::from_var_help(*var, subs, hasher);
}
}
FlatType::EmptyRecord => {
hasher.write_u8(2);
}
FlatType::Record(record_fields, ext) => {
hasher.write_u8(3);
// NOTE: This function will modify the subs, putting all fields from the ext_var
// into the record itself, then setting the ext_var to EMPTY_RECORD
let mut fields = Vec::with_capacity(record_fields.len());
let mut extracted_fields_from_ext = false;
if *ext != Variable::EMPTY_RECORD {
let mut fields_map = MutMap::default();
match crate::pretty_print::chase_ext_record(subs, *ext, &mut fields_map) {
Err((_, Content::FlexVar(_))) | Ok(()) => {
if !fields_map.is_empty() {
extracted_fields_from_ext = true;
fields.extend(fields_map.into_iter());
}
}
Err(content) => panic!("Record with unexpected ext_var: {:?}", content),
}
}
fields.extend(record_fields.clone().into_iter());
fields.sort();
for (name, argument) in &fields {
name.hash(hasher);
Self::from_var_help(*argument, subs, hasher);
}
if *ext != Variable::EMPTY_RECORD {
// unify ext with empty record
let desc = subs.get(Variable::EMPTY_RECORD);
subs.union(Variable::EMPTY_RECORD, *ext, desc);
}
if extracted_fields_from_ext {
let fields_map = fields.into_iter().collect();
subs.set_content(
flat_type_var,
Content::Structure(FlatType::Record(fields_map, Variable::EMPTY_RECORD)),
);
}
}
FlatType::EmptyTagUnion => {
hasher.write_u8(4);
}
FlatType::TagUnion(tags, ext) => {
hasher.write_u8(5);
// NOTE: This function will modify the subs, putting all tags from the ext_var
// into the tag union itself, then setting the ext_var to EMPTY_TAG_UNION
let mut tag_vec = Vec::with_capacity(tags.len());
let mut extracted_fields_from_ext = false;
if *ext != Variable::EMPTY_TAG_UNION {
match crate::pretty_print::chase_ext_tag_union(subs, *ext, &mut tag_vec) {
Err((_, Content::FlexVar(_))) | Ok(()) => {
extracted_fields_from_ext = !tag_vec.is_empty();
}
Err(content) => panic!("TagUnion with unexpected ext_var: {:?}", content),
}
}
tag_vec.extend(tags.clone().into_iter());
tag_vec.sort();
for (name, arguments) in &tag_vec {
name.hash(hasher);
for var in arguments {
Self::from_var_help(*var, subs, hasher);
}
}
if *ext != Variable::EMPTY_TAG_UNION {
// unify ext with empty tag union
let desc = subs.get(Variable::EMPTY_TAG_UNION);
subs.union(Variable::EMPTY_TAG_UNION, *ext, desc);
}
if extracted_fields_from_ext {
let fields_map = tag_vec.into_iter().collect();
subs.set_content(
flat_type_var,
Content::Structure(FlatType::TagUnion(
fields_map,
Variable::EMPTY_TAG_UNION,
)),
);
}
}
FlatType::RecursiveTagUnion(rec, tags, ext) => {
// NOTE: rec is not hashed in. If all the tags and their arguments are the same,
// then the recursive tag unions are the same
hasher.write_u8(6);
// NOTE: This function will modify the subs, putting all tags from the ext_var
// into the tag union itself, then setting the ext_var to EMPTY_TAG_UNION
let mut tag_vec = Vec::with_capacity(tags.len());
let mut extracted_fields_from_ext = false;
if *ext != Variable::EMPTY_TAG_UNION {
match crate::pretty_print::chase_ext_tag_union(subs, *ext, &mut tag_vec) {
Err((_, Content::FlexVar(_))) | Ok(()) => {
extracted_fields_from_ext = !tag_vec.is_empty();
}
Err(content) => {
panic!("RecursiveTagUnion with unexpected ext_var: {:?}", content)
}
}
}
tag_vec.extend(tags.clone().into_iter());
tag_vec.sort();
for (name, arguments) in &tag_vec {
name.hash(hasher);
for var in arguments {
Self::from_var_help(*var, subs, hasher);
}
}
if *ext != Variable::EMPTY_TAG_UNION {
// unify ext with empty tag union
let desc = subs.get(Variable::EMPTY_TAG_UNION);
subs.union(Variable::EMPTY_TAG_UNION, *ext, desc);
}
if extracted_fields_from_ext {
let fields_map = tag_vec.into_iter().collect();
subs.set_content(
flat_type_var,
Content::Structure(FlatType::RecursiveTagUnion(
*rec,
fields_map,
Variable::EMPTY_TAG_UNION,
)),
);
}
}
FlatType::Boolean(boolean) => {
hasher.write_u8(7);
match boolean.simplify(subs) {
Ok(_variables) => hasher.write_u8(1),
Err(crate::boolean_algebra::Atom::One) => hasher.write_u8(1),
Err(crate::boolean_algebra::Atom::Zero) => hasher.write_u8(0),
Err(crate::boolean_algebra::Atom::Variable(_)) => unreachable!(),
}
}
FlatType::Erroneous(_problem) => {
hasher.write_u8(8);
//TODO hash the problem?
}
}
}
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)] #[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Builtin { pub enum Builtin {
Str, Str,
@ -531,19 +807,19 @@ pub enum Builtin {
} }
fn occurs( fn occurs(
subs: &mut Subs, subs: &Subs,
seen: &ImSet<Variable>, seen: &ImSet<Variable>,
input_var: Variable, input_var: Variable,
) -> Option<(Variable, Vec<Variable>)> { ) -> Option<(Variable, Vec<Variable>)> {
use self::Content::*; use self::Content::*;
use self::FlatType::*; use self::FlatType::*;
let root_var = subs.get_root_key(input_var); let root_var = subs.get_root_key_without_compacting(input_var);
if seen.contains(&root_var) { if seen.contains(&root_var) {
Some((root_var, vec![])) Some((root_var, vec![]))
} else { } else {
match subs.get(root_var).content { match subs.get_without_compacting(root_var).content {
FlexVar(_) | RigidVar(_) | Error => None, FlexVar(_) | RigidVar(_) | Error => None,
Structure(flat_type) => { Structure(flat_type) => {
@ -593,7 +869,7 @@ fn occurs(
} }
fn short_circuit<'a, T>( fn short_circuit<'a, T>(
subs: &mut Subs, subs: &Subs,
root_key: Variable, root_key: Variable,
seen: &ImSet<Variable>, seen: &ImSet<Variable>,
iter: T, iter: T,
@ -993,21 +1269,24 @@ fn flat_type_to_err_type(subs: &mut Subs, state: &mut NameState, flat_type: Flat
err_tags.insert(tag, err_vars); err_tags.insert(tag, err_vars);
} }
let rec_error_type = Box::new(var_to_err_type(subs, state, rec_var));
match var_to_err_type(subs, state, ext_var).unwrap_alias() { match var_to_err_type(subs, state, ext_var).unwrap_alias() {
ErrorType::RecursiveTagUnion(rec_var, sub_tags, sub_ext) => { ErrorType::RecursiveTagUnion(rec_var, sub_tags, sub_ext) => {
ErrorType::RecursiveTagUnion(rec_var, sub_tags.union(err_tags), sub_ext) debug_assert!(rec_var == rec_error_type);
ErrorType::RecursiveTagUnion(rec_error_type, sub_tags.union(err_tags), sub_ext)
} }
ErrorType::TagUnion(sub_tags, sub_ext) => { ErrorType::TagUnion(sub_tags, sub_ext) => {
ErrorType::RecursiveTagUnion(rec_var, sub_tags.union(err_tags), sub_ext) ErrorType::RecursiveTagUnion(rec_error_type, sub_tags.union(err_tags), sub_ext)
} }
ErrorType::FlexVar(var) => { ErrorType::FlexVar(var) => {
ErrorType::RecursiveTagUnion(rec_var, err_tags, TypeExt::FlexOpen(var)) ErrorType::RecursiveTagUnion(rec_error_type, err_tags, TypeExt::FlexOpen(var))
} }
ErrorType::RigidVar(var) => { ErrorType::RigidVar(var) => {
ErrorType::RecursiveTagUnion(rec_var, err_tags, TypeExt::RigidOpen(var)) ErrorType::RecursiveTagUnion(rec_error_type, err_tags, TypeExt::RigidOpen(var))
} }
other => other =>
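Since the new `ContentHash` derives `Hash`, `Eq`, and `Copy`, it can be used directly as a map key. A minimal sketch of one possible use — hypothetical, not part of this commit — grouping variables whose contents hash identically:

```rust
use std::collections::HashMap;

use roc_types::subs::{ContentHash, Subs, Variable};

/// Hypothetical sketch: bucket variables by the hash of their contents,
/// e.g. as a cheap first pass before a real structural-equality check.
/// `ContentHash::from_var` takes `&mut Subs` because hashing may fold a
/// record's or tag union's extension variable into the structure itself.
fn group_by_content_hash(
    subs: &mut Subs,
    vars: &[Variable],
) -> HashMap<ContentHash, Vec<Variable>> {
    let mut groups: HashMap<ContentHash, Vec<Variable>> = HashMap::new();

    for &var in vars {
        let hash = ContentHash::from_var(var, subs);
        groups.entry(hash).or_insert_with(Vec::new).push(var);
    }

    groups
}
```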

View file

@ -1,10 +1,10 @@
use crate::boolean_algebra; use crate::boolean_algebra;
use crate::pretty_print::Parens;
use crate::subs::{Subs, VarStore, Variable}; use crate::subs::{Subs, VarStore, Variable};
use inlinable_string::InlinableString; use inlinable_string::InlinableString;
use roc_collections::all::{union, ImMap, ImSet, MutMap, MutSet, SendMap}; use roc_collections::all::{union, ImMap, ImSet, Index, MutMap, MutSet, SendMap};
use roc_module::ident::{Ident, Lowercase, TagName}; use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol; use roc_module::symbol::{Interns, ModuleId, Symbol};
use roc_parse::operator::{ArgSide, BinOp};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use std::fmt; use std::fmt;
@ -597,49 +597,87 @@ pub struct RecordStructure {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum PReason { pub enum PReason {
TypedArg { name: Box<str>, index: usize }, WhenMatch { index: Index },
WhenMatch { index: usize }, TagArg { tag_name: TagName, index: Index },
CtorArg { name: Box<str>, index: usize }, PatternGuard,
ListEntry { index: usize },
Tail,
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum AnnotationSource { pub enum AnnotationSource {
TypedIfBranch(usize /* index */), TypedIfBranch { index: Index, num_branches: usize },
TypedWhenBranch(usize /* index */), TypedWhenBranch { index: Index },
TypedBody, TypedBody { region: Region },
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum Reason { pub enum Reason {
AnonymousFnArg { arg_index: u8 }, FnArg {
NamedFnArg(String /* function name */, u8 /* arg index */), name: Option<Symbol>,
AnonymousFnCall { arity: u8 }, arg_index: Index,
NamedFnCall(String /* function name */, u8 /* arity */), },
BinOpArg(BinOp, ArgSide), FnCall {
BinOpRet(BinOp), name: Option<Symbol>,
arity: u8,
},
FloatLiteral, FloatLiteral,
IntLiteral, IntLiteral,
NumLiteral,
InterpolatedStringVar, InterpolatedStringVar,
WhenBranch { index: usize }, WhenBranch {
index: Index,
},
WhenGuard,
IfCondition, IfCondition,
IfBranch { index: usize }, IfBranch {
ElemInList, index: Index,
total_branches: usize,
},
ElemInList {
index: Index,
},
RecordUpdateValue(Lowercase), RecordUpdateValue(Lowercase),
RecordUpdateKeys(Symbol, SendMap<Lowercase, Type>), RecordUpdateKeys(Symbol, SendMap<Lowercase, Region>),
}
#[derive(PartialEq, Debug, Clone)]
pub enum Category {
Lookup(Symbol),
CallResult(Option<Symbol>),
TagApply(TagName),
Lambda,
Uniqueness,
// storing variables in the ast
Storage,
// control flow
If,
When,
// types
Float,
Int,
Num,
List,
Str,
// records
Record,
Accessor(Lowercase),
Access(Lowercase),
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum PatternCategory { pub enum PatternCategory {
Record, Record,
EmptyRecord, EmptyRecord,
List, PatternGuard,
Set, Set,
Map, Map,
Ctor(TagName), Ctor(TagName),
Int,
Str, Str,
Num,
Int,
Float, Float,
} }
@ -677,7 +715,7 @@ pub enum ErrorType {
RigidVar(Lowercase), RigidVar(Lowercase),
Record(SendMap<Lowercase, ErrorType>, TypeExt), Record(SendMap<Lowercase, ErrorType>, TypeExt),
TagUnion(SendMap<TagName, Vec<ErrorType>>, TypeExt), TagUnion(SendMap<TagName, Vec<ErrorType>>, TypeExt),
RecursiveTagUnion(Variable, SendMap<TagName, Vec<ErrorType>>, TypeExt), RecursiveTagUnion(Box<ErrorType>, SendMap<TagName, Vec<ErrorType>>, TypeExt),
Function(Vec<ErrorType>, Box<ErrorType>), Function(Vec<ErrorType>, Box<ErrorType>),
Alias(Symbol, Vec<(Lowercase, ErrorType)>, Box<ErrorType>), Alias(Symbol, Vec<(Lowercase, ErrorType)>, Box<ErrorType>),
Boolean(boolean_algebra::Bool), Boolean(boolean_algebra::Bool),
@ -693,6 +731,113 @@ impl ErrorType {
} }
} }
pub fn write_error_type(home: ModuleId, interns: &Interns, error_type: ErrorType) -> String {
let mut buf = String::new();
write_error_type_help(home, interns, error_type, &mut buf, Parens::Unnecessary);
buf
}
fn write_error_type_help(
home: ModuleId,
interns: &Interns,
error_type: ErrorType,
buf: &mut String,
parens: Parens,
) {
use ErrorType::*;
match error_type {
Infinite => buf.push_str("∞"),
Error => buf.push_str("?"),
FlexVar(name) => buf.push_str(name.as_str()),
RigidVar(name) => buf.push_str(name.as_str()),
Type(symbol, arguments) => {
let write_parens = parens == Parens::InTypeParam && !arguments.is_empty();
if write_parens {
buf.push('(');
}
buf.push_str(symbol.ident_string(interns));
for arg in arguments {
buf.push(' ');
write_error_type_help(home, interns, arg, buf, Parens::InTypeParam);
}
if write_parens {
buf.push(')');
}
}
Alias(Symbol::NUM_NUM, mut arguments, _actual) => {
debug_assert!(arguments.len() == 1);
let argument = arguments.remove(0).1;
match argument {
Type(Symbol::INT_INTEGER, _) => {
buf.push_str("Int");
}
Type(Symbol::FLOAT_FLOATINGPOINT, _) => {
buf.push_str("Float");
}
other => {
let write_parens = parens == Parens::InTypeParam;
if write_parens {
buf.push('(');
}
buf.push_str("Num ");
write_error_type_help(home, interns, other, buf, Parens::InTypeParam);
if write_parens {
buf.push(')');
}
}
}
}
Function(arguments, result) => {
let write_parens = parens != Parens::Unnecessary;
if write_parens {
buf.push('(');
}
let mut it = arguments.into_iter().peekable();
while let Some(arg) = it.next() {
write_error_type_help(home, interns, arg, buf, Parens::InFn);
if it.peek().is_some() {
buf.push_str(", ");
}
}
buf.push_str(" -> ");
write_error_type_help(home, interns, *result, buf, Parens::InFn);
if write_parens {
buf.push(')');
}
}
Record(fields, ext) => {
buf.push('{');
for (label, content) in fields {
buf.push_str(label.as_str());
buf.push_str(": ");
write_error_type_help(home, interns, content, buf, Parens::Unnecessary);
}
buf.push('}');
write_type_ext(ext, buf);
}
other => todo!("cannot format {:?} yet", other),
}
}
#[derive(PartialEq, Eq, Debug, Clone)] #[derive(PartialEq, Eq, Debug, Clone)]
pub enum TypeExt { pub enum TypeExt {
Closed, Closed,
@ -700,6 +845,16 @@ pub enum TypeExt {
RigidOpen(Lowercase), RigidOpen(Lowercase),
} }
fn write_type_ext(ext: TypeExt, buf: &mut String) {
use TypeExt::*;
match ext {
Closed => {}
FlexOpen(lowercase) | RigidOpen(lowercase) => {
buf.push_str(lowercase.as_str());
}
}
}
static THE_LETTER_A: u32 = 'a' as u32; static THE_LETTER_A: u32 = 'a' as u32;
pub fn name_type_var(letters_used: u32, taken: &mut MutSet<Lowercase>) -> (Lowercase, u32) { pub fn name_type_var(letters_used: u32, taken: &mut MutSet<Lowercase>) -> (Lowercase, u32) {
@ -726,14 +881,14 @@ pub fn name_type_var(letters_used: u32, taken: &mut MutSet<Lowercase>) -> (Lower
} }
pub fn gather_fields( pub fn gather_fields(
subs: &mut Subs, subs: &Subs,
fields: MutMap<Lowercase, Variable>, fields: MutMap<Lowercase, Variable>,
var: Variable, var: Variable,
) -> RecordStructure { ) -> RecordStructure {
use crate::subs::Content::*; use crate::subs::Content::*;
use crate::subs::FlatType::*; use crate::subs::FlatType::*;
match subs.get(var).content { match subs.get_without_compacting(var).content {
Structure(Record(sub_fields, sub_ext)) => { Structure(Record(sub_fields, sub_ext)) => {
gather_fields(subs, union(fields, &sub_fields), sub_ext) gather_fields(subs, union(fields, &sub_fields), sub_ext)
} }


@ -4,7 +4,7 @@ use roc_module::symbol::Symbol;
use roc_types::boolean_algebra::{Atom, Bool}; use roc_types::boolean_algebra::{Atom, Bool};
use roc_types::subs::Content::{self, *}; use roc_types::subs::Content::{self, *};
use roc_types::subs::{Descriptor, FlatType, Mark, OptVariable, Subs, Variable}; use roc_types::subs::{Descriptor, FlatType, Mark, OptVariable, Subs, Variable};
use roc_types::types::{gather_fields, Mismatch, Problem, RecordStructure}; use roc_types::types::{gather_fields, ErrorType, Mismatch, RecordStructure};
use std::hash::Hash; use std::hash::Hash;
macro_rules! mismatch { macro_rules! mismatch {
@ -19,6 +19,45 @@ macro_rules! mismatch {
} }
vec![Mismatch::TypeMismatch] vec![Mismatch::TypeMismatch]
}}; }};
($msg:expr) => {{
if cfg!(debug_assertions) {
println!(
"Mismatch in {} Line {} Column {}",
file!(),
line!(),
column!()
);
}
println!($msg);
println!("");
vec![Mismatch::TypeMismatch]
}};
($msg:expr,) => {{
if cfg!(debug_assertions) {
println!(
"Mismatch in {} Line {} Column {}",
file!(),
line!(),
column!()
);
}
println!($msg);
println!("");
vec![Mismatch::TypeMismatch]
}};
($msg:expr, $($arg:tt)*) => {{
if cfg!(debug_assertions) {
println!(
"Mismatch in {} Line {} Column {}",
file!(),
line!(),
column!()
);
}
println!($msg, $($arg)*);
println!("");
vec![Mismatch::TypeMismatch]
}};
} }
type Pool = Vec<Variable>; type Pool = Vec<Variable>;
@ -30,9 +69,10 @@ struct Context {
second_desc: Descriptor, second_desc: Descriptor,
} }
pub struct Unified { #[derive(Debug)]
pub vars: Pool, pub enum Unified {
pub mismatches: Vec<Problem>, Success(Pool),
Failure(Pool, ErrorType, ErrorType),
} }
#[derive(Debug)] #[derive(Debug)]
@ -46,19 +86,17 @@ type Outcome = Vec<Mismatch>;
#[inline(always)] #[inline(always)]
pub fn unify(subs: &mut Subs, var1: Variable, var2: Variable) -> Unified { pub fn unify(subs: &mut Subs, var1: Variable, var2: Variable) -> Unified {
let mut vars = Vec::new(); let mut vars = Vec::new();
let mismatches = unify_pool(subs, &mut vars, var1, var2) let mismatches = unify_pool(subs, &mut vars, var1, var2);
.into_iter()
.map(|problem| { if mismatches.is_empty() {
Unified::Success(vars)
} else {
let type1 = subs.var_to_error_type(var1); let type1 = subs.var_to_error_type(var1);
let type2 = subs.var_to_error_type(var2); let type2 = subs.var_to_error_type(var2);
subs.union(var1, var2, Content::Error.into()); subs.union(var1, var2, Content::Error.into());
Unified::Failure(vars, type1, type2)
Problem::Mismatch(problem, type1, type2) }
})
.collect();
Unified { vars, mismatches }
} }
#[inline(always)] #[inline(always)]
@ -78,7 +116,7 @@ pub fn unify_pool(subs: &mut Subs, pool: &mut Pool, var1: Variable, var2: Variab
} }
fn unify_context(subs: &mut Subs, pool: &mut Pool, ctx: Context) -> Outcome { fn unify_context(subs: &mut Subs, pool: &mut Pool, ctx: Context) -> Outcome {
// println!( "{:?} {:?} ~ {:?} {:?}", ctx.first, ctx.first_desc.content, ctx.second, ctx.second_desc.content); // println!( "{:?} {:?} ~ {:?} {:?}", ctx.first, ctx.first_desc.content, ctx.second, ctx.second_desc.content,);
match &ctx.first_desc.content { match &ctx.first_desc.content {
FlexVar(opt_name) => unify_flex(subs, pool, &ctx, opt_name, &ctx.second_desc.content), FlexVar(opt_name) => unify_flex(subs, pool, &ctx, opt_name, &ctx.second_desc.content),
RigidVar(name) => unify_rigid(subs, &ctx, name, &ctx.second_desc.content), RigidVar(name) => unify_rigid(subs, &ctx, name, &ctx.second_desc.content),
@ -154,9 +192,9 @@ fn unify_structure(
} }
} }
} }
RigidVar(_) => { RigidVar(name) => {
// Type mismatch! Rigid can only unify with flex. // Type mismatch! Rigid can only unify with flex.
mismatch!() mismatch!("trying to unify {:?} with rigid var {:?}", &flat_type, name)
} }
Structure(ref other_flat_type) => { Structure(ref other_flat_type) => {
@ -207,6 +245,11 @@ fn unify_record(
if unique_fields1.is_empty() { if unique_fields1.is_empty() {
if unique_fields2.is_empty() { if unique_fields2.is_empty() {
let ext_problems = unify_pool(subs, pool, rec1.ext, rec2.ext); let ext_problems = unify_pool(subs, pool, rec1.ext, rec2.ext);
if !ext_problems.is_empty() {
return ext_problems;
}
let other_fields = MutMap::default(); let other_fields = MutMap::default();
let mut field_problems = let mut field_problems =
unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, rec1.ext); unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, rec1.ext);
@ -218,6 +261,11 @@ fn unify_record(
let flat_type = FlatType::Record(unique_fields2, rec2.ext); let flat_type = FlatType::Record(unique_fields2, rec2.ext);
let sub_record = fresh(subs, pool, ctx, Structure(flat_type)); let sub_record = fresh(subs, pool, ctx, Structure(flat_type));
let ext_problems = unify_pool(subs, pool, rec1.ext, sub_record); let ext_problems = unify_pool(subs, pool, rec1.ext, sub_record);
if !ext_problems.is_empty() {
return ext_problems;
}
let other_fields = MutMap::default(); let other_fields = MutMap::default();
let mut field_problems = let mut field_problems =
unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, sub_record); unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, sub_record);
@ -230,6 +278,11 @@ fn unify_record(
let flat_type = FlatType::Record(unique_fields1, rec1.ext); let flat_type = FlatType::Record(unique_fields1, rec1.ext);
let sub_record = fresh(subs, pool, ctx, Structure(flat_type)); let sub_record = fresh(subs, pool, ctx, Structure(flat_type));
let ext_problems = unify_pool(subs, pool, sub_record, rec2.ext); let ext_problems = unify_pool(subs, pool, sub_record, rec2.ext);
if !ext_problems.is_empty() {
return ext_problems;
}
let other_fields = MutMap::default(); let other_fields = MutMap::default();
let mut field_problems = let mut field_problems =
unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, sub_record); unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, sub_record);
@ -248,7 +301,14 @@ fn unify_record(
let sub2 = fresh(subs, pool, ctx, Structure(flat_type2)); let sub2 = fresh(subs, pool, ctx, Structure(flat_type2));
let rec1_problems = unify_pool(subs, pool, rec1.ext, sub2); let rec1_problems = unify_pool(subs, pool, rec1.ext, sub2);
if !rec1_problems.is_empty() {
return rec1_problems;
}
let rec2_problems = unify_pool(subs, pool, sub1, rec2.ext); let rec2_problems = unify_pool(subs, pool, sub1, rec2.ext);
if !rec2_problems.is_empty() {
return rec2_problems;
}
let mut field_problems = let mut field_problems =
unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, ext); unify_shared_fields(subs, pool, ctx, shared_fields, other_fields, ext);
@ -281,7 +341,15 @@ fn unify_shared_fields(
} }
if num_shared_fields == matching_fields.len() { if num_shared_fields == matching_fields.len() {
let flat_type = FlatType::Record(union(matching_fields, &other_fields), ext); // pull fields in from the ext_var
let mut fields = union(matching_fields, &other_fields);
let new_ext_var = match roc_types::pretty_print::chase_ext_record(subs, ext, &mut fields) {
Ok(()) => Variable::EMPTY_RECORD,
Err((new, _)) => new,
};
let flat_type = FlatType::Record(fields, new_ext_var);
merge(subs, ctx, Structure(flat_type)) merge(subs, ctx, Structure(flat_type))
} else { } else {
@ -316,6 +384,11 @@ fn unify_tag_union(
if unique_tags1.is_empty() { if unique_tags1.is_empty() {
if unique_tags2.is_empty() { if unique_tags2.is_empty() {
let ext_problems = unify_pool(subs, pool, rec1.ext, rec2.ext); let ext_problems = unify_pool(subs, pool, rec1.ext, rec2.ext);
if !ext_problems.is_empty() {
return ext_problems;
}
let mut tag_problems = unify_shared_tags( let mut tag_problems = unify_shared_tags(
subs, subs,
pool, pool,
@ -333,6 +406,11 @@ fn unify_tag_union(
let flat_type = FlatType::TagUnion(unique_tags2, rec2.ext); let flat_type = FlatType::TagUnion(unique_tags2, rec2.ext);
let sub_record = fresh(subs, pool, ctx, Structure(flat_type)); let sub_record = fresh(subs, pool, ctx, Structure(flat_type));
let ext_problems = unify_pool(subs, pool, rec1.ext, sub_record); let ext_problems = unify_pool(subs, pool, rec1.ext, sub_record);
if !ext_problems.is_empty() {
return ext_problems;
}
let mut tag_problems = unify_shared_tags( let mut tag_problems = unify_shared_tags(
subs, subs,
pool, pool,
@ -351,6 +429,11 @@ fn unify_tag_union(
let flat_type = FlatType::TagUnion(unique_tags1, rec1.ext); let flat_type = FlatType::TagUnion(unique_tags1, rec1.ext);
let sub_record = fresh(subs, pool, ctx, Structure(flat_type)); let sub_record = fresh(subs, pool, ctx, Structure(flat_type));
let ext_problems = unify_pool(subs, pool, sub_record, rec2.ext); let ext_problems = unify_pool(subs, pool, sub_record, rec2.ext);
if !ext_problems.is_empty() {
return ext_problems;
}
let mut tag_problems = unify_shared_tags( let mut tag_problems = unify_shared_tags(
subs, subs,
pool, pool,
@ -374,15 +457,42 @@ fn unify_tag_union(
let sub1 = fresh(subs, pool, ctx, Structure(flat_type1)); let sub1 = fresh(subs, pool, ctx, Structure(flat_type1));
let sub2 = fresh(subs, pool, ctx, Structure(flat_type2)); let sub2 = fresh(subs, pool, ctx, Structure(flat_type2));
let rec1_problems = unify_pool(subs, pool, rec1.ext, sub2); // NOTE: for clearer error messages, we rollback unification of the ext vars when either fails
let rec2_problems = unify_pool(subs, pool, sub1, rec2.ext); //
// This is inspired by
//
//
// f : [ Red, Green ] -> Bool
// f = \_ -> True
//
// f Blue
//
// In this case, we want the mismatch to be between `[ Blue ]a` and `[ Red, Green ]`, but
// without rolling back, the mismatch is between `[ Blue, Red, Green ]a` and `[ Red, Green ]`.
// TODO is this also required for the other cases?
let snapshot = subs.snapshot();
let ext1_problems = unify_pool(subs, pool, rec1.ext, sub2);
if !ext1_problems.is_empty() {
subs.rollback_to(snapshot);
return ext1_problems;
}
let ext2_problems = unify_pool(subs, pool, sub1, rec2.ext);
if !ext2_problems.is_empty() {
subs.rollback_to(snapshot);
return ext2_problems;
}
subs.commit_snapshot(snapshot);
let mut tag_problems = let mut tag_problems =
unify_shared_tags(subs, pool, ctx, shared_tags, other_tags, ext, recursion_var); unify_shared_tags(subs, pool, ctx, shared_tags, other_tags, ext, recursion_var);
tag_problems.reserve(rec1_problems.len() + rec2_problems.len()); tag_problems.reserve(ext1_problems.len() + ext2_problems.len());
tag_problems.extend(rec1_problems); tag_problems.extend(ext1_problems);
tag_problems.extend(rec2_problems); tag_problems.extend(ext2_problems);
tag_problems tag_problems
} }
@ -421,10 +531,21 @@ fn unify_shared_tags(
} }
if num_shared_tags == matching_tags.len() { if num_shared_tags == matching_tags.len() {
// merge fields from the ext_var into this tag union
let mut fields = Vec::new();
let new_ext_var = match roc_types::pretty_print::chase_ext_tag_union(subs, ext, &mut fields)
{
Ok(()) => Variable::EMPTY_TAG_UNION,
Err((new, _)) => new,
};
let mut new_tags = union(matching_tags, &other_tags);
new_tags.extend(fields.into_iter());
let flat_type = if let Some(rec) = recursion_var { let flat_type = if let Some(rec) = recursion_var {
FlatType::RecursiveTagUnion(rec, union(matching_tags, &other_tags), ext) FlatType::RecursiveTagUnion(rec, new_tags, new_ext_var)
} else { } else {
FlatType::TagUnion(union(matching_tags, &other_tags), ext) FlatType::TagUnion(new_tags, new_ext_var)
}; };
merge(subs, ctx, Structure(flat_type)) merge(subs, ctx, Structure(flat_type))
@ -552,11 +673,11 @@ fn unify_flat_type(
problems problems
} }
} }
(_other1, _other2) => { (other1, other2) => mismatch!(
// Can't unify other1 and other2 "Trying to unify two flat types that are incompatible: {:?} ~ {:?}",
// dbg!(&_other1, &_other2); other1,
mismatch!() other2
} ),
} }
} }


@ -6,6 +6,7 @@ use roc_module::symbol::Symbol;
use roc_region::all::Region; use roc_region::all::Region;
use roc_types::boolean_algebra::Bool; use roc_types::boolean_algebra::Bool;
use roc_types::subs::Variable; use roc_types::subs::Variable;
use roc_types::types::Category;
use roc_types::types::Reason; use roc_types::types::Reason;
use roc_types::types::Type::{self, *}; use roc_types::types::Type::{self, *};
@ -19,8 +20,8 @@ pub fn int_literal(num_var: Variable, expected: Expected<Type>, region: Region)
exists( exists(
vec![num_var], vec![num_var],
And(vec![ And(vec![
Eq(num_type.clone(), expected_literal, region), Eq(num_type.clone(), expected_literal, Category::Int, region),
Eq(num_type, expected, region), Eq(num_type, expected, Category::Int, region),
]), ]),
) )
} }
@ -35,8 +36,8 @@ pub fn float_literal(num_var: Variable, expected: Expected<Type>, region: Region
exists( exists(
vec![num_var], vec![num_var],
And(vec![ And(vec![
Eq(num_type.clone(), expected_literal, region), Eq(num_type.clone(), expected_literal, Category::Float, region),
Eq(num_type, expected, region), Eq(num_type, expected, Category::Float, region),
]), ]),
) )
} }


@ -515,6 +515,7 @@ pub fn annotate_usage(expr: &Expr, usage: &mut VarUsage) {
match expr { match expr {
RuntimeError(_) RuntimeError(_)
| Num(_, _)
| Int(_, _) | Int(_, _)
| Float(_, _) | Float(_, _)
| Str(_) | Str(_)
@ -552,7 +553,8 @@ pub fn annotate_usage(expr: &Expr, usage: &mut VarUsage) {
annotate_usage(&loc_cond.value, usage); annotate_usage(&loc_cond.value, usage);
let mut branches_usage = VarUsage::default(); let mut branches_usage = VarUsage::default();
for (_, loc_branch) in branches { for branch in branches {
let loc_branch = &branch.value;
let mut current_usage = VarUsage::default(); let mut current_usage = VarUsage::default();
annotate_usage(&loc_branch.value, &mut current_usage); annotate_usage(&loc_branch.value, &mut current_usage);
@ -631,7 +633,7 @@ pub fn annotate_usage(expr: &Expr, usage: &mut VarUsage) {
annotate_usage(&loc_expr.value, usage); annotate_usage(&loc_expr.value, usage);
} }
} }
Record(_, fields) => { Record { fields, .. } => {
for (_, field) in fields { for (_, field) in fields {
annotate_usage(&field.loc_expr.value, usage); annotate_usage(&field.loc_expr.value, usage);
} }
@ -734,7 +736,7 @@ fn special_case_builtins(
annotate_usage(&loc_value.value, usage); annotate_usage(&loc_value.value, usage);
} }
Symbol::LIST_ISEMPTY => { Symbol::LIST_IS_EMPTY | Symbol::LIST_LEN => {
debug_assert!(loc_args.len() == 1); debug_assert!(loc_args.len() == 1);
let loc_list = &loc_args[0].1; let loc_list = &loc_args[0].1;


@ -31,7 +31,7 @@ pub fn test_home() -> ModuleId {
#[allow(dead_code)] #[allow(dead_code)]
pub fn infer_expr( pub fn infer_expr(
subs: Subs, subs: Subs,
problems: &mut Vec<roc_types::types::Problem>, problems: &mut Vec<roc_solve::solve::TypeError>,
constraint: &Constraint, constraint: &Constraint,
expr_var: Variable, expr_var: Variable,
) -> (Content, Subs) { ) -> (Content, Subs) {
@ -416,7 +416,7 @@ fn variable_usage_help(con: &Constraint, declared: &mut SeenVariables, used: &mu
match con { match con {
True | SaveTheEnvironment => (), True | SaveTheEnvironment => (),
Eq(tipe, expectation, _) => { Eq(tipe, expectation, _, _) => {
for v in tipe.variables() { for v in tipe.variables() {
used.insert(v); used.insert(v);
} }

examples/hello-world/.gitignore (new file, 3 lines)

@ -0,0 +1,3 @@
app
*.o
*.a


@ -0,0 +1,45 @@
# Hello, World!
To run:
```bash
$ cargo run hello.roc
```
To run in release mode instead, do:
```bash
$ cargo run --release hello.roc
```
## Design Notes
This demonstrates the basic design of hosts: Roc code gets compiled into a pure
function (in this case, a thunk that always returns `"Hello, World!"`) and
then the host calls that function. Fundamentally, that's the whole idea! The host
might not even have a `main` - it could be a library, a plugin, anything.
Everything else is built on this basic "hosts calling linked pure functions" design.
For example, things get more interesting when the compiled Roc function returns
a `Task` - that is, a tagged union data structure containing function pointers
to callback closures. This lets the Roc pure function describe arbitrary
chainable effects, which the host can interpret to perform I/O as requested by
the Roc program. (The tagged union `Task` would have a variant for each supported
I/O operation.)
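
As a rough illustration (not the compiler's actual ABI), a host-side interpreter for such a
`Task` value might look like the Rust sketch below; the variant names `Done` and `WriteLine`,
and the use of boxed closures in place of C-compatible function pointers, are assumptions made
purely for this example:

```rust
// Hedged sketch only: the real Task layout is defined by the compiler and
// glue code. Variant names and the continuation signature are illustrative.
use std::io::{self, Write};

/// One variant per supported I/O operation, plus a terminal `Done`.
enum Task {
    /// The program is finished; carries the final value.
    Done(String),
    /// Write a line, then call the continuation to get the next Task.
    WriteLine(String, Box<dyn FnOnce() -> Task>),
}

/// The host walks the chain of effects described by the pure program.
fn run(mut task: Task) -> io::Result<String> {
    loop {
        match task {
            Task::Done(value) => return Ok(value),
            Task::WriteLine(line, next) => {
                writeln!(io::stdout(), "{}", line)?;
                task = next(); // ask the program for the next effect
            }
        }
    }
}

fn main() -> io::Result<()> {
    // Stand-in for a compiled Roc program: a pure value describing effects.
    let program = Task::WriteLine(
        "Hello from the host-interpreted Task!".to_string(),
        Box::new(|| Task::Done("done".to_string())),
    );

    let result = run(program)?;
    println!("program finished with: {}", result);
    Ok(())
}
```

The important property is that the program side stays pure: it only *describes* effects,
and the host decides how (and whether) to perform them.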
In this trivial example, it's very easy to line up the API between the host and
the Roc program. In a more involved host, this would be much trickier - especially
if the API were changing frequently during development.
The idea there is to have a first-class concept of "glue code" which host authors
can write (it would be plain Roc code, but with some extra keywords that aren't
available in normal modules - kinda like `port module` in Elm), and which
describes both the Roc-host/C boundary as well as the Roc-host/Roc-app boundary.
Roc application authors only care about the Roc-host/Roc-app portion, and the
host author only cares about the Roc-host/C boundary when implementing the host.
Using this glue code, the Roc compiler can generate C header files describing the
boundary. This not only gets us host compatibility with C compilers, but also
Rust FFI for free, because [`rust-bindgen`](https://github.com/rust-lang/rust-bindgen)
generates correct Rust FFI bindings from C headers.
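
As a sketch of how a host crate might consume such a header (assuming `bindgen` is declared as a
build dependency and the compiler has emitted a header named `roc_app.h` — both are hypothetical
here, not something this example ships), a `build.rs` could look like this:

```rust
// build.rs — hedged sketch, not part of this example as committed.
// Assumes `bindgen` is listed under [build-dependencies] and that a
// compiler-generated header named `roc_app.h` sits next to this script.
use std::{env, path::PathBuf};

fn main() {
    // Rebuild the bindings whenever the (hypothetical) header changes.
    println!("cargo:rerun-if-changed=roc_app.h");

    let bindings = bindgen::Builder::default()
        .header("roc_app.h")
        .generate()
        .expect("failed to generate bindings from roc_app.h");

    let out_dir = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR is set by cargo"));
    bindings
        .write_to_file(out_dir.join("roc_app_bindings.rs"))
        .expect("failed to write generated bindings");
}
```

The `host.rs` in this example takes the simpler route of declaring its one exported symbol by
hand, which is all a trivial program needs.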


@ -0,0 +1 @@
"Hello, World!"


@ -0,0 +1,14 @@
use std::ffi::CStr;
use std::os::raw::c_char;
#[link(name = "roc_app", kind = "static")]
extern "C" {
#[link_name = "$Test.main"]
fn str_from_roc() -> *const c_char;
}
pub fn main() {
let c_str = unsafe { CStr::from_ptr(str_from_roc()) };
println!("Roc says: {}", c_str.to_str().unwrap());
}

examples/quicksort/.gitignore (new file, 3 lines)

@ -0,0 +1,3 @@
app
*.o
*.a


@ -0,0 +1,13 @@
# Quicksort
To run:
```bash
$ cargo run qs.roc
```
To run in release mode instead, do:
```bash
$ cargo run --release qs.roc
```

Some files were not shown because too many files have changed in this diff.