Remove unused code

JRI98 2024-12-04 18:19:21 +00:00
parent 54e78e8fd4
commit 90f517712a
33 changed files with 24 additions and 1300 deletions

View file

@@ -81,7 +81,6 @@ pub const FLAG_OUTPUT: &str = "output";
pub const FLAG_FUZZ: &str = "fuzz";
pub const FLAG_MAIN: &str = "main";
pub const ROC_FILE: &str = "ROC_FILE";
pub const ROC_DIR: &str = "ROC_DIR";
pub const GLUE_DIR: &str = "GLUE_DIR";
pub const GLUE_SPEC: &str = "GLUE_SPEC";
pub const DIRECTORY_OR_FILES: &str = "DIRECTORY_OR_FILES";

View file

@@ -29,7 +29,6 @@ extern "C" {
#[link_name = "roc__mainForHost_0_caller"]
fn call_Fx(flags: *const u8, closure_data: *const u8, output: *mut u8);
#[allow(dead_code)]
#[link_name = "roc__mainForHost_0_size"]
fn size_Fx() -> i64;

View file

@@ -4,7 +4,7 @@ extern crate roc_load;
extern crate roc_module;
extern crate tempfile;
use roc_command_utils::{cargo, root_dir};
use roc_command_utils::root_dir;
use std::env;
use std::path::PathBuf;
@@ -36,60 +36,6 @@ pub fn path_to_binary(binary_name: &str) -> PathBuf {
path
}
// If we don't already have a /target/release/roc, build it!
pub fn build_roc_bin_cached() -> PathBuf {
let roc_binary_path = path_to_roc_binary();
if !roc_binary_path.exists() {
build_roc_bin(&[]);
}
roc_binary_path
}
pub fn build_roc_bin(extra_args: &[&str]) -> PathBuf {
let roc_binary_path = path_to_roc_binary();
// Remove the /target/release/roc part
let root_project_dir = roc_binary_path
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
// cargo build --bin roc
// (with --release iff the test is being built with --release)
let mut args = if cfg!(debug_assertions) {
vec!["build", "--bin", "roc"]
} else {
vec!["build", "--release", "--bin", "roc"]
};
args.extend(extra_args);
let mut cargo_cmd = cargo();
cargo_cmd.current_dir(root_project_dir).args(&args);
let cargo_cmd_str = format!("{cargo_cmd:?}");
let cargo_output = cargo_cmd.output().unwrap();
if !cargo_output.status.success() {
panic!(
"The following cargo command failed:\n\n {}\n\n stdout was:\n\n {}\n\n stderr was:\n\n {}\n",
cargo_cmd_str,
String::from_utf8(cargo_output.stdout).unwrap(),
String::from_utf8(cargo_output.stderr).unwrap()
);
}
roc_binary_path
}
#[allow(dead_code)]
pub fn dir_from_root(dir_name: &str) -> PathBuf {
let mut path = root_dir();

View file

@@ -1203,12 +1203,6 @@ fn recursive_variant_types<'a>(
Ok(result)
}
#[allow(dead_code)]
fn worst_case_type(context: &mut impl TypeContext) -> Result<TypeId> {
let cell = context.add_heap_cell_type();
context.add_bag_type(cell)
}
fn expr_spec<'a>(
builder: &mut FuncDefBuilder,
interner: &STLayoutInterner<'a>,

View file

@@ -25,22 +25,6 @@ pub fn target_triple_str(target: Target) -> &'static str {
}
}
pub fn target_zig_str(target: Target) -> &'static str {
// Zig has its own architecture mappings, defined here:
// https://github.com/ziglang/zig/blob/master/tools/process_headers.zig
//
// and an open proposal to unify them with the more typical "target triples":
// https://github.com/ziglang/zig/issues/4911
match target {
Target::LinuxArm64 => "aarch64-linux-gnu",
Target::LinuxX32 => "i386-linux-gnu",
Target::LinuxX64 => "x86_64-linux-gnu",
Target::MacArm64 => "aarch64-macos-none",
Target::MacX64 => "x86_64-macos-none",
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
pub fn init_arch(target: Target) {
match target.architecture() {
Architecture::X86_64 | Architecture::X86_32

View file

@@ -3,8 +3,8 @@ use roc_error_macros::internal_error;
use std::fs;
use std::io;
use std::path::Path;
use std::process::Command;
use std::str;
use std::{env, path::PathBuf, process::Command};
#[cfg(target_os = "macos")]
use tempfile::tempdir;
@@ -82,17 +82,6 @@ fn generate_bc_file(bitcode_path: &Path, zig_object: &str, file_name: &str) {
run_command(zig_cmd, 0);
}
pub fn get_lib_dir() -> PathBuf {
// Currently we have the OUT_DIR variable which points to `/target/debug/build/roc_builtins-*/out/`.
// So we just need to add "/bitcode" to that.
let dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
// create dir if it does not exist
fs::create_dir_all(&dir).expect("Failed to make $OUT_DIR/ dir.");
dir
}
fn run_command(mut command: Command, flaky_fail_counter: usize) {
let command_str = pretty_command_string(&command);
let command_str = command_str.to_string_lossy();

View file

@@ -13,11 +13,6 @@ impl IntrinsicName {
}
}
#[repr(u8)]
pub enum DecWidth {
Dec,
}
#[repr(u8)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
pub enum FloatWidth {
@@ -171,14 +166,6 @@ impl IntWidth {
}
}
impl Index<DecWidth> for IntrinsicName {
type Output = str;
fn index(&self, _: DecWidth) -> &Self::Output {
self.options[0]
}
}
impl Index<FloatWidth> for IntrinsicName {
type Output = str;

View file

@@ -144,7 +144,6 @@ pub struct IntroducedVariables {
pub able: VecSet<AbleVariable>,
/// Extension variables which should be inferred in output position.
pub infer_ext_in_output: Vec<Variable>,
pub host_exposed_aliases: VecMap<Symbol, Variable>,
}
impl IntroducedVariables {
@@ -156,7 +155,6 @@ impl IntroducedVariables {
.chain(self.named.iter().map(|nv| &nv.variable))
.chain(self.able.iter().map(|av| &av.variable))
.chain(self.infer_ext_in_output.iter())
.chain(self.host_exposed_aliases.values())
.all(|&v| v != var));
}
@@ -205,17 +203,10 @@ impl IntroducedVariables {
self.lambda_sets.push(var);
}
pub fn insert_host_exposed_alias(&mut self, symbol: Symbol, var: Variable) {
self.debug_assert_not_already_present(var);
self.host_exposed_aliases.insert(symbol, var);
}
pub fn union(&mut self, other: &Self) {
self.wildcards.extend(other.wildcards.iter().copied());
self.lambda_sets.extend(other.lambda_sets.iter().copied());
self.inferred.extend(other.inferred.iter().copied());
self.host_exposed_aliases
.extend(other.host_exposed_aliases.iter().map(|(k, v)| (*k, *v)));
self.named.extend(other.named.iter().cloned());
self.able.extend(other.able.iter().cloned());
@@ -227,7 +218,6 @@ impl IntroducedVariables {
self.wildcards.extend(other.wildcards);
self.lambda_sets.extend(other.lambda_sets);
self.inferred.extend(other.inferred);
self.host_exposed_aliases.extend(other.host_exposed_aliases);
self.named.extend(other.named);
self.able.extend(other.able);

View file

@@ -217,7 +217,6 @@ pub fn deep_copy_type_vars_into_expr(
deep_copy_expr_top(subs, var, expr)
}
#[allow(unused)] // TODO to be removed when this is used for the derivers
pub fn deep_copy_expr_across_subs(
source: &mut Subs,
target: &mut Subs,

View file

@@ -1,6 +1,5 @@
mod pretty_print;
pub use pretty_print::pretty_print_declarations;
pub use pretty_print::pretty_print_def;
pub use pretty_print::pretty_write_declarations;
pub use pretty_print::Ctx as PPCtx;

View file

@@ -19,14 +19,6 @@ pub struct Ctx<'a> {
pub print_lambda_names: bool,
}
pub fn pretty_print_declarations(c: &Ctx, declarations: &Declarations) -> String {
let f = Arena::new();
print_declarations_help(c, &f, declarations)
.1
.pretty(80)
.to_string()
}
pub fn pretty_write_declarations(
writer: &mut impl std::io::Write,
c: &Ctx,

View file

@@ -340,45 +340,12 @@ impl PendingTypeDef<'_> {
pub enum Declaration {
Declare(Def),
DeclareRec(Vec<Def>, IllegalCycleMark),
Builtin(Def),
Expects(ExpectsOrDbgs),
/// If we know a cycle is illegal during canonicalization.
/// Otherwise we will try to detect this during solving; see [`IllegalCycleMark`].
InvalidCycle(Vec<CycleEntry>),
}
impl Declaration {
pub fn def_count(&self) -> usize {
use Declaration::*;
match self {
Declare(_) => 1,
DeclareRec(defs, _) => defs.len(),
InvalidCycle { .. } => 0,
Builtin(_) => 0,
Expects(_) => 0,
}
}
pub fn region(&self) -> Region {
match self {
Declaration::Declare(def) => def.region(),
Declaration::DeclareRec(defs, _) => Region::span_across(
&defs.first().unwrap().region(),
&defs.last().unwrap().region(),
),
Declaration::Builtin(def) => def.region(),
Declaration::InvalidCycle(cycles) => Region::span_across(
&cycles.first().unwrap().expr_region,
&cycles.last().unwrap().expr_region,
),
Declaration::Expects(expects) => Region::span_across(
expects.regions.first().unwrap(),
expects.regions.last().unwrap(),
),
}
}
}
/// Returns a topologically sorted sequence of alias/opaque names
fn sort_type_defs_before_introduction(
referenced_symbols: VecMap<Symbol, Vec<Symbol>>,
@@ -2805,10 +2772,6 @@ fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Loc<Expr> {
Declaration::InvalidCycle(entries) => {
Loc::at_zero(Expr::RuntimeError(RuntimeError::CircularDef(entries)))
}
Declaration::Builtin(_) => {
// Builtins should only be added to top-level decls, not to let-exprs!
unreachable!()
}
Declaration::Expects(expects) => {
let mut loc_ret = loc_ret;

View file

@@ -34,9 +34,6 @@ pub struct Env<'a> {
/// Symbols of values/functions which were referenced by qualified lookups.
pub qualified_value_lookups: VecSet<Symbol>,
/// Symbols of types which were referenced by qualified lookups.
pub qualified_type_lookups: VecSet<Symbol>,
pub top_level_symbols: VecSet<Symbol>,
pub home_params_record: Option<(Symbol, Variable)>,
@@ -77,7 +74,6 @@ impl<'a> Env<'a> {
problems: Vec::new(),
closures: MutMap::default(),
qualified_value_lookups: VecSet::default(),
qualified_type_lookups: VecSet::default(),
tailcallable_symbol: None,
top_level_symbols: VecSet::default(),
home_params_record: None,
@@ -152,9 +148,7 @@ impl<'a> Env<'a> {
Some(ident_id) => {
let symbol = Symbol::new(module.id, ident_id);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
if !is_type_name {
self.qualified_value_lookups.insert(symbol);
}
@@ -183,9 +177,7 @@ impl<'a> Env<'a> {
Some(ident_id) => {
let symbol = Symbol::new(module.id, ident_id);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
if !is_type_name {
self.qualified_value_lookups.insert(symbol);
}

View file

@@ -1,7 +1,6 @@
use crate::abilities::SpecializationId;
use crate::annotation::{freshen_opaque_def, IntroducedVariables};
use crate::builtins::builtin_defs_map;
use crate::def::{can_defs_with_return, Annotation, Def, DefKind};
use crate::def::{can_defs_with_return, Annotation, Def};
use crate::env::Env;
use crate::num::{
finish_parsing_base, finish_parsing_float, finish_parsing_num, float_expr_from_result,
@@ -344,14 +343,6 @@ pub struct ExpectLookup {
pub ability_info: Option<SpecializationId>,
}
#[derive(Clone, Copy, Debug)]
pub struct DbgLookup {
pub symbol: Symbol,
pub var: Variable,
pub region: Region,
pub ability_info: Option<SpecializationId>,
}
impl Expr {
pub fn category(&self) -> Category {
match self {
@@ -750,36 +741,6 @@ pub struct WhenBranch {
pub redundant: RedundantMark,
}
impl WhenBranch {
pub fn pattern_region(&self) -> Region {
Region::span_across(
&self
.patterns
.first()
.expect("when branch has no pattern?")
.pattern
.region,
&self
.patterns
.last()
.expect("when branch has no pattern?")
.pattern
.region,
)
}
}
impl WhenBranch {
pub fn region(&self) -> Region {
Region::across_all(
self.patterns
.iter()
.map(|p| &p.pattern.region)
.chain([self.value.region].iter()),
)
}
}
pub fn canonicalize_expr<'a>(
env: &mut Env<'a>,
var_store: &mut VarStore,
@@ -2171,446 +2132,6 @@ fn lookup_to_expr(
}
}
/// Currently uses the heuristic of "only inline if it's a builtin"
pub fn inline_calls(var_store: &mut VarStore, expr: Expr) -> Expr {
use Expr::*;
match expr {
// Num stores the `a` variable in `Num a`. Not the same as the variable
// stored in Int and Float below, which is strictly for better error messages
other @ Num(..)
| other @ Int(..)
| other @ Float(..)
| other @ Str { .. }
| other @ IngestedFile(..)
| other @ SingleQuote(..)
| other @ RuntimeError(_)
| other @ EmptyRecord
| other @ RecordAccessor { .. }
| other @ RecordUpdate { .. }
| other @ Var(..)
| other @ ParamsVar { .. }
| other @ AbilityMember(..)
| other @ RunLowLevel { .. }
| other @ ForeignCall { .. }
| other @ OpaqueWrapFunction(_)
| other @ Crash { .. }
| other @ Return { .. } => other,
List {
elem_var,
loc_elems,
} => {
let mut new_elems = Vec::with_capacity(loc_elems.len());
for loc_elem in loc_elems {
let value = inline_calls(var_store, loc_elem.value);
new_elems.push(Loc {
value,
region: loc_elem.region,
});
}
List {
elem_var,
loc_elems: new_elems,
}
}
// Branching
When {
cond_var,
expr_var,
region,
loc_cond,
branches,
branches_cond_var,
exhaustive,
} => {
let loc_cond = Box::new(Loc {
region: loc_cond.region,
value: inline_calls(var_store, loc_cond.value),
});
let mut new_branches = Vec::with_capacity(branches.len());
for branch in branches {
let value = Loc {
value: inline_calls(var_store, branch.value.value),
region: branch.value.region,
};
let guard = match branch.guard {
Some(loc_expr) => Some(Loc {
region: loc_expr.region,
value: inline_calls(var_store, loc_expr.value),
}),
None => None,
};
let new_branch = WhenBranch {
patterns: branch.patterns,
value,
guard,
redundant: RedundantMark::new(var_store),
};
new_branches.push(new_branch);
}
When {
cond_var,
expr_var,
region,
loc_cond,
branches: new_branches,
branches_cond_var,
exhaustive,
}
}
If {
cond_var,
branch_var,
branches,
final_else,
} => {
let mut new_branches = Vec::with_capacity(branches.len());
for (loc_cond, loc_expr) in branches {
let loc_cond = Loc {
value: inline_calls(var_store, loc_cond.value),
region: loc_cond.region,
};
let loc_expr = Loc {
value: inline_calls(var_store, loc_expr.value),
region: loc_expr.region,
};
new_branches.push((loc_cond, loc_expr));
}
let final_else = Box::new(Loc {
region: final_else.region,
value: inline_calls(var_store, final_else.value),
});
If {
cond_var,
branch_var,
branches: new_branches,
final_else,
}
}
Expect {
loc_condition,
loc_continuation,
lookups_in_cond,
} => {
let loc_condition = Loc {
region: loc_condition.region,
value: inline_calls(var_store, loc_condition.value),
};
let loc_continuation = Loc {
region: loc_continuation.region,
value: inline_calls(var_store, loc_continuation.value),
};
Expect {
loc_condition: Box::new(loc_condition),
loc_continuation: Box::new(loc_continuation),
lookups_in_cond,
}
}
Dbg {
source_location,
source,
loc_message,
loc_continuation,
variable,
symbol,
} => {
let loc_message = Loc {
region: loc_message.region,
value: inline_calls(var_store, loc_message.value),
};
let loc_continuation = Loc {
region: loc_continuation.region,
value: inline_calls(var_store, loc_continuation.value),
};
Dbg {
source_location,
source,
loc_message: Box::new(loc_message),
loc_continuation: Box::new(loc_continuation),
variable,
symbol,
}
}
LetRec(defs, loc_expr, mark) => {
let mut new_defs = Vec::with_capacity(defs.len());
for def in defs {
new_defs.push(Def {
loc_pattern: def.loc_pattern,
loc_expr: Loc {
region: def.loc_expr.region,
value: inline_calls(var_store, def.loc_expr.value),
},
expr_var: def.expr_var,
pattern_vars: def.pattern_vars,
annotation: def.annotation,
kind: def.kind,
});
}
let loc_expr = Loc {
region: loc_expr.region,
value: inline_calls(var_store, loc_expr.value),
};
LetRec(new_defs, Box::new(loc_expr), mark)
}
LetNonRec(def, loc_expr) => {
let def = Def {
loc_pattern: def.loc_pattern,
loc_expr: Loc {
region: def.loc_expr.region,
value: inline_calls(var_store, def.loc_expr.value),
},
expr_var: def.expr_var,
pattern_vars: def.pattern_vars,
annotation: def.annotation,
kind: def.kind,
};
let loc_expr = Loc {
region: loc_expr.region,
value: inline_calls(var_store, loc_expr.value),
};
LetNonRec(Box::new(def), Box::new(loc_expr))
}
Closure(ClosureData {
function_type,
closure_type,
return_type,
fx_type,
early_returns,
recursive,
name,
captured_symbols,
arguments,
loc_body,
}) => {
let loc_expr = *loc_body;
let loc_expr = Loc {
value: inline_calls(var_store, loc_expr.value),
region: loc_expr.region,
};
Closure(ClosureData {
function_type,
closure_type,
return_type,
fx_type,
early_returns,
recursive,
name,
captured_symbols,
arguments,
loc_body: Box::new(loc_expr),
})
}
Record { record_var, fields } => {
todo!(
"Inlining for Record with record_var {:?} and fields {:?}",
record_var,
fields
);
}
ImportParams(module_id, region, Some((var, expr))) => ImportParams(
module_id,
region,
Some((var, Box::new(inline_calls(var_store, *expr)))),
),
ImportParams(module_id, region, None) => ImportParams(module_id, region, None),
RecordAccess {
record_var,
ext_var,
field_var,
loc_expr,
field,
} => {
todo!("Inlining for RecordAccess with record_var {:?}, ext_var {:?}, field_var {:?}, loc_expr {:?}, field {:?}", record_var, ext_var, field_var, loc_expr, field);
}
Tuple { tuple_var, elems } => {
todo!(
"Inlining for Tuple with tuple_var {:?} and elems {:?}",
tuple_var,
elems
);
}
TupleAccess {
tuple_var,
ext_var,
elem_var,
loc_expr,
index,
} => {
todo!("Inlining for TupleAccess with tuple_var {:?}, ext_var {:?}, elem_var {:?}, loc_expr {:?}, index {:?}", tuple_var, ext_var, elem_var, loc_expr, index);
}
Tag {
tag_union_var: variant_var,
ext_var,
name,
arguments,
} => {
todo!(
"Inlining for Tag with variant_var {:?}, ext_var {:?}, name {:?}, arguments {:?}",
variant_var,
ext_var,
name,
arguments
);
}
OpaqueRef {
opaque_var,
name,
argument,
specialized_def_type,
type_arguments,
lambda_set_variables,
} => {
let (var, loc_expr) = *argument;
let argument = Box::new((
var,
loc_expr.map_owned(|expr| inline_calls(var_store, expr)),
));
OpaqueRef {
opaque_var,
name,
argument,
specialized_def_type,
type_arguments,
lambda_set_variables,
}
}
ZeroArgumentTag {
closure_name,
variant_var,
ext_var,
name,
} => {
todo!(
"Inlining for ZeroArgumentTag with closure_name {:?}, variant_var {:?}, ext_var {:?}, name {:?}",
closure_name,
variant_var,
ext_var,
name,
);
}
Call(boxed_tuple, args, called_via) => {
let (fn_var, loc_expr, closure_var, expr_var, fx_var) = *boxed_tuple;
match loc_expr.value {
Var(symbol, _) if symbol.is_builtin() => {
// NOTE: This assumes builtins are not effectful!
match builtin_defs_map(symbol, var_store) {
Some(Def {
loc_expr:
Loc {
value:
Closure(ClosureData {
recursive,
arguments: params,
loc_body: boxed_body,
..
}),
..
},
..
}) => {
debug_assert_eq!(recursive, Recursive::NotRecursive);
// Since this is a canonicalized Expr, we should have
// already detected any arity mismatches and replaced this
// with a RuntimeError if there was a mismatch.
debug_assert_eq!(params.len(), args.len());
// Start with the function's body as the answer.
let mut loc_answer = *boxed_body;
// Wrap the body in one LetNonRec for each argument,
// such that at the end we have all the arguments in
// scope with the values the caller provided.
for (
(_param_var, _exhaustive_mark, loc_pattern),
(expr_var, loc_expr),
) in params.iter().cloned().zip(args.into_iter()).rev()
{
// TODO get the correct vars into here.
// Not sure if param_var should be involved.
let pattern_vars = SendMap::default();
let def = Def {
loc_pattern,
loc_expr,
expr_var,
pattern_vars,
annotation: None,
kind: DefKind::Let,
};
loc_answer = Loc {
region: Region::zero(),
value: LetNonRec(Box::new(def), Box::new(loc_answer)),
};
}
loc_answer.value
}
Some(_) => {
internal_error!("Tried to inline a non-function");
}
None => {
internal_error!(
"Tried to inline a builtin that wasn't registered: {:?}",
symbol
);
}
}
}
_ => {
// For now, we only inline calls to builtins. Leave this alone!
Call(
Box::new((fn_var, loc_expr, closure_var, expr_var, fx_var)),
args,
called_via,
)
}
}
}
}
}
fn flatten_str_literal<'a>(
env: &mut Env<'a>,
var_store: &mut VarStore,
@@ -3330,7 +2851,7 @@ impl Declarations {
pub fn expects(&self) -> ExpectCollector {
let mut collector = ExpectCollector {
expects: VecMap::default(),
dbgs: VecMap::default(),
has_dbgs: false,
};
let var = Variable::EMPTY_RECORD;
@@ -3652,7 +3173,7 @@ pub fn toplevel_expect_to_inline_expect_pure(mut loc_expr: Loc<Expr>) -> Loc<Exp
pub struct ExpectCollector {
pub expects: VecMap<Region, Vec<ExpectLookup>>,
pub dbgs: VecMap<Symbol, DbgLookup>,
pub has_dbgs: bool,
}
impl crate::traverse::Visitor for ExpectCollector {
@@ -3666,20 +3187,8 @@ impl crate::traverse::Visitor for ExpectCollector {
self.expects
.insert(loc_condition.region, lookups_in_cond.to_vec());
}
Expr::Dbg {
loc_message,
variable,
symbol,
..
} => {
let lookup = DbgLookup {
symbol: *symbol,
var: *variable,
region: loc_message.region,
ability_info: None,
};
self.dbgs.insert(*symbol, lookup);
Expr::Dbg { .. } => {
self.has_dbgs = true;
}
_ => (),
}

View file

@@ -24,7 +24,6 @@ pub mod num;
pub mod pattern;
pub mod procedure;
pub mod scope;
pub mod string;
pub mod suffixed;
pub mod traverse;

View file

@@ -5,9 +5,7 @@ use crate::annotation::{canonicalize_annotation, AnnotationFor};
use crate::def::{canonicalize_defs, report_unused_imports, Def, DefKind};
use crate::desugar::desugar_record_destructures;
use crate::env::{Env, FxMode};
use crate::expr::{
ClosureData, DbgLookup, Declarations, ExpectLookup, Expr, Output, PendingDerives,
};
use crate::expr::{ClosureData, Declarations, ExpectLookup, Expr, Output, PendingDerives};
use crate::pattern::{
canonicalize_record_destructs, BindingsFromPattern, Pattern, PermitShadows, RecordDestruct,
};
@@ -137,7 +135,7 @@ pub struct Module {
pub rigid_variables: RigidVariables,
pub abilities_store: PendingAbilitiesStore,
pub loc_expects: VecMap<Region, Vec<ExpectLookup>>,
pub loc_dbgs: VecMap<Symbol, DbgLookup>,
pub has_dbgs: bool,
pub module_params: Option<ModuleParams>,
}
@@ -188,7 +186,7 @@ pub struct ModuleOutput {
pub pending_derives: PendingDerives,
pub scope: Scope,
pub loc_expects: VecMap<Region, Vec<ExpectLookup>>,
pub loc_dbgs: VecMap<Symbol, DbgLookup>,
pub has_dbgs: bool,
}
fn has_no_implementation(expr: &Expr) -> bool {
@@ -763,7 +761,7 @@ pub fn canonicalize_module_defs<'a>(
symbols_from_requires,
pending_derives,
loc_expects: collected.expects,
loc_dbgs: collected.dbgs,
has_dbgs: collected.has_dbgs,
exposed_symbols,
}
}

View file

@@ -1,42 +1,5 @@
use crate::pattern::Pattern;
use crate::{expr::Expr, scope::SymbolLookup};
use crate::scope::SymbolLookup;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
#[derive(Clone, Debug)]
pub struct Procedure {
pub name: Option<Box<str>>,
pub is_self_tail_recursive: bool,
pub definition: Region,
pub args: Vec<Loc<Pattern>>,
pub body: Loc<Expr>,
pub references: References,
pub var: Variable,
pub ret_var: Variable,
}
impl Procedure {
pub fn new(
definition: Region,
args: Vec<Loc<Pattern>>,
body: Loc<Expr>,
references: References,
var: Variable,
ret_var: Variable,
) -> Procedure {
Procedure {
name: None,
is_self_tail_recursive: false,
definition,
args,
body,
references,
var,
ret_var,
}
}
}
#[derive(Debug, Default, Clone, Copy)]
struct ReferencesBitflags(u8);

View file

@@ -1,446 +0,0 @@
// use bumpalo::collections::string::String;
// use bumpalo::collections::vec::Vec;
use bumpalo::Bump;
use roc_error_macros::internal_error;
use roc_parse::ast::Expr;
// use roc_parse::ast::{Attempting, Expr};
// use roc_parse::ident;
// use roc_parse::parser::{unexpected, unexpected_eof, Fail, Parser, State};
// use roc_parse::problems::{Problem, Problems};
// use roc_region::all::{Loc, Region};
use roc_region::all::Region;
// use std::char;
// use std::iter::Peekable;
pub fn canonical_string_literal<'a>(_arena: &Bump, _raw: &'a str, _region: Region) -> Expr<'a> {
internal_error!("TODO restore canonicalization");
}
// let mut problems = std::vec::Vec::new();
// // Stores the accumulated string characters
// let mut buf = String::new_in(arena);
// // This caches the total string length of interpolated_pairs. Every
// // time we add a new pair to interpolated_pairs, we increment this
// // by the sum of whatever we parsed in order to obtain that pair.
// let mut buf_col_offset: usize = 0;
// // Stores interpolated identifiers, if any.
// let mut interpolated_pairs = Vec::new_in(arena);
// let mut chars = raw.chars();
// while let Some(ch) = chars.next() {
// match ch {
// // If it's a backslash, escape things.
// '\\' => match chars.next() {
// Some(next_ch) => {
// if let Some(ident) = handle_escaped_char(
// arena,
// &state,
// next_ch,
// &mut chars,
// &mut buf,
// &mut problems,
// )? {
// let expr = Expr::Var(ident);
// // +2 for `$(` and then another +1 for `)` at the end
// let parsed_length = buf.len() + 2 + ident.len() + 1;
// // Casting should always succeed in this section, because
// // if this string literal overflowed our maximum
// // line length, that would have already happened back
// // in the parsing step, and we never would have reached
// // this code. Still, debug_assert that they won't!
// debug_assert!(buf_col_offset <= u16::MAX as usize);
// debug_assert!(ident.len() <= u16::MAX as usize);
// debug_assert!((parsed_length - ident.len() - 1) <= u16::MAX as usize);
// let start_line = state.line;
// // Subtract ident length and another 1 for the `)`
// let start_col = state.column
// + buf_col_offset as u16
// + (parsed_length - ident.len() - 1) as u16;
// let ident_region = Region {
// start_line,
// start_col,
// end_line: start_line,
// end_col: start_col + ident.len() as u16 - 1,
// };
// let loc_expr = Loc {
// region: ident_region,
// value: expr,
// };
// // Push the accumulated string into the pairs list,
// // along with the ident that came after it.
// interpolated_pairs.push((buf.into_bump_str(), loc_expr));
// // Reset the buffer so we start working on a new string.
// buf = String::new_in(arena);
// // Advance the cached offset of how many chars we've parsed,
// // so the next time we see an interpolated ident, we can
// // correctly calculate its region.
// buf_col_offset += parsed_length;
// }
// }
// None => {
// problems.push(loc_char(Problem::TrailingBackslash, &state, buf.len()));
// }
// },
// '\t' => {
// // Tabs are syntax errors.
// problems.push(loc_char(Problem::Tab, &state, buf.len()));
// }
// '\r' => {
// // Carriage returns aren't allowed in string literals.
// problems.push(loc_char(Problem::CarriageReturn, &state, buf.len()));
// }
// normal_char => buf.push(normal_char),
// }
// }
// // We ran out of characters; this is the end of the string!
// if problems.is_empty() {
// let final_str = buf.into_bump_str();
// if interpolated_pairs.is_empty() {
// Expr::Str(final_str)
// } else {
// let tuple_ref = arena.alloc((interpolated_pairs.into_bump_slice(), final_str));
// Expr::InterpolatedStr(tuple_ref)
// }
// } else {
// Expr::MalformedStr(problems.into_boxed_slice())
// }
// }
// fn loc_char<'a, V>(value: V, state: &State<'a>, buf_len: usize) -> Located<V> {
// let start_line = state.line;
// let start_col = state.column + buf_len as u16;
// let end_line = start_line;
// // All invalid chars should have a length of 1
// let end_col = state.column + 1;
// let region = Region {
// start_line,
// start_col,
// end_line,
// end_col,
// };
// Loc { region, value }
// }
// fn loc_escaped_char<'a, V>(value: V, state: &State<'a>, buf_len: usize) -> Located<V> {
// let start_line = state.line;
// let start_col = state.column + buf_len as u16;
// let end_line = start_line;
// // escapes should all be 2 chars long
// let end_col = state.column + 1;
// let region = Region {
// start_line,
// start_col,
// end_line,
// end_col,
// };
// Loc { region, value }
// }
// fn loc_escaped_unicode<'a, V>(
// value: V,
// state: &State<'a>,
// buf_len: usize,
// hex_str_len: usize,
// ) -> Located<V> {
// let start_line = state.line;
// // +1 due to the `"` which precedes buf.
// let start_col = state.column + buf_len as u16 + 1;
// let end_line = start_line;
// // +3 due to the `\u{` and another + 1 due to the `}`
// // -1 to prevent overshooting because end col is inclusive.
// let end_col = start_col + 3 + hex_str_len as u16 + 1 - 1;
// let region = Region {
// start_line,
// start_col,
// end_line,
// end_col,
// };
// Loc { region, value }
// }
// #[inline(always)]
// fn handle_escaped_char<'a, I>(
// arena: &'a Bump,
// state: &State<'a>,
// ch: char,
// chars: &mut Peekable<I>,
// buf: &mut String<'a>,
// problems: &mut Problems,
// ) -> Result<Option<&'a str>, (Fail, State<'a>)>
// where
// I: Iterator<Item = char>,
// {
// match ch {
// '\\' => buf.push('\\'),
// '"' => buf.push('"'),
// 't' => buf.push('\t'),
// 'n' => buf.push('\n'),
// 'r' => buf.push('\r'),
// '0' => buf.push('\0'), // We explicitly support null characters, as we
// // can't be sure we won't receive them from Rust.
// 'u' => handle_escaped_unicode(arena, &state, chars, buf, problems)?,
// '(' => {
// let ident = parse_interpolated_ident(arena, state, chars)?;
// return Ok(Some(ident));
// }
// '\t' => {
// // Report and continue.
// // Tabs are syntax errors, but maybe the rest of the string is fine!
// problems.push(loc_escaped_char(Problem::Tab, &state, buf.len()));
// }
// '\r' => {
// // Report and continue.
// // Carriage returns aren't allowed in string literals,
// // but maybe the rest of the string is fine!
// problems.push(loc_escaped_char(Problem::CarriageReturn, &state, buf.len()));
// }
// '\n' => {
// // Report and bail out.
// // We can't safely assume where the string was supposed to end.
// problems.push(loc_escaped_char(
// Problem::NewlineInLiteral,
// &state,
// buf.len(),
// ));
// return Err(unexpected_eof(
// buf.len(),
// Attempting::UnicodeEscape,
// state.clone(),
// ));
// }
// _ => {
// // Report and continue.
// // An unsupported escaped char (e.g. \q) shouldn't halt parsing.
// problems.push(loc_escaped_char(
// Problem::UnsupportedEscapedChar,
// &state,
// buf.len(),
// ));
// }
// }
// Ok(None)
// }
// #[inline(always)]
// fn handle_escaped_unicode<'a, I>(
// arena: &'a Bump,
// state: &State<'a>,
// chars: &mut Peekable<I>,
// buf: &mut String<'a>,
// problems: &mut Problems,
// ) -> Result<(), (Fail, State<'a>)>
// where
// I: Iterator<Item = char>,
// {
// // \u{00A0} is how you specify a Unicode code point,
// // so we should always see a '{' next.
// if chars.next() != Some('{') {
// let start_line = state.line;
// // +1 due to the `"` which precedes buf
// let start_col = state.column + 1 + buf.len() as u16;
// let end_line = start_line;
// // All we parsed was `\u`, so end on the column after `\`'s column.
// let end_col = start_col + 1;
// let region = Region {
// start_line,
// start_col,
// end_line,
// end_col,
// };
// problems.push(Loc {
// region,
// value: Problem::NoUnicodeDigits,
// });
// // The rest of the string literal might be fine. Keep parsing!
// return Ok(());
// }
// // Record the point in the string literal where we started parsing `\u`
// let start_of_unicode = buf.len();
// // Stores the accumulated unicode digits
// let mut hex_str = String::new_in(arena);
// while let Some(hex_char) = chars.next() {
// match hex_char {
// '}' => {
// // Done! Validate and add it to the buffer.
// match u32::from_str_radix(&hex_str, 16) {
// Ok(code_pt) => {
// if code_pt > 0x10FFFF {
// let start_line = state.line;
// // +1 due to the `"` which precedes buf
// // +3 due to the `\u{` which precedes the hex digits
// let start_col = state.column + 1 + buf.len() as u16 + 3;
// let end_line = start_line;
// // We want to underline only the number. That's the error!
// // -1 because we want to end on the last digit, not
// // overshoot it.
// let end_col = start_col + hex_str.len() as u16 - 1;
// let region = Region {
// start_line,
// start_col,
// end_line,
// end_col,
// };
// problems.push(Loc {
// region,
// value: Problem::UnicodeCodePtTooLarge,
// });
// } else {
// // If it all checked out, add it to
// // the main buffer.
// match char::from_u32(code_pt) {
// Some(ch) => buf.push(ch),
// None => {
// problems.push(loc_escaped_unicode(
// Problem::InvalidUnicodeCodePt,
// &state,
// start_of_unicode,
// hex_str.len(),
// ));
// }
// }
// }
// }
// Err(_) => {
// let problem = if hex_str.is_empty() {
// Problem::NoUnicodeDigits
// } else {
// Problem::NonHexCharsInUnicodeCodePt
// };
// problems.push(loc_escaped_unicode(
// problem,
// &state,
// start_of_unicode,
// hex_str.len(),
// ));
// }
// }
// // We are now done processing the unicode portion of the string,
// // so exit the loop without further advancing the iterator.
// return Ok(());
// }
// '\t' => {
// // Report and continue.
// // Tabs are syntax errors, but maybe the rest of the string is fine!
// problems.push(loc_escaped_unicode(
// Problem::Tab,
// &state,
// start_of_unicode,
// hex_str.len(),
// ));
// }
// '\r' => {
// // Report and continue.
// // Carriage returns aren't allowed in string literals,
// // but maybe the rest of the string is fine!
// problems.push(loc_escaped_unicode(
// Problem::CarriageReturn,
// &state,
// start_of_unicode,
// hex_str.len(),
// ));
// }
// '\n' => {
// // Report and bail out.
// // We can't safely assume where the string was supposed to end.
// problems.push(loc_escaped_unicode(
// Problem::NewlineInLiteral,
// &state,
// start_of_unicode,
// hex_str.len(),
// ));
// return Err(unexpected_eof(
// buf.len(),
// Attempting::UnicodeEscape,
// state.clone(),
// ));
// }
// normal_char => hex_str.push(normal_char),
// }
// // If we're about to hit the end of the string, and we didn't already
// // complete parsing a valid unicode escape sequence, this is a malformed
// // escape sequence - it wasn't terminated!
// if chars.peek() == Some(&'"') {
// // Record a problem and exit the loop early, so the string literal
// // parsing logic can consume the quote and do its job as normal.
// let start_line = state.line;
// // +1 due to the `"` which precedes buf.
// let start_col = state.column + buf.len() as u16 + 1;
// let end_line = start_line;
// // +3 due to the `\u{`
// // -1 to prevent overshooting because end col is inclusive.
// let end_col = start_col + 3 + hex_str.len() as u16 - 1;
// let region = Region {
// start_line,
// start_col,
// end_line,
// end_col,
// };
// problems.push(Loc {
// region,
// value: Problem::MalformedEscapedUnicode,
// });
// return Ok(());
// }
// }
// Ok(())
// }
// #[inline(always)]
// fn parse_interpolated_ident<'a, I>(
// arena: &'a Bump,
// state: &State<'a>,
// chars: &mut Peekable<I>,
// ) -> Result<&'a str, (Fail, State<'a>)>
// where
// I: Iterator<Item = char>,
// {
// // This will return Err on invalid identifiers like "if"
// let ((string, next_char), state) = ident::parse_into(arena, chars, state.clone())?;
// // Make sure we got a closing ) to end the interpolation.
// match next_char {
// Some(')') => Ok(string),
// Some(ch) => Err(unexpected(ch, 0, state, Attempting::InterpolatedString)),
// None => Err(unexpected_eof(0, Attempting::InterpolatedString, state)),
// }
// }

View file

@@ -22,10 +22,6 @@ pub enum DeclarationInfo<'a> {
pattern: Pattern,
annotation: Option<&'a Annotation>,
},
Return {
loc_expr: &'a Loc<Expr>,
expr_var: Variable,
},
Expectation {
loc_condition: &'a Loc<Expr>,
},
@@ -54,7 +50,6 @@ impl<'a> DeclarationInfo<'a> {
loc_expr,
..
} => Region::span_across(&loc_symbol.region, &loc_expr.region),
Return { loc_expr, .. } => loc_expr.region,
Expectation { loc_condition } => loc_condition.region,
Function {
loc_symbol,
@@ -72,7 +67,6 @@ impl<'a> DeclarationInfo<'a> {
fn var(&self) -> Variable {
match self {
DeclarationInfo::Value { expr_var, .. } => *expr_var,
DeclarationInfo::Return { expr_var, .. } => *expr_var,
DeclarationInfo::Expectation { .. } => Variable::BOOL,
DeclarationInfo::Function { expr_var, .. } => *expr_var,
DeclarationInfo::Destructure { expr_var, .. } => *expr_var,
@@ -191,9 +185,6 @@ pub fn walk_decl<V: Visitor>(visitor: &mut V, decl: DeclarationInfo<'_>) {
Expectation { loc_condition } => {
visitor.visit_expr(&loc_condition.value, loc_condition.region, Variable::BOOL);
}
Return { loc_expr, expr_var } => {
visitor.visit_expr(&loc_expr.value, loc_expr.region, expr_var);
}
Function {
loc_symbol,
loc_body,

View file

@@ -3,7 +3,6 @@ extern crate bumpalo;
use self::bumpalo::Bump;
use roc_can::desugar;
use roc_can::env::Env;
use roc_can::expr::Output;
use roc_can::expr::{canonicalize_expr, Expr};
use roc_can::scope::Scope;
use roc_collections::all::MutMap;
@@ -26,12 +25,8 @@ pub fn can_expr(expr_str: &str) -> CanExprOut {
pub struct CanExprOut {
pub loc_expr: Loc<Expr>,
pub output: Output,
pub problems: Vec<Problem>,
pub home: ModuleId,
pub interns: Interns,
pub var_store: VarStore,
pub var: Variable,
}
#[allow(dead_code)]
@@ -43,7 +38,6 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
});
let mut var_store = VarStore::default();
let var = var_store.fresh();
let qualified_module_ids = PackageModuleIds::default();
let mut scope = Scope::new(
@@ -86,7 +80,7 @@
roc_types::types::AliasKind::Structural,
);
let (loc_expr, output) = canonicalize_expr(
let (loc_expr, _) = canonicalize_expr(
&mut env,
&mut var_store,
&mut scope,
@@ -104,12 +98,8 @@
CanExprOut {
loc_expr,
output,
problems: env.problems,
home: env.home,
var_store,
interns,
var,
}
}

View file

@@ -3,10 +3,6 @@ use std::collections::HashMap;
use schemars::{schema::RootSchema, schema_for, JsonSchema};
use serde::Serialize;
#[derive(Serialize, JsonSchema, Debug)]
#[serde(tag = "type")]
pub enum Constraint {}
#[derive(Serialize, JsonSchema, Debug, PartialEq)]
pub struct Variable(pub u32);
@@ -173,12 +169,6 @@ pub enum NumericRangeKind {
#[derive(Serialize, JsonSchema, Debug)]
pub struct Rank(pub u32);
#[derive(Serialize, JsonSchema, Debug)]
pub struct Descriptor {
pub content: Content,
pub rank: Rank,
}
#[derive(Serialize, JsonSchema, Debug)]
pub struct Symbol(
// TODO: should this be module ID + symbol?

View file

@@ -50,7 +50,6 @@ impl ReferenceMatrix {
//
// Thank you, Samuel!
impl ReferenceMatrix {
#[allow(dead_code)]
pub fn topological_sort_into_groups(&self) -> TopologicalSort {
if self.length == 0 {
return TopologicalSort::Groups { groups: Vec::new() };

View file

@@ -929,14 +929,6 @@ fn fmt_expect<'a>(buf: &mut Buf, condition: &'a Loc<Expr<'a>>, is_multiline: boo
condition.format(buf, return_indent);
}
pub fn fmt_value_def(buf: &mut Buf, def: &roc_parse::ast::ValueDef, indent: u16) {
def.format(buf, indent);
}
pub fn fmt_type_def(buf: &mut Buf, def: &roc_parse::ast::TypeDef, indent: u16) {
def.format(buf, indent);
}
pub fn fmt_defs(buf: &mut Buf, defs: &Defs, indent: u16) {
defs.format(buf, indent);
}

View file

@@ -381,22 +381,6 @@ fn fmt_exposes<N: Formattable + Copy + core::fmt::Debug>(
fmt_collection(buf, indent, Braces::Square, loc_entries, Newlines::No)
}
pub trait FormatName {
fn format(&self, buf: &mut Buf);
}
impl<'a> FormatName for &'a str {
fn format(&self, buf: &mut Buf) {
buf.push_str(self)
}
}
impl<'a> FormatName for ModuleName<'a> {
fn format(&self, buf: &mut Buf) {
buf.push_str(self.as_str());
}
}
impl<'a> Formattable for ModuleName<'a> {
fn is_multiline(&self) -> bool {
false
@@ -430,12 +414,6 @@ impl<'a> Formattable for ExposedName<'a> {
}
}
impl<'a> FormatName for ExposedName<'a> {
fn format(&self, buf: &mut Buf) {
buf.push_str(self.as_str());
}
}
fn fmt_packages<'a>(
buf: &mut Buf,
loc_entries: Collection<'a, Loc<Spaced<'a, PackageEntry<'a>>>>,

View file

@@ -78,7 +78,6 @@ pub struct Env<'a> {
// These relocations likely will need a length.
// They may even need more definition, but this should be at least good enough for how we will use elf.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum Relocation {
LocalData {
offset: u64,

View file

@@ -46,8 +46,6 @@ macro_rules! instruction_memargs {
#[derive(Debug)]
pub struct CodeBuilder<'a> {
pub arena: &'a Bump,
/// The main container for the instructions
code: Vec<'a, u8>,
@@ -81,7 +79,6 @@ pub struct CodeBuilder<'a> {
impl<'a> CodeBuilder<'a> {
pub fn new(arena: &'a Bump) -> Self {
CodeBuilder {
arena,
code: Vec::with_capacity_in(1024, arena),
insertions: Vec::with_capacity_in(32, arena),
insert_bytes: Vec::with_capacity_in(64, arena),

View file

@@ -38,8 +38,6 @@ const PTR_SIZE: u32 = {
const PTR_TYPE: ValueType = ValueType::I32;
pub const MEMORY_NAME: &str = "memory";
pub const BUILTINS_IMPORT_MODULE_NAME: &str = "env";
pub const STACK_POINTER_NAME: &str = "__stack_pointer";
pub struct Env<'a> {
pub arena: &'a Bump,

View file

@@ -16,7 +16,7 @@ use roc_builtins::roc::module_source;
use roc_can::abilities::{AbilitiesStore, PendingAbilitiesStore, ResolvedImpl};
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints, TypeOrVar};
use roc_can::env::FxMode;
use roc_can::expr::{DbgLookup, Declarations, ExpectLookup, PendingDerives};
use roc_can::expr::{Declarations, ExpectLookup, PendingDerives};
use roc_can::module::{
canonicalize_module_defs, ExposedByModule, ExposedForModule, ExposedModuleTypes, Module,
ModuleParams, ResolvedImplementations, TypeState,
@@ -571,7 +571,6 @@ pub struct ExpectMetadata<'a> {
}
type LocExpects = VecMap<Region, Vec<ExpectLookup>>;
type LocDbgs = VecMap<Symbol, DbgLookup>;
/// A message sent out _from_ a worker thread,
/// representing a result of work done, or a request for further work
@@ -591,7 +590,7 @@ enum Msg<'a> {
module_timing: ModuleTiming,
abilities_store: AbilitiesStore,
loc_expects: LocExpects,
loc_dbgs: LocDbgs,
has_dbgs: bool,
#[cfg(debug_assertions)]
checkmate: Option<roc_checkmate::Collector>,
@@ -881,7 +880,6 @@ impl std::fmt::Display for ModuleTiming {
/// A message sent _to_ a worker thread, describing the work to be done
#[derive(Debug)]
#[allow(dead_code)]
enum BuildTask<'a> {
LoadModule {
module_name: PQModuleName<'a>,
@@ -1464,8 +1462,6 @@ pub fn load<'a>(
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
enum Threads {
Single,
#[allow(dead_code)]
Many(usize),
}
@@ -2448,7 +2444,7 @@ fn update<'a>(
mut module_timing,
abilities_store,
loc_expects,
loc_dbgs,
has_dbgs,
#[cfg(debug_assertions)]
checkmate,
@@ -2465,7 +2461,7 @@
.exposes
.insert(module_id, solved_module.exposed_vars_by_symbol.clone());
let should_include_expects = (!loc_expects.is_empty() || !loc_dbgs.is_empty()) && {
let should_include_expects = (!loc_expects.is_empty() || has_dbgs) && {
let modules = state.arc_modules.lock();
modules
.package_eq(module_id, state.root_id)
@@ -2477,7 +2473,6 @@
Some(Expectations {
expectations: loc_expects,
dbgs: loc_dbgs,
subs: solved_subs.clone().into_inner(),
path: path.to_owned(),
ident_ids: ident_ids.clone(),
@@ -3684,8 +3679,6 @@ fn load_module<'a>(
#[derive(Debug)]
enum ShorthandPath {
/// e.g. "/home/rtfeldman/.cache/roc/0.1.0/oUkxSOI9zFGtSoIaMB40QPdrXphr1p1780eiui2iO9Mz"
#[allow(dead_code)]
// wasm warns FromHttpsUrl is unused, but errors if it is removed ¯\_(ツ)_/¯
FromHttpsUrl {
/// e.g. "/home/rtfeldman/.cache/roc/0.1.0/oUkxSOI9zFGtSoIaMB40QPdrXphr1p1780eiui2iO9Mz"
root_module_dir: PathBuf,
@@ -4829,7 +4822,7 @@ fn run_solve<'a>(
let mut module = module;
let loc_expects = std::mem::take(&mut module.loc_expects);
let loc_dbgs = std::mem::take(&mut module.loc_dbgs);
let has_dbgs = module.has_dbgs;
let module = module;
let solve_result = {
@@ -4944,7 +4937,7 @@
module_timing,
abilities_store,
loc_expects,
loc_dbgs,
has_dbgs,
#[cfg(debug_assertions)]
checkmate,
@@ -5256,7 +5249,7 @@ fn canonicalize_and_constrain<'a>(
rigid_variables: module_output.rigid_variables,
abilities_store: module_output.scope.abilities_store,
loc_expects: module_output.loc_expects,
loc_dbgs: module_output.loc_dbgs,
has_dbgs: module_output.has_dbgs,
module_params: module_output.module_params,
};

View file

@@ -1,6 +1,6 @@
use crate::docs::ModuleDocumentation;
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::expr::{DbgLookup, ExpectLookup};
use roc_can::expr::ExpectLookup;
use roc_can::{
abilities::AbilitiesStore,
expr::{Declarations, PendingDerives},
@@ -223,7 +223,6 @@ pub struct Expectations {
pub subs: roc_types::subs::Subs,
pub path: PathBuf,
pub expectations: VecMap<Region, Vec<ExpectLookup>>,
pub dbgs: VecMap<Symbol, DbgLookup>,
pub ident_ids: IdentIds,
}

View file

@@ -198,18 +198,6 @@ pub(crate) fn infer_borrow_signatures<'a>(
borrow_signatures
}
#[allow(unused)]
fn infer_borrow_signature<'a>(
arena: &'a Bump,
interner: &impl LayoutInterner<'a>,
borrow_signatures: &'a mut BorrowSignatures<'a>,
proc: &Proc<'a>,
) -> BorrowSignature {
let mut state = State::new(arena, interner, borrow_signatures, proc);
state.inspect_stmt(interner, borrow_signatures, &proc.body);
state.borrow_signature
}
struct State<'state, 'arena> {
/// Argument symbols with a layout of `List *` or `Str`, i.e. the layouts
/// for which borrow inference might decide to pass as borrowed
@@ -235,29 +223,6 @@ fn layout_to_ownership<'a>(
}
impl<'state, 'a> State<'state, 'a> {
fn new(
arena: &'a Bump,
interner: &impl LayoutInterner<'a>,
borrow_signatures: &mut BorrowSignatures<'a>,
proc: &Proc<'a>,
) -> Self {
let key = (proc.name.name(), proc.proc_layout(arena));
// initialize the borrow signature based on the layout if first time
let borrow_signature = borrow_signatures
.procs
.entry(key)
.or_insert_with(|| BorrowSignature::from_layouts(interner, key.1.arguments.iter()));
Self {
args: proc.args,
borrow_signature: *borrow_signature,
join_point_stack: Vec::new_in(arena),
join_points: MutMap::default(),
modified: false,
}
}
/// Mark the given argument symbol as Owned if the symbol participates in borrow inference
///
/// Currently argument symbols participate if `layout_to_ownership` returns `Borrowed` for their layout.

View file

@@ -2228,22 +2228,6 @@ typeName = \types, id ->
TagUnion (SingleTagStruct { name }) -> escapeKW name
Function { functionName } -> escapeKW functionName
getSizeRoundedToAlignment = \types, id ->
alignment = Types.alignment types id
Types.size types id
|> roundUpToAlignment alignment
roundUpToAlignment = \width, alignment ->
when alignment is
0 -> width
1 -> width
_ ->
if width % alignment > 0 then
width + alignment - (width % alignment)
else
width
archName = \arch ->
when arch is
Aarch32 ->

View file

@@ -214,7 +214,6 @@ impl CompletionVisitor<'_> {
DeclarationInfo::Value {
expr_var, pattern, ..
} => self.patterns(pattern, expr_var),
DeclarationInfo::Return { .. } => vec![],
DeclarationInfo::Function {
expr_var,
pattern,

View file

@@ -8,10 +8,7 @@ use roc_can::{
use roc_constrain::expr::{constrain_expr, Env};
use roc_module::symbol::ModuleId;
use roc_region::all::Region;
use roc_types::{
subs::{Subs, Variable},
types::Types,
};
use roc_types::{subs::Variable, types::Types};
#[derive(Debug)]
pub struct ConstrainedExprOut {
@@ -23,12 +20,9 @@ pub struct ConstrainedExprOut {
pub region: Region,
}
#[allow(dead_code)]
#[derive(Default)]
pub struct ConstrainedExpr {
can_expr: CanExpr,
subs: Subs,
constraints: Constraints,
}
impl ConstrainedExpr {