Merge remote-tracking branch 'remote/main' into builtin-json

Luke Boswell 2023-04-11 20:11:40 +10:00
commit 0f68f77deb
No known key found for this signature in database
GPG key ID: F6DB3C9DB47377B0
46 changed files with 2320 additions and 338 deletions

View file

@ -21,12 +21,12 @@
## result : Result Language _
## result =
## jsonStr
## |> Decode.fromBytes fromUtf8 # returns `Ok {name : "Röc Lang"}`
## |> Decode.fromBytes Json.fromUtf8 # returns `Ok {name : "Röc Lang"}`
##
## name =
## decodedValue <- Result.map result
##
## Encode.toBytes decodedValue.name toUtf8
## Encode.toBytes decodedValue.name Json.toUtf8
##
## expect name == Ok (Str.toUtf8 "\"Röc Lang\"")
## ```

View file

@ -1,4 +1,6 @@
use std::cell::Cell;
use std::path::PathBuf;
use std::sync::Arc;
use crate::abilities::SpecializationId;
use crate::exhaustive::{ExhaustiveContext, SketchedRows};
@ -599,6 +601,7 @@ impl Constraints {
| Constraint::PatternPresence(_, _, _, _)
| Constraint::Exhaustive { .. }
| Constraint::Resolve(..)
| Constraint::IngestedFile(..)
| Constraint::CheckCycle(..) => false,
}
}
@ -673,6 +676,15 @@ impl Constraints {
Constraint::CheckCycle(cycle_index, cycle_mark)
}
pub fn ingested_file(
&mut self,
type_index: TypeOrVar,
file_path: Box<PathBuf>,
bytes: Arc<Vec<u8>>,
) -> Constraint {
Constraint::IngestedFile(type_index, file_path, bytes)
}
}
roc_error_macros::assert_sizeof_default!(Constraint, 3 * 8);
@ -734,7 +746,7 @@ pub struct OpportunisticResolve {
pub specialization_id: SpecializationId,
}
#[derive(Clone, Copy)]
#[derive(Clone)]
pub enum Constraint {
Eq(Eq),
Store(TypeOrVar, Variable, Index<&'static str>, u32),
@ -773,6 +785,8 @@ pub enum Constraint {
/// Attempt to resolve a specialization.
Resolve(OpportunisticResolve),
CheckCycle(Index<Cycle>, IllegalCycleMark),
IngestedFile(TypeOrVar, Box<PathBuf>, Arc<Vec<u8>>),
}
#[derive(Debug, Clone, Copy, Default)]
@ -856,6 +870,9 @@ impl std::fmt::Debug for Constraint {
Self::CheckCycle(arg0, arg1) => {
write!(f, "CheckCycle({:?}, {:?})", arg0, arg1)
}
Self::IngestedFile(arg0, arg1, arg2) => {
write!(f, "IngestedFile({:?}, {:?}, {:?})", arg0, arg1, arg2)
}
}
}
}

View file

@ -277,6 +277,9 @@ fn deep_copy_expr_help<C: CopyEnv>(env: &mut C, copied: &mut Vec<Variable>, expr
Float(v1, v2, str, val, bound) => Float(sub!(*v1), sub!(*v2), str.clone(), *val, *bound),
Str(str) => Str(str.clone()),
SingleQuote(v1, v2, char, bound) => SingleQuote(sub!(*v1), sub!(*v2), *char, *bound),
IngestedFile(file_path, bytes, var) => {
IngestedFile(file_path.clone(), bytes.clone(), sub!(*var))
}
List {
elem_var,
loc_elems,

View file

@ -165,6 +165,9 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
Num(_, n, _, _) | Int(_, _, n, _, _) | Float(_, _, n, _, _) => f.text(&**n),
Str(s) => f.text(format!(r#""{}""#, s)),
SingleQuote(_, _, c, _) => f.text(format!("'{}'", c)),
IngestedFile(file_path, bytes, _) => {
f.text(format!("<ingested {:?}, {} bytes>", file_path, bytes.len()))
}
List {
elem_var: _,
loc_elems,

View file

@ -27,6 +27,10 @@ use roc_types::num::SingleQuoteBound;
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, RedundantMark, VarStore, Variable};
use roc_types::types::{Alias, Category, IndexOrField, LambdaSet, OptAbleVar, Type};
use std::fmt::{Debug, Display};
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::sync::Arc;
use std::{char, u32};
/// Derives that an opaque type has claimed, to be checked and recorded after solving.
@ -100,6 +104,9 @@ pub enum Expr {
loc_elems: Vec<Loc<Expr>>,
},
// An ingested file, its bytes, and the type variable.
IngestedFile(Box<PathBuf>, Arc<Vec<u8>>, Variable),
// Lookups
Var(Symbol, Variable),
AbilityMember(
@ -297,6 +304,7 @@ impl Expr {
Self::Int(..) => Category::Int,
Self::Float(..) => Category::Frac,
Self::Str(..) => Category::Str,
Self::IngestedFile(file_path, _, _) => Category::IngestedFile(file_path.clone()),
Self::SingleQuote(..) => Category::Character,
Self::List { .. } => Category::List,
&Self::Var(sym, _) => Category::Lookup(sym),
@ -729,6 +737,48 @@ pub fn canonicalize_expr<'a>(
ast::Expr::Str(literal) => flatten_str_literal(env, var_store, scope, literal),
ast::Expr::IngestedFile(file_path, _) => match File::open(file_path) {
Ok(mut file) => {
let mut bytes = vec![];
match file.read_to_end(&mut bytes) {
Ok(_) => (
Expr::IngestedFile(
file_path.to_path_buf().into(),
Arc::new(bytes),
var_store.fresh(),
),
Output::default(),
),
Err(e) => {
env.problems.push(Problem::FileProblem {
filename: file_path.to_path_buf(),
error: e.kind(),
});
// This will not manifest as a real runtime error and is just returned to have a value here.
// The pushed FileProblem will be fatal to compilation.
(
Expr::RuntimeError(roc_problem::can::RuntimeError::NoImplementation),
Output::default(),
)
}
}
}
Err(e) => {
env.problems.push(Problem::FileProblem {
filename: file_path.to_path_buf(),
error: e.kind(),
});
// This will not manifest as a real runtime error and is just returned to have a value here.
// The pushed FileProblem will be fatal to compilation.
(
Expr::RuntimeError(roc_problem::can::RuntimeError::NoImplementation),
Output::default(),
)
}
},
ast::Expr::SingleQuote(string) => {
let mut it = string.chars().peekable();
if let Some(char) = it.next() {
@ -1854,6 +1904,7 @@ pub fn inline_calls(var_store: &mut VarStore, expr: Expr) -> Expr {
| other @ Int(..)
| other @ Float(..)
| other @ Str { .. }
| other @ IngestedFile(..)
| other @ SingleQuote(..)
| other @ RuntimeError(_)
| other @ EmptyRecord
@ -2985,6 +3036,7 @@ pub(crate) fn get_lookup_symbols(expr: &Expr) -> Vec<ExpectLookup> {
| Expr::Float(_, _, _, _, _)
| Expr::Int(_, _, _, _, _)
| Expr::Str(_)
| Expr::IngestedFile(..)
| Expr::ZeroArgumentTag { .. }
| Expr::RecordAccessor(_)
| Expr::SingleQuote(..)

View file

@ -1093,6 +1093,7 @@ fn fix_values_captured_in_closure_expr(
| Float(..)
| Str(_)
| SingleQuote(..)
| IngestedFile(..)
| Var(..)
| AbilityMember(..)
| EmptyRecord

View file

@ -138,6 +138,7 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Loc<Expr<'a>>) -> &'a Loc
| PrecedenceConflict { .. }
| Tag(_)
| OpaqueRef(_)
| IngestedFile(_, _)
| Crash => loc_expr,
TupleAccess(sub_expr, paths) => {

View file

@ -257,6 +257,7 @@ pub fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr, var: Variable) {
Expr::Int(..) => { /* terminal */ }
Expr::Float(..) => { /* terminal */ }
Expr::Str(..) => { /* terminal */ }
Expr::IngestedFile(..) => { /* terminal */ }
Expr::SingleQuote(..) => { /* terminal */ }
Expr::List {
elem_var,

View file

@ -375,6 +375,20 @@ pub fn constrain_expr(
let expected_index = expected;
constraints.equal_types(str_index, expected_index, Category::Str, region)
}
IngestedFile(file_path, bytes, var) => {
let index = constraints.push_variable(*var);
let eq_con = constraints.equal_types(
index,
expected,
Category::IngestedFile(file_path.clone()),
region,
);
let ingested_con = constraints.ingested_file(index, file_path.clone(), bytes.clone());
// First resolve the type variable with the eq_con, then try to ingest the file into the correct type.
let and_constraint = constraints.and_constraint(vec![eq_con, ingested_con]);
constraints.exists([*var], and_constraint)
}
SingleQuote(num_var, precision_var, _, bound) => single_quote_literal(
types,
constraints,
@ -3943,6 +3957,7 @@ fn is_generalizable_expr(mut expr: &Expr) -> bool {
}
OpaqueRef { argument, .. } => expr = &argument.1.value,
Str(_)
| IngestedFile(..)
| List { .. }
| SingleQuote(_, _, _, _)
| When { .. }

View file

@ -44,6 +44,7 @@ impl<'a> Formattable for Expr<'a> {
| MalformedClosure
| Tag(_)
| OpaqueRef(_)
| IngestedFile(_, _)
| Crash => false,
// These expressions always have newlines
@ -477,6 +478,7 @@ impl<'a> Formattable for Expr<'a> {
}
MalformedClosure => {}
PrecedenceConflict { .. } => {}
IngestedFile(_, _) => {}
}
}
}

View file

@ -507,5 +507,11 @@ fn fmt_imports_entry<'a, 'buf>(buf: &mut Buf<'buf>, entry: &ImportsEntry<'a>, in
fmt_collection(buf, indent, Braces::Curly, *entries, Newlines::No)
}
}
IngestedFile(file_name, typed_ident) => {
fmt_str_literal(buf, *file_name, indent);
buf.push_str_allow_spaces(" as ");
typed_ident.format(buf, 0);
}
}
}

View file

@ -420,6 +420,9 @@ impl<'a> RemoveSpaces<'a> for ImportsEntry<'a> {
match *self {
ImportsEntry::Module(a, b) => ImportsEntry::Module(a, b.remove_spaces(arena)),
ImportsEntry::Package(a, b, c) => ImportsEntry::Package(a, b, c.remove_spaces(arena)),
ImportsEntry::IngestedFile(a, b) => {
ImportsEntry::IngestedFile(a, b.remove_spaces(arena))
}
}
}
}
@ -655,6 +658,7 @@ impl<'a> RemoveSpaces<'a> for Expr<'a> {
is_negative,
},
Expr::Str(a) => Expr::Str(a.remove_spaces(arena)),
Expr::IngestedFile(a, b) => Expr::IngestedFile(a, b),
Expr::RecordAccess(a, b) => Expr::RecordAccess(arena.alloc(a.remove_spaces(arena)), b),
Expr::AccessorFunction(a) => Expr::AccessorFunction(a),
Expr::TupleAccess(a, b) => Expr::TupleAccess(arena.alloc(a.remove_spaces(arena)), b),

File diff suppressed because it is too large

View file

@ -77,4 +77,24 @@ macro_rules! disassembler_test {
}
}
}};
($assemble_fn: expr, $format_fn: expr, $iter:expr, $iter2:expr, $iter3:expr, $iter4:expr) => {{
use $crate::generic64::disassembler_test_macro::merge_instructions_without_line_numbers;
let arena = bumpalo::Bump::new();
let (mut buf, cs) = setup_capstone_and_arena(&arena);
for i in $iter.iter() {
for i2 in $iter2.iter() {
for i3 in $iter3.iter() {
for i4 in $iter4.iter() {
buf.clear();
$assemble_fn(&mut buf, *i, *i2, *i3, *i4);
let instructions = cs.disasm_all(&buf, 0).unwrap();
assert_eq!(
$format_fn(*i, *i2, *i3, *i4),
merge_instructions_without_line_numbers(instructions)
);
}
}
}
}
}};
}

View file

@ -822,7 +822,11 @@ impl<
.storage_manager
.load_to_general_reg(&mut self.buf, cond_symbol);
// this state is updated destructively in the branches. We don't want the branches to
// influence each other, so we must clone here.
let mut base_storage = self.storage_manager.clone();
let base_literal_map = self.literal_map.clone();
let mut max_branch_stack_size = 0;
let mut ret_jumps = bumpalo::vec![in self.env.arena];
let mut tmp = bumpalo::vec![in self.env.arena];
@ -836,6 +840,7 @@ impl<
// Build all statements in this branch. Using storage as from before any branch.
self.storage_manager = base_storage.clone();
self.literal_map = base_literal_map.clone();
self.build_stmt(stmt, ret_layout);
// Build unconditional jump to the end of this switch.
@ -858,6 +863,7 @@ impl<
base_storage.update_fn_call_stack_size(self.storage_manager.fn_call_stack_size());
}
self.storage_manager = base_storage;
self.literal_map = base_literal_map;
self.storage_manager
.update_stack_size(max_branch_stack_size);
let (_branch_info, stmt) = default_branch;

View file

@ -554,7 +554,10 @@ impl<
let field_size = layout_interner.stack_size(*layout);
data_offset += field_size as i32;
}
debug_assert!(data_offset < base_offset + size as i32);
// check that the record completely contains the field
debug_assert!(data_offset <= base_offset + size as i32);
let layout = field_layouts[index as usize];
let size = layout_interner.stack_size(layout);
self.allocation_map.insert(*sym, owned_data);
@ -686,14 +689,11 @@ impl<
let (data_size, data_alignment) =
union_layout.data_size_and_alignment(layout_interner, self.target_info);
let id_offset = data_size - data_alignment;
if data_alignment < 8 || data_alignment % 8 != 0 {
todo!("small/unaligned tagging");
}
let base_offset = self.claim_stack_area(sym, data_size);
let mut current_offset = base_offset;
for (field, field_layout) in
fields.iter().zip(field_layouts[tag_id as usize].iter())
{
let it = fields.iter().zip(field_layouts[tag_id as usize].iter());
for (field, field_layout) in it {
self.copy_symbol_to_stack_offset(
layout_interner,
buf,
@ -704,10 +704,20 @@ impl<
let field_size = layout_interner.stack_size(*field_layout);
current_offset += field_size as i32;
}
// put the tag id in the right place
self.with_tmp_general_reg(buf, |_symbol_storage, buf, reg| {
ASM::mov_reg64_imm64(buf, reg, tag_id as i64);
debug_assert!((base_offset + id_offset as i32) % 8 == 0);
ASM::mov_base32_reg64(buf, base_offset + id_offset as i32, reg);
let total_id_offset = base_offset as u32 + id_offset;
debug_assert!(total_id_offset % data_alignment == 0);
// pick the right instruction based on the width of the tag id
if field_layouts.len() <= u8::MAX as _ {
ASM::mov_base32_reg8(buf, total_id_offset as i32, reg);
} else {
ASM::mov_base32_reg16(buf, total_id_offset as i32, reg);
}
});
}
x => todo!("creating unions with layout: {:?}", x),

View file

@ -7,7 +7,7 @@ use bumpalo::collections::Vec;
use roc_builtins::bitcode::FloatWidth;
use roc_error_macros::internal_error;
use roc_module::symbol::Symbol;
use roc_mono::layout::{InLayout, Layout, LayoutInterner, STLayoutInterner};
use roc_mono::layout::{InLayout, Layout, LayoutInterner, STLayoutInterner, UnionLayout};
use super::{CompareOperation, RegisterWidth};
@ -511,6 +511,24 @@ impl X64_64SystemVStoreArgs {
}
self.tmp_stack_offset += size as i32;
}
Layout::Union(UnionLayout::NonRecursive(_)) => {
// for now, just also store this on the stack
let (base_offset, size) = storage_manager.stack_offset_and_size(&sym);
debug_assert_eq!(base_offset % 8, 0);
for i in (0..size as i32).step_by(8) {
X86_64Assembler::mov_reg64_base32(
buf,
Self::GENERAL_RETURN_REGS[0],
base_offset + i,
);
X86_64Assembler::mov_stack32_reg64(
buf,
self.tmp_stack_offset + i,
Self::GENERAL_RETURN_REGS[0],
);
}
self.tmp_stack_offset += size as i32;
}
_ => {
todo!("calling with arg type, {:?}", layout_interner.dbg(other));
}
@ -615,6 +633,11 @@ impl X64_64SystemVLoadArgs {
storage_manager.complex_stack_arg(&sym, self.argument_offset, stack_size);
self.argument_offset += stack_size as i32;
}
Layout::Union(UnionLayout::NonRecursive(_)) => {
// for now, just also store this on the stack
storage_manager.complex_stack_arg(&sym, self.argument_offset, stack_size);
self.argument_offset += stack_size as i32;
}
_ => {
todo!("Loading args with layout {:?}", layout_interner.dbg(other));
}

View file

@ -18,7 +18,7 @@ use roc_mono::ir::{
SelfRecursive, Stmt,
};
use roc_mono::layout::{
Builtin, InLayout, Layout, LayoutId, LayoutIds, LayoutInterner, STLayoutInterner, TagIdIntType,
Builtin, InLayout, Layout, LayoutIds, LayoutInterner, STLayoutInterner, TagIdIntType,
UnionLayout,
};
use roc_mono::list_element_layout;
@ -79,8 +79,40 @@ trait Backend<'a> {
&mut CodeGenHelp<'a>,
);
fn symbol_to_string(&self, symbol: Symbol, layout_id: LayoutId) -> String {
layout_id.to_symbol_string(symbol, self.interns())
fn function_symbol_to_string<'b, I>(
&self,
symbol: Symbol,
arguments: I,
_lambda_set: Option<InLayout>,
result: InLayout,
) -> String
where
I: Iterator<Item = InLayout<'b>>,
{
use std::hash::{BuildHasher, Hash, Hasher};
// NOTE: due to randomness, this will not be consistent between runs
let mut state = roc_collections::all::BuildHasher::default().build_hasher();
for a in arguments {
a.hash(&mut state);
}
// lambda set should not matter; it should already be added as an argument
// lambda_set.hash(&mut state);
result.hash(&mut state);
let interns = self.interns();
let ident_string = symbol.as_str(interns);
let module_string = interns.module_ids.get_name(symbol.module_id()).unwrap();
// the functions from the generated #help module (refcounting, equality) are always suffixed
// with 1. That is fine, they are always unique anyway.
if ident_string.contains("#help") {
format!("{}_{}_1", module_string, ident_string)
} else {
format!("{}_{}_{}", module_string, ident_string, state.finish())
}
}
fn defined_in_app_module(&self, symbol: Symbol) -> bool {
@ -119,8 +151,13 @@ trait Backend<'a> {
proc: Proc<'a>,
layout_ids: &mut LayoutIds<'a>,
) -> (Vec<u8>, Vec<Relocation>, Vec<'a, (Symbol, String)>) {
let layout_id = layout_ids.get(proc.name.name(), &proc.ret_layout);
let proc_name = self.symbol_to_string(proc.name.name(), layout_id);
let proc_name = self.function_symbol_to_string(
proc.name.name(),
proc.args.iter().map(|t| t.0),
proc.closure_data_layout,
proc.ret_layout,
);
self.reset(proc_name, proc.is_self_recursive);
self.load_args(proc.args, &proc.ret_layout);
for (layout, sym) in proc.args {
@ -304,8 +341,12 @@ trait Backend<'a> {
);
}
let layout_id = LayoutIds::default().get(func_sym.name(), layout);
let fn_name = self.symbol_to_string(func_sym.name(), layout_id);
let fn_name = self.function_symbol_to_string(
func_sym.name(),
arg_layouts.iter().copied(),
None,
*ret_layout,
);
// Now that the arguments are needed, load them if they are literals.
self.load_literal_symbols(arguments);
@ -1081,8 +1122,12 @@ trait Backend<'a> {
}
Symbol::LIST_GET | Symbol::LIST_SET | Symbol::LIST_REPLACE | Symbol::LIST_APPEND => {
// TODO: This is probably simple enough to be worth inlining.
let layout_id = LayoutIds::default().get(func_sym, ret_layout);
let fn_name = self.symbol_to_string(func_sym, layout_id);
let fn_name = self.function_symbol_to_string(
func_sym,
arg_layouts.iter().copied(),
None,
*ret_layout,
);
// Now that the arguments are needed, load them if they are literals.
self.load_literal_symbols(args);
self.build_fn_call(sym, fn_name, args, arg_layouts, ret_layout)
@ -1101,8 +1146,12 @@ trait Backend<'a> {
}
Symbol::STR_IS_VALID_SCALAR => {
// just call the function
let layout_id = LayoutIds::default().get(func_sym, ret_layout);
let fn_name = self.symbol_to_string(func_sym, layout_id);
let fn_name = self.function_symbol_to_string(
func_sym,
arg_layouts.iter().copied(),
None,
*ret_layout,
);
// Now that the arguments are needed, load them if they are literals.
self.load_literal_symbols(args);
self.build_fn_call(sym, fn_name, args, arg_layouts, ret_layout)
@ -1111,8 +1160,12 @@ trait Backend<'a> {
eprintln!("maybe {other:?} should have a custom implementation?");
// just call the function
let layout_id = LayoutIds::default().get(func_sym, ret_layout);
let fn_name = self.symbol_to_string(func_sym, layout_id);
let fn_name = self.function_symbol_to_string(
func_sym,
arg_layouts.iter().copied(),
None,
*ret_layout,
);
// Now that the arguments are needed, load them if they are literals.
self.load_literal_symbols(args);
self.build_fn_call(sym, fn_name, args, arg_layouts, ret_layout)

View file

@ -261,8 +261,13 @@ fn build_object<'a, B: Backend<'a>>(
// Names and linker data for helpers
for ((sym, layout), proc) in helper_symbols_and_layouts.into_iter().zip(helper_procs) {
let layout_id = layout_ids.get_toplevel(sym, &layout);
let fn_name = backend.symbol_to_string(sym, layout_id);
let fn_name = backend.function_symbol_to_string(
sym,
layout.arguments.iter().copied(),
None,
layout.result,
);
if let Some(proc_id) = output.symbol_id(fn_name.as_bytes()) {
if let SymbolSection::Section(section_id) = output.symbol(proc_id).section {
helper_names_symbols_procs.push((fn_name, section_id, proc_id, proc));
@ -327,8 +332,12 @@ fn build_proc_symbol<'a, B: Backend<'a>>(
layout: ProcLayout<'a>,
proc: Proc<'a>,
) {
let layout_id = layout_ids.get_toplevel(sym, &layout);
let base_name = backend.symbol_to_string(sym, layout_id);
let base_name = backend.function_symbol_to_string(
sym,
layout.arguments.iter().copied(),
None,
layout.result,
);
let fn_name = if backend.env().exposed_to_host.contains(&sym) {
layout_ids
@ -459,6 +468,7 @@ fn build_proc<'a, B: Backend<'a>>(
}
}
}
if let Some(sym_id) = output.symbol_id(name.as_bytes()) {
write::Relocation {
offset: offset + proc_offset,

View file

@ -40,7 +40,8 @@ use roc_mono::layout::{
};
use roc_packaging::cache::RocCacheDir;
use roc_parse::ast::{
self, CommentOrNewline, Defs, ExtractSpaces, Spaced, StrLiteral, TypeAnnotation,
self, CommentOrNewline, Defs, Expr, ExtractSpaces, Pattern, Spaced, StrLiteral, TypeAnnotation,
ValueDef,
};
use roc_parse::header::{
ExposedName, ImportsEntry, PackageEntry, PackageHeader, PlatformHeader, To, TypedIdent,
@ -50,7 +51,7 @@ use roc_parse::module::module_defs;
use roc_parse::parser::{FileError, Parser, SourceError, SyntaxError};
use roc_problem::Severity;
use roc_region::all::{LineInfo, Loc, Region};
use roc_reporting::report::{Annotation, Palette, RenderTarget};
use roc_reporting::report::{to_file_problem_report_string, Palette, RenderTarget};
use roc_solve::module::{extract_module_owned_implementations, Solved, SolvedModule};
use roc_solve_problem::TypeError;
use roc_target::TargetInfo;
@ -691,6 +692,7 @@ struct ModuleHeader<'a> {
header_comments: &'a [CommentOrNewline<'a>],
symbols_from_requires: Vec<(Loc<Symbol>, Loc<TypeAnnotation<'a>>)>,
module_timing: ModuleTiming,
defined_values: Vec<ValueDef<'a>>,
}
#[derive(Debug)]
@ -1789,7 +1791,7 @@ fn state_thread_step<'a>(
Ok(ControlFlow::Break(LoadResult::Monomorphized(monomorphized)))
}
Msg::FailedToReadFile { filename, error } => {
let buf = to_file_problem_report(&filename, error);
let buf = to_file_problem_report_string(&filename, error);
Err(LoadingProblem::FormattedReport(buf))
}
@ -1937,7 +1939,9 @@ pub fn report_loading_problem(
)
}
LoadingProblem::FormattedReport(report) => report,
LoadingProblem::FileProblem { filename, error } => to_file_problem_report(&filename, error),
LoadingProblem::FileProblem { filename, error } => {
to_file_problem_report_string(&filename, error)
}
err => todo!("Loading error: {:?}", err),
}
}
@ -3634,7 +3638,7 @@ fn load_package_from_disk<'a>(
&header,
comments,
pkg_module_timing,
);
)?;
Ok(Msg::Header(package_module_msg))
}
@ -3665,7 +3669,7 @@ fn load_package_from_disk<'a>(
&header,
comments,
pkg_module_timing,
);
)?;
Ok(Msg::Header(platform_module_msg))
}
@ -3761,19 +3765,20 @@ fn load_builtin_module<'a>(
module_timing: ModuleTiming,
module_id: ModuleId,
module_name: &str,
) -> (ModuleId, Msg<'a>) {
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
let src_bytes = module_source(module_id);
let (info, parse_state) = load_builtin_module_help(arena, module_name, src_bytes);
let (module_id, _, header) = build_header(
arena,
info,
parse_state,
module_ids,
ident_ids_by_module,
module_timing,
);
(module_id, Msg::Header(header))
)?;
Ok((module_id, Msg::Header(header)))
}
/// Load a module by its module name, rather than by its filename
@ -3809,7 +3814,7 @@ fn load_module<'a>(
module_timing,
$module_id,
concat!($name, ".roc")
);
)?;
return Ok(HeaderOutput { module_id, msg, opt_platform_shorthand: None });
}
@ -4047,12 +4052,13 @@ fn parse_header<'a>(
};
let (module_id, module_name, header) = build_header(
arena,
info,
parse_state.clone(),
module_ids,
ident_ids_by_module,
module_timing,
);
)?;
if let Some(expected_module_name) = opt_expected_module_name {
if expected_module_name != module_name {
@ -4101,12 +4107,13 @@ fn parse_header<'a>(
};
let (module_id, _, header) = build_header(
arena,
info,
parse_state,
module_ids,
ident_ids_by_module,
module_timing,
);
)?;
Ok(HeaderOutput {
module_id,
@ -4162,12 +4169,13 @@ fn parse_header<'a>(
};
let (module_id, _, resolved_header) = build_header(
arena,
info,
parse_state,
module_ids.clone(),
ident_ids_by_module.clone(),
module_timing,
);
)?;
let mut messages = Vec::with_capacity(packages.len() + 1);
@ -4226,7 +4234,7 @@ fn parse_header<'a>(
&header,
comments,
module_timing,
);
)?;
Ok(HeaderOutput {
module_id,
@ -4261,7 +4269,7 @@ fn parse_header<'a>(
&header,
comments,
module_timing,
);
)?;
Ok(HeaderOutput {
module_id,
@ -4429,12 +4437,13 @@ struct HeaderInfo<'a> {
}
fn build_header<'a>(
arena: &'a Bump,
info: HeaderInfo<'a>,
parse_state: roc_parse::state::State<'a>,
module_ids: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: SharedIdentIdsByModule,
module_timing: ModuleTiming,
) -> (ModuleId, PQModuleName<'a>, ModuleHeader<'a>) {
) -> Result<(ModuleId, PQModuleName<'a>, ModuleHeader<'a>), LoadingProblem<'a>> {
let HeaderInfo {
filename,
is_root_module,
@ -4489,12 +4498,16 @@ fn build_header<'a>(
Vec::with_capacity(imports.len());
let mut scope_size = 0;
let mut defined_values = vec![];
for loc_entry in imports {
let (qualified_module_name, exposed) = exposed_from_import(&loc_entry.value);
if let Some((qualified_module_name, exposed)) = exposed_from_import(&loc_entry.value) {
scope_size += num_exposes;
scope_size += num_exposes;
imported.push((qualified_module_name, exposed, loc_entry.region));
imported.push((qualified_module_name, exposed, loc_entry.region));
}
if let Some(value) = value_def_from_imports(arena, &filename, loc_entry)? {
defined_values.push(value);
}
}
let mut exposed: Vec<Symbol> = Vec::with_capacity(num_exposes);
@ -4721,7 +4734,7 @@ fn build_header<'a>(
}
};
(
Ok((
home,
name,
ModuleHeader {
@ -4740,8 +4753,9 @@ fn build_header<'a>(
header_type,
header_comments,
module_timing,
defined_values,
},
)
))
}
impl<'a> BuildTask<'a> {
@ -5268,7 +5282,7 @@ fn build_package_header<'a>(
header: &PackageHeader<'a>,
comments: &'a [CommentOrNewline<'a>],
module_timing: ModuleTiming,
) -> (ModuleId, PQModuleName<'a>, ModuleHeader<'a>) {
) -> Result<(ModuleId, PQModuleName<'a>, ModuleHeader<'a>), LoadingProblem<'a>> {
let exposes = bumpalo::collections::Vec::from_iter_in(
unspace(arena, header.exposes.item.items).iter().copied(),
arena,
@ -5298,6 +5312,7 @@ fn build_package_header<'a>(
};
build_header(
arena,
info,
parse_state,
module_ids,
@ -5318,7 +5333,7 @@ fn build_platform_header<'a>(
header: &PlatformHeader<'a>,
comments: &'a [CommentOrNewline<'a>],
module_timing: ModuleTiming,
) -> (ModuleId, PQModuleName<'a>, ModuleHeader<'a>) {
) -> Result<(ModuleId, PQModuleName<'a>, ModuleHeader<'a>), LoadingProblem<'a>> {
// If we have an app module, then it's the root module;
// otherwise, we must be the root.
let is_root_module = opt_app_module_id.is_none();
@ -5363,6 +5378,7 @@ fn build_platform_header<'a>(
};
build_header(
arena,
info,
parse_state,
module_ids,
@ -5568,7 +5584,7 @@ fn parse<'a>(arena: &'a Bump, header: ModuleHeader<'a>) -> Result<Msg<'a>, Loadi
let parse_start = Instant::now();
let source = header.parse_state.original_bytes();
let parse_state = header.parse_state;
let parsed_defs = match module_defs().parse(arena, parse_state.clone(), 0) {
let mut parsed_defs = match module_defs().parse(arena, parse_state.clone(), 0) {
Ok((_, success, _state)) => success,
Err((_, fail)) => {
return Err(LoadingProblem::ParsingFailed(
@ -5576,6 +5592,10 @@ fn parse<'a>(arena: &'a Bump, header: ModuleHeader<'a>) -> Result<Msg<'a>, Loadi
));
}
};
for value in header.defined_values.into_iter() {
// TODO: should these have a region?
parsed_defs.push_value_def(value, Region::zero(), &[], &[]);
}
// Record the parse end time once, to avoid checking the time a second time
// immediately afterward (for the beginning of canonicalization).
@ -5620,7 +5640,9 @@ fn parse<'a>(arena: &'a Bump, header: ModuleHeader<'a>) -> Result<Msg<'a>, Loadi
Ok(Msg::Parsed(parsed))
}
fn exposed_from_import<'a>(entry: &ImportsEntry<'a>) -> (QualifiedModuleName<'a>, Vec<Loc<Ident>>) {
fn exposed_from_import<'a>(
entry: &ImportsEntry<'a>,
) -> Option<(QualifiedModuleName<'a>, Vec<Loc<Ident>>)> {
use roc_parse::header::ImportsEntry::*;
match entry {
@ -5636,7 +5658,7 @@ fn exposed_from_import<'a>(entry: &ImportsEntry<'a>) -> (QualifiedModuleName<'a>
module: module_name.as_str().into(),
};
(qualified_module_name, exposed)
Some((qualified_module_name, exposed))
}
Package(package_name, module_name, exposes) => {
@ -5651,11 +5673,70 @@ fn exposed_from_import<'a>(entry: &ImportsEntry<'a>) -> (QualifiedModuleName<'a>
module: module_name.as_str().into(),
};
(qualified_module_name, exposed)
Some((qualified_module_name, exposed))
}
IngestedFile(_, _) => None,
}
}
fn value_def_from_imports<'a>(
arena: &'a Bump,
header_path: &Path,
entry: &Loc<ImportsEntry<'a>>,
) -> Result<Option<ValueDef<'a>>, LoadingProblem<'a>> {
use roc_parse::header::ImportsEntry::*;
let value = match entry.value {
Module(_, _) => None,
Package(_, _, _) => None,
IngestedFile(ingested_path, typed_ident) => {
let file_path = if let StrLiteral::PlainLine(ingested_path) = ingested_path {
let mut file_path = header_path.to_path_buf();
// Remove the header file name and push the new path.
file_path.pop();
file_path.push(ingested_path);
match fs::metadata(&file_path) {
Ok(md) => {
if md.is_dir() {
return Err(LoadingProblem::FileProblem {
filename: file_path,
// TODO: change to IsADirectory once that is stable.
error: io::ErrorKind::InvalidInput,
});
}
file_path
}
Err(e) => {
return Err(LoadingProblem::FileProblem {
filename: file_path,
error: e.kind(),
});
}
}
} else {
todo!(
"Only plain strings are supported. Other cases should be made impossible here"
);
};
let typed_ident = typed_ident.extract_spaces().item;
let ident = arena.alloc(typed_ident.ident.map_owned(Pattern::Identifier));
let ann_type = arena.alloc(typed_ident.ann);
Some(ValueDef::AnnotatedBody {
ann_pattern: ident,
ann_type,
comment: None,
body_pattern: ident,
body_expr: arena
.alloc(entry.with_value(Expr::IngestedFile(arena.alloc(file_path), ann_type))),
})
}
};
Ok(value)
}
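In effect, value_def_from_imports turns each ingested-file import into an annotated value definition, which parse() then appends to the module's parsed defs via push_value_def (earlier in this diff). Roughly, for a hypothetical header entry:

# imports ["blob.txt" as blob : Str] behaves as if the module also contained:
blob : Str
blob = <bytes of "blob.txt", read during canonicalization>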
fn ident_from_exposed(entry: &Spaced<'_, ExposedName<'_>>) -> Ident {
entry.extract_spaces().item.as_str().into()
}
@ -6476,87 +6557,6 @@ fn run_task<'a>(
Ok(())
}
fn to_file_problem_report(filename: &Path, error: io::ErrorKind) -> String {
use roc_reporting::report::{Report, RocDocAllocator, DEFAULT_PALETTE};
use ven_pretty::DocAllocator;
let src_lines: Vec<&str> = Vec::new();
let mut module_ids = ModuleIds::default();
let module_id = module_ids.get_or_insert(&"find module name somehow?".into());
let interns = Interns::default();
// Report parsing and canonicalization problems
let alloc = RocDocAllocator::new(&src_lines, module_id, &interns);
let report = match error {
io::ErrorKind::NotFound => {
let doc = alloc.stack([
alloc.reflow(r"I am looking for this file, but it's not there:"),
alloc
.parser_suggestion(filename.to_str().unwrap())
.indent(4),
alloc.concat([
alloc.reflow(r"Is the file supposed to be there? "),
alloc.reflow("Maybe there is a typo in the file name?"),
]),
]);
Report {
filename: "UNKNOWN.roc".into(),
doc,
title: "FILE NOT FOUND".to_string(),
severity: Severity::RuntimeError,
}
}
io::ErrorKind::PermissionDenied => {
let doc = alloc.stack([
alloc.reflow(r"I don't have the required permissions to read this file:"),
alloc
.parser_suggestion(filename.to_str().unwrap())
.indent(4),
alloc
.concat([alloc.reflow(r"Is it the right file? Maybe change its permissions?")]),
]);
Report {
filename: "UNKNOWN.roc".into(),
doc,
title: "FILE PERMISSION DENIED".to_string(),
severity: Severity::RuntimeError,
}
}
_ => {
let error = std::io::Error::from(error);
let formatted = format!("{}", error);
let doc = alloc.stack([
alloc.reflow(r"I tried to read this file:"),
alloc
.text(filename.to_str().unwrap())
.annotate(Annotation::Error)
.indent(4),
alloc.reflow(r"But ran into:"),
alloc.text(formatted).annotate(Annotation::Error).indent(4),
]);
Report {
filename: "UNKNOWN.roc".into(),
doc,
title: "FILE PROBLEM".to_string(),
severity: Severity::RuntimeError,
}
}
};
let mut buf = String::new();
let palette = DEFAULT_PALETTE;
report.render_color_terminal(&mut buf, &alloc, &palette);
buf
}
fn to_import_cycle_report(
module_ids: ModuleIds,
all_ident_ids: IdentIdsByModule,

View file

@ -0,0 +1,5 @@
interface IngestedFile
exposes [str]
imports ["IngestedFile.roc" as foo : Str]
str = foo

View file

@ -0,0 +1,5 @@
interface IngestedFileBytes
exposes [str]
imports ["IngestedFileBytes.roc" as foo : List U8]
str = Str.fromUtf8 foo |> Result.withDefault ""

View file

@ -0,0 +1,8 @@
interface MissingIngestedFile
exposes [unit]
imports ["ThisFileIsMissing" as data: List U8]
Unit : [Unit]
unit : Unit
unit = Unit
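Taken together, these fixtures exercise the new ingested-file imports syntax. As a further illustration (a sketch only, with invented file and module names), an interface could ingest one file as bytes and another as text:

interface Assets
    exposes [logo, readme]
    imports ["logo.png" as logoBytes : List U8, "readme.md" as readmeText : Str]

logo = logoBytes

readme = readmeText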

View file

@ -572,6 +572,34 @@ fn imported_dep_regression() {
);
}
#[test]
fn ingested_file() {
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "IngestedFile", subs_by_module);
expect_types(
loaded_module,
hashmap! {
"foo" => "Str",
"str" => "Str",
},
);
}
#[test]
fn ingested_file_bytes() {
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "IngestedFileBytes", subs_by_module);
expect_types(
loaded_module,
hashmap! {
"foo" => "List U8",
"str" => "Str",
},
);
}
#[test]
fn parse_problem() {
let modules = vec![(
@ -639,6 +667,20 @@ fn imported_file_not_found() {
);
}
#[test]
#[should_panic(expected = "FILE NOT FOUND")]
fn ingested_file_not_found() {
let subs_by_module = Default::default();
let loaded_module = load_fixture("no_deps", "MissingIngestedFile", subs_by_module);
expect_types(
loaded_module,
hashmap! {
"str" => "Str",
},
);
}
#[test]
fn platform_does_not_exist() {
let modules = vec![(

View file

@ -4158,6 +4158,41 @@ pub fn with_hole<'a>(
hole,
),
IngestedFile(_, bytes, var) => {
let interned = layout_cache.from_var(env.arena, var, env.subs).unwrap();
let layout = layout_cache.get_in(interned);
match layout {
Layout::Builtin(Builtin::List(elem_layout)) if elem_layout == Layout::U8 => {
let mut elements = Vec::with_capacity_in(bytes.len(), env.arena);
for byte in bytes.iter() {
elements.push(ListLiteralElement::Literal(Literal::Byte(*byte)));
}
let expr = Expr::Array {
elem_layout,
elems: elements.into_bump_slice(),
};
Stmt::Let(assigned, expr, interned, hole)
}
Layout::Builtin(Builtin::Str) => Stmt::Let(
assigned,
Expr::Literal(Literal::Str(
// This is safe because we ensure the utf8 bytes are valid earlier in the compiler pipeline.
arena.alloc(
unsafe { std::str::from_utf8_unchecked(bytes.as_ref()) }.to_owned(),
),
)),
Layout::STR,
hole,
),
_ => {
// This will not manifest as a real runtime error and is just returned to have a value here.
// The actual type error during solve will be fatal.
runtime_error(env, "Invalid type for ingested file")
}
}
}
SingleQuote(_, _, character, _) => {
let layout = layout_cache
.from_var(env.arena, variable, env.subs)

View file

@ -1,4 +1,5 @@
use std::fmt::Debug;
use std::path::Path;
use crate::header::{AppHeader, HostedHeader, InterfaceHeader, PackageHeader, PlatformHeader};
use crate::ident::Accessor;
@ -263,6 +264,9 @@ pub enum Expr<'a> {
Tuple(Collection<'a, &'a Loc<Expr<'a>>>),
// The name of a file to be ingested directly into a variable.
IngestedFile(&'a Path, &'a Loc<TypeAnnotation<'a>>),
// Lookups
Var {
module_name: &'a str, // module_name will only be filled if the original Roc code stated something like `5 + SomeModule.myVar`, module_name will be blank if it was `5 + myVar`
@ -1465,6 +1469,7 @@ impl<'a> Malformed for Expr<'a> {
Tag(_) |
OpaqueRef(_) |
SingleQuote(_) | // This is just a &str - not a bunch of segments
IngestedFile(_, _) |
Crash => false,
Str(inner) => inner.is_malformed(),

View file

@ -1906,7 +1906,8 @@ fn expr_to_pattern_help<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<
is_negative,
},
// These would not have parsed as patterns
Expr::AccessorFunction(_)
Expr::IngestedFile(_, _)
| Expr::AccessorFunction(_)
| Expr::RecordAccess(_, _)
| Expr::TupleAccess(_, _)
| Expr::List { .. }

View file

@ -277,6 +277,9 @@ pub enum ImportsEntry<'a> {
ModuleName<'a>,
Collection<'a, Loc<Spaced<'a, ExposedName<'a>>>>,
),
/// e.g "path/to/my/file.txt" as myFile : Str
IngestedFile(StrLiteral<'a>, Spaced<'a, TypedIdent<'a>>),
}
/// e.g.

View file

@ -9,12 +9,12 @@ use crate::header::{
use crate::ident::{self, lowercase_ident, unqualified_ident, uppercase, UppercaseIdent};
use crate::parser::Progress::{self, *};
use crate::parser::{
backtrackable, increment_min_indent, optional, reset_min_indent, specialize, word1, EExposes,
EGenerates, EGeneratesWith, EHeader, EImports, EPackages, EProvides, ERequires, ETypedIdent,
Parser, SourceError, SpaceProblem, SyntaxError,
backtrackable, increment_min_indent, optional, reset_min_indent, specialize, word1, word2,
EExposes, EGenerates, EGeneratesWith, EHeader, EImports, EPackages, EProvides, ERequires,
ETypedIdent, Parser, SourceError, SpaceProblem, SyntaxError,
};
use crate::state::State;
use crate::string_literal;
use crate::string_literal::{self, parse_str_literal};
use crate::type_annotation;
use roc_region::all::{Loc, Position};
@ -606,40 +606,69 @@ fn imports_entry<'a>() -> impl Parser<'a, Spaced<'a, ImportsEntry<'a>>, EImports
Option<Collection<'a, Loc<Spaced<'a, ExposedName<'a>>>>>,
);
map_with_arena!(
and!(
one_of!(
map!(
and!(
// e.g. `pf.`
optional(backtrackable(skip_second!(
shortname(),
word1(b'.', EImports::ShorthandDot)
))),
// e.g. `Task`
module_name_help(EImports::ModuleName)
and!(
// e.g. `pf.`
optional(backtrackable(skip_second!(
shortname(),
word1(b'.', EImports::ShorthandDot)
))),
// e.g. `Task`
module_name_help(EImports::ModuleName)
),
// e.g. `.{ Task, after}`
optional(skip_first!(
word1(b'.', EImports::ExposingDot),
collection_trailing_sep_e!(
word1(b'{', EImports::SetStart),
exposes_entry(EImports::Identifier),
word1(b',', EImports::SetEnd),
word1(b'}', EImports::SetEnd),
Spaced::SpaceBefore
)
))
),
// e.g. `.{ Task, after}`
optional(skip_first!(
word1(b'.', EImports::ExposingDot),
collection_trailing_sep_e!(
word1(b'{', EImports::SetStart),
exposes_entry(EImports::Identifier),
word1(b',', EImports::SetEnd),
word1(b'}', EImports::SetEnd),
Spaced::SpaceBefore
)
))
),
|_arena, ((opt_shortname, module_name), opt_values): Temp<'a>| {
let exposed_values = opt_values.unwrap_or_else(Collection::empty);
|((opt_shortname, module_name), opt_values): Temp<'a>| {
let exposed_values = opt_values.unwrap_or_else(Collection::empty);
let entry = match opt_shortname {
Some(shortname) => ImportsEntry::Package(shortname, module_name, exposed_values),
let entry = match opt_shortname {
Some(shortname) => {
ImportsEntry::Package(shortname, module_name, exposed_values)
}
None => ImportsEntry::Module(module_name, exposed_values),
};
None => ImportsEntry::Module(module_name, exposed_values),
};
Spaced::Item(entry)
}
Spaced::Item(entry)
}
)
.trace("normal_import"),
map!(
and!(
and!(
// e.g. "filename"
// TODO: str literal allows for multiline strings. We probably don't want that for file names.
specialize(|_, pos| EImports::StrLiteral(pos), parse_str_literal()),
// e.g. as
and!(
and!(
space0_e(EImports::AsKeyword),
word2(b'a', b's', EImports::AsKeyword)
),
space0_e(EImports::AsKeyword)
)
),
// e.g. file : Str
specialize(|_, pos| EImports::TypedIdent(pos), typed_ident())
),
|((file_name, _), typed_ident)| {
// TODO: look at blocking block strings during parsing.
Spaced::Item(ImportsEntry::IngestedFile(file_name, typed_ident))
}
)
.trace("ingest_file_import")
)
.trace("imports_entry")
}

View file

@ -232,6 +232,9 @@ pub enum EImports {
IndentSetStart(Position),
SetStart(Position),
SetEnd(Position),
TypedIdent(Position),
AsKeyword(Position),
StrLiteral(Position),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]

View file

@ -1,3 +1,6 @@
use std::io;
use std::path::PathBuf;
use roc_collections::all::MutSet;
use roc_module::called_via::BinOp;
use roc_module::ident::{Ident, Lowercase, ModuleName, TagName};
@ -204,11 +207,15 @@ pub enum Problem {
OverAppliedCrash {
region: Region,
},
FileProblem {
filename: PathBuf,
error: io::ErrorKind,
},
}
impl Problem {
pub fn severity(&self) -> Severity {
use Severity::{RuntimeError, Warning};
use Severity::{Fatal, RuntimeError, Warning};
match self {
Problem::UnusedDef(_, _) => Warning,
@ -269,6 +276,7 @@ impl Problem {
Problem::UnappliedCrash { .. } => RuntimeError,
Problem::OverAppliedCrash { .. } => RuntimeError,
Problem::DefsOnlyUsedInRecursion(_, _) => Warning,
Problem::FileProblem { .. } => Fatal,
}
}
@ -414,6 +422,7 @@ impl Problem {
| Problem::RuntimeError(RuntimeError::VoidValue)
| Problem::RuntimeError(RuntimeError::ExposedButNotDefined(_))
| Problem::RuntimeError(RuntimeError::NoImplementationNamed { .. })
| Problem::FileProblem { .. }
| Problem::ExposedButNotDefined(_) => None,
}
}

View file

@ -6,6 +6,10 @@ pub mod can;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Severity {
/// This should stop compilation in all cases.
/// Due to delayed loading of ingested files, this is the desired behaviour rather than a runtime error.
Fatal,
/// This will cause a runtime error if some code gets run
/// (e.g. type mismatch, naming error)
RuntimeError,

View file

@ -1771,6 +1771,87 @@ fn solve(
state
}
IngestedFile(type_index, file_path, bytes) => {
let actual = either_type_index_to_var(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
&mut can_types,
aliases,
*type_index,
);
let snapshot = subs.snapshot();
if let Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
extra_metadata: _,
} = unify(
&mut UEnv::new(subs),
actual,
Variable::LIST_U8,
Mode::EQ,
Polarity::OF_VALUE,
) {
// List U8 always valid.
introduce(subs, rank, pools, &vars);
debug_assert!(
must_implement_ability.is_empty() && lambda_sets_to_specialize.is_empty(),
"List U8 will never need to implement abilities or specialize lambda sets"
);
state
} else {
subs.rollback_to(snapshot);
// We explicitly match on the last unify to get the type in the case it errors.
match unify(
&mut UEnv::new(subs),
actual,
Variable::STR,
Mode::EQ,
Polarity::OF_VALUE,
) {
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
extra_metadata: _,
} => {
introduce(subs, rank, pools, &vars);
debug_assert!(
must_implement_ability.is_empty() && lambda_sets_to_specialize.is_empty(),
"Str will never need to implement abilities or specialize lambda sets"
);
// Str only valid if valid utf8.
if let Err(err) = std::str::from_utf8(bytes) {
let problem =
TypeError::IngestedFileBadUtf8(file_path.clone(), err);
problems.push(problem);
}
state
}
Failure(vars, actual_type, _, _) => {
introduce(subs, rank, pools, &vars);
let problem = TypeError::IngestedFileUnsupportedType(
file_path.clone(),
actual_type,
);
problems.push(problem);
state
}
}
}
}
};
}
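The solver arm above first tries to unify the ingested value's type with List U8, which accepts any byte contents, and only then falls back to Str, where the bytes must additionally be valid UTF-8. A standalone sketch of that acceptance rule, outside the solver machinery (the function name and error strings here are illustrative, not compiler APIs):

// Sketch of the acceptance rule for ingested file bytes; not the compiler's own code.
fn check_ingested_bytes(is_list_u8: bool, is_str: bool, bytes: &[u8]) -> Result<(), String> {
    if is_list_u8 {
        // Any byte sequence is a valid List U8.
        Ok(())
    } else if is_str {
        // Str only accepts bytes that form valid UTF-8.
        std::str::from_utf8(bytes)
            .map(|_| ())
            .map_err(|err| format!("ingested file is not valid UTF-8: {err}"))
    } else {
        // Mirrors the Failure branch above: any other target type is unsupported.
        Err("ingested files currently unify only with List U8 or Str".to_string())
    }
}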

View file

@ -1,4 +1,6 @@
//! Provides types to describe problems that can occur during solving.
use std::{path::PathBuf, str::Utf8Error};
use roc_can::expected::{Expected, PExpected};
use roc_module::{ident::Lowercase, symbol::Symbol};
use roc_problem::{can::CycleEntry, Severity};
@ -29,6 +31,8 @@ pub enum TypeError {
expected_opaque: Symbol,
found_opaque: Symbol,
},
IngestedFileBadUtf8(Box<PathBuf>, Utf8Error),
IngestedFileUnsupportedType(Box<PathBuf>, ErrorType),
}
impl TypeError {
@ -48,6 +52,8 @@ impl TypeError {
TypeError::Exhaustive(exhtv) => exhtv.severity(),
TypeError::StructuralSpecialization { .. } => RuntimeError,
TypeError::WrongSpecialization { .. } => RuntimeError,
TypeError::IngestedFileBadUtf8(..) => Fatal,
TypeError::IngestedFileUnsupportedType(..) => Fatal,
}
}
}

View file

@ -52,7 +52,7 @@ fn applied_tag_nothing() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn applied_tag_just() {
assert_evals_to!(
indoc!(
@ -71,7 +71,7 @@ fn applied_tag_just() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn applied_tag_just_enum() {
assert_evals_to!(
indoc!(
@ -337,7 +337,7 @@ fn result_with_underscore() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn maybe_is_just_not_nested() {
assert_evals_to!(
indoc!(
@ -362,7 +362,7 @@ fn maybe_is_just_not_nested() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn maybe_is_just_nested() {
assert_evals_to!(
indoc!(
@ -384,7 +384,7 @@ fn maybe_is_just_nested() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn nested_pattern_match() {
assert_evals_to!(
indoc!(
@ -933,7 +933,7 @@ fn alignment_in_multi_tag_pattern_match() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn phantom_polymorphic() {
assert_evals_to!(
indoc!(
@ -957,7 +957,7 @@ fn phantom_polymorphic() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn phantom_polymorphic_record() {
assert_evals_to!(
indoc!(
@ -975,8 +975,8 @@ fn phantom_polymorphic_record() {
main = add zero
"#
),
(0, 0),
(i64, i64)
(0, 0, 0),
(i64, i64, i64)
);
}
@ -1196,7 +1196,7 @@ fn monomorphized_applied_tag() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn monomorphized_tag_with_polymorphic_arg() {
assert_evals_to!(
indoc!(
@ -1219,17 +1219,17 @@ fn monomorphized_tag_with_polymorphic_arg() {
Wrapped A -> 5
Wrapped B -> 7
useWrap1 (wrap {}) * useWrap2 (wrap {})
if Bool.true then useWrap1 (wrap {}) else useWrap2 (wrap {})
"#
),
10,
2,
u8
)
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn monomorphized_tag_with_polymorphic_arg_and_monomorphic_arg() {
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn monomorphized_tag_with_polymorphic_and_monomorphic_arg() {
assert_evals_to!(
indoc!(
r#"
@ -1346,7 +1346,7 @@ fn issue_2365_monomorphize_tag_with_non_empty_ext_var_wrapped_nested() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn issue_2445() {
assert_evals_to!(
indoc!(
@ -1483,7 +1483,7 @@ fn issue_2725_alias_polymorphic_lambda() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn opaque_assign_to_symbol() {
assert_evals_to!(
indoc!(
@ -1697,7 +1697,7 @@ fn instantiate_annotated_as_recursive_alias_multiple_polymorphic_expr() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn issue_3560_nested_tag_constructor_is_newtype() {
assert_evals_to!(
indoc!(
@ -1717,7 +1717,7 @@ fn issue_3560_nested_tag_constructor_is_newtype() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn issue_3560_nested_tag_constructor_is_record_newtype() {
assert_evals_to!(
indoc!(
@ -1931,7 +1931,7 @@ fn match_on_result_with_uninhabited_error_branch() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
fn dispatch_tag_union_function_inferred() {
assert_evals_to!(
indoc!(

View file

@ -15,6 +15,7 @@ use roc_module::symbol::{Interns, Symbol};
use roc_region::all::{Loc, Region};
use std::fmt;
use std::fmt::Write;
use std::path::PathBuf;
pub const TYPE_NUM: &str = "Num";
pub const TYPE_INTEGER: &str = "Integer";
@ -3781,6 +3782,7 @@ pub enum Category {
List,
Str,
Character,
IngestedFile(Box<PathBuf>),
// records
Record,