diff --git a/compiler/gen_wasm/src/backend.rs b/compiler/gen_wasm/src/backend.rs index 437becce7e..4e4b0f5b21 100644 --- a/compiler/gen_wasm/src/backend.rs +++ b/compiler/gen_wasm/src/backend.rs @@ -29,11 +29,6 @@ use crate::{ PTR_SIZE, PTR_TYPE, STACK_POINTER_GLOBAL_ID, STACK_POINTER_NAME, TARGET_INFO, }; -/// The memory address where the constants data will be loaded during module instantiation. -/// We avoid address zero and anywhere near it. They're valid addresses but maybe bug-prone. -/// Follow Emscripten's example by leaving 1kB unused (though 4 bytes would probably do!) -const CONST_SEGMENT_BASE_ADDR: u32 = 1024; - pub struct WasmBackend<'a> { pub env: &'a Env<'a>, interns: &'a mut Interns, @@ -57,7 +52,6 @@ pub struct WasmBackend<'a> { } impl<'a> WasmBackend<'a> { - #[allow(clippy::too_many_arguments)] pub fn new( env: &'a Env<'a>, interns: &'a mut Interns, @@ -78,6 +72,11 @@ impl<'a> WasmBackend<'a> { index: STACK_POINTER_GLOBAL_ID, }); + // The preloaded binary has a global to tell us where its data section ends + // Note: We need this to account for zero data (.bss), which doesn't have an explicit DataSegment! + let data_end_idx = module.export.globals_lookup["__data_end".as_bytes()]; + let next_constant_addr = module.global.parse_u32_at_index(data_end_idx); + WasmBackend { env, interns, @@ -86,7 +85,7 @@ impl<'a> WasmBackend<'a> { module, layout_ids, - next_constant_addr: CONST_SEGMENT_BASE_ADDR, + next_constant_addr, fn_index_offset, called_preload_fns: Vec::with_capacity_in(2, env.arena), proc_lookup, @@ -530,7 +529,23 @@ impl<'a> WasmBackend<'a> { } fn stmt_runtime_error(&mut self, msg: &'a str) { - todo!("RuntimeError {:?}", msg) + // Create a zero-terminated version of the message string + let mut bytes = Vec::with_capacity_in(msg.len() + 1, self.env.arena); + bytes.extend_from_slice(msg.as_bytes()); + bytes.push(0); + + // Store it in the app's data section + let sym = self.create_symbol(msg); + let (linker_sym_index, elements_addr) = self.store_bytes_in_data_section(&bytes, sym); + + // Pass its address to roc_panic + let tag_id = 0; + self.code_builder + .i32_const_mem_addr(elements_addr, linker_sym_index); + self.code_builder.i32_const(tag_id); + self.call_zig_builtin_after_loading_args("roc_panic", 2, false); + + self.code_builder.unreachable_(); } /********************************************************** @@ -541,7 +556,7 @@ impl<'a> WasmBackend<'a> { fn expr(&mut self, sym: Symbol, expr: &Expr<'a>, layout: &Layout<'a>, storage: &StoredValue) { match expr { - Expr::Literal(lit) => self.expr_literal(lit, storage, sym, layout), + Expr::Literal(lit) => self.expr_literal(lit, storage, sym), Expr::Call(roc_mono::ir::Call { call_type, @@ -579,7 +594,9 @@ impl<'a> WasmBackend<'a> { index, } => self.expr_union_at_index(*structure, *tag_id, union_layout, *index, sym), - _ => todo!("Expression `{}`", expr.to_pretty(100)), + Expr::Reuse { .. } | Expr::Reset { .. 
} | Expr::RuntimeErrorFunction(_) => { + todo!("Expression `{}`", expr.to_pretty(100)) + } } } @@ -587,13 +604,7 @@ impl<'a> WasmBackend<'a> { * Literals *******************************************************************/ - fn expr_literal( - &mut self, - lit: &Literal<'a>, - storage: &StoredValue, - sym: Symbol, - layout: &Layout<'a>, - ) { + fn expr_literal(&mut self, lit: &Literal<'a>, storage: &StoredValue, sym: Symbol) { let invalid_error = || internal_error!("Literal value {:?} has invalid storage {:?}", lit, storage); @@ -655,8 +666,9 @@ impl<'a> WasmBackend<'a> { self.code_builder.i64_const(str_as_int); self.code_builder.i64_store(Align::Bytes4, offset); } else { + let bytes = string.as_bytes(); let (linker_sym_index, elements_addr) = - self.expr_literal_big_str(string, sym, layout); + self.store_bytes_in_data_section(bytes, sym); self.code_builder.get_local(local_id); self.code_builder @@ -678,16 +690,11 @@ impl<'a> WasmBackend<'a> { /// Create a string constant in the module data section /// Return the data we need for code gen: linker symbol index and memory address - fn expr_literal_big_str( - &mut self, - string: &'a str, - sym: Symbol, - layout: &Layout<'a>, - ) -> (u32, u32) { + fn store_bytes_in_data_section(&mut self, bytes: &[u8], sym: Symbol) -> (u32, u32) { // Place the segment at a 4-byte aligned offset let segment_addr = round_up_to_alignment!(self.next_constant_addr, PTR_SIZE); let elements_addr = segment_addr + PTR_SIZE; - let length_with_refcount = 4 + string.len(); + let length_with_refcount = 4 + bytes.len(); self.next_constant_addr = segment_addr + length_with_refcount as u32; let mut segment = DataSegment { @@ -698,14 +705,14 @@ impl<'a> WasmBackend<'a> { // Prefix the string bytes with "infinite" refcount let refcount_max_bytes: [u8; 4] = (REFCOUNT_MAX as i32).to_le_bytes(); segment.init.extend_from_slice(&refcount_max_bytes); - segment.init.extend_from_slice(string.as_bytes()); + segment.init.extend_from_slice(bytes); let segment_index = self.module.data.append_segment(segment); // Generate linker symbol let name = self .layout_ids - .get(sym, layout) + .get(sym, &Layout::Builtin(Builtin::Str)) .to_symbol_string(sym, self.interns); let linker_symbol = SymInfo::Data(DataSymbol::Defined { @@ -713,7 +720,7 @@ impl<'a> WasmBackend<'a> { name: name.clone(), segment_index, segment_offset: 4, - size: string.len() as u32, + size: bytes.len() as u32, }); // Ensure the linker keeps the segment aligned when relocating it diff --git a/compiler/gen_wasm/src/lib.rs b/compiler/gen_wasm/src/lib.rs index 679f97b8a7..57266be640 100644 --- a/compiler/gen_wasm/src/lib.rs +++ b/compiler/gen_wasm/src/lib.rs @@ -216,7 +216,7 @@ macro_rules! round_up_to_alignment { if $alignment_bytes <= 1 { $unaligned } else if $alignment_bytes.count_ones() != 1 { - panic!( + internal_error!( "Cannot align to {} bytes. 
Not a power of 2.", $alignment_bytes ); diff --git a/compiler/gen_wasm/src/wasm_module/dead_code.rs b/compiler/gen_wasm/src/wasm_module/dead_code.rs index 382f306322..c950484cf1 100644 --- a/compiler/gen_wasm/src/wasm_module/dead_code.rs +++ b/compiler/gen_wasm/src/wasm_module/dead_code.rs @@ -39,16 +39,16 @@ pub struct PreloadsCallGraph<'a> { } impl<'a> PreloadsCallGraph<'a> { - pub fn new(arena: &'a Bump, import_fn_count: u32, fn_count: u32) -> Self { - let num_preloads = (import_fn_count + fn_count) as usize; + pub fn new(arena: &'a Bump, import_fn_count: usize, fn_count: usize) -> Self { + let num_preloads = import_fn_count + fn_count; let mut code_offsets = Vec::with_capacity_in(num_preloads, arena); let calls = Vec::with_capacity_in(2 * num_preloads, arena); let mut calls_offsets = Vec::with_capacity_in(1 + num_preloads, arena); // Imported functions have zero code length and no calls - code_offsets.extend(std::iter::repeat(0).take(import_fn_count as usize)); - calls_offsets.extend(std::iter::repeat(0).take(import_fn_count as usize)); + code_offsets.extend(std::iter::repeat(0).take(import_fn_count)); + calls_offsets.extend(std::iter::repeat(0).take(import_fn_count)); PreloadsCallGraph { num_preloads, @@ -65,11 +65,24 @@ impl<'a> PreloadsCallGraph<'a> { /// use this backend without a linker. pub fn parse_preloads_call_graph<'a>( arena: &'a Bump, - fn_count: u32, code_section_body: &[u8], - import_fn_count: u32, + imported_fn_signatures: &[u32], + defined_fn_signatures: &[u32], + indirect_callees: &[u32], ) -> PreloadsCallGraph<'a> { - let mut call_graph = PreloadsCallGraph::new(arena, import_fn_count, fn_count); + let mut call_graph = PreloadsCallGraph::new( + arena, + imported_fn_signatures.len(), + defined_fn_signatures.len(), + ); + + // Function type signatures, used for indirect calls + let mut signatures = Vec::with_capacity_in( + imported_fn_signatures.len() + defined_fn_signatures.len(), + arena, + ); + signatures.extend_from_slice(imported_fn_signatures); + signatures.extend_from_slice(defined_fn_signatures); // Iterate over the bytes of the Code section let mut cursor: usize = 0; @@ -88,13 +101,23 @@ pub fn parse_preloads_call_graph<'a>( cursor += 1; // ValueType } - // Parse `call` instructions and skip over all other instructions + // Parse `call` and `call_indirect` instructions, skip over everything else while cursor < func_end { let opcode_byte: u8 = code_section_body[cursor]; if opcode_byte == OpCode::CALL as u8 { cursor += 1; let call_index = parse_u32_or_panic(code_section_body, &mut cursor); call_graph.calls.push(call_index as u32); + } else if opcode_byte == OpCode::CALLINDIRECT as u8 { + cursor += 1; + // Insert all indirect callees with a matching type signature + let sig = parse_u32_or_panic(code_section_body, &mut cursor); + call_graph.calls.extend( + indirect_callees + .iter() + .filter(|f| signatures[**f as usize] == sig), + ); + u32::skip_bytes(code_section_body, &mut cursor); // table_idx } else { OpCode::skip_bytes(code_section_body, &mut cursor); } @@ -193,11 +216,13 @@ pub fn copy_preloads_shrinking_dead_fns<'a, T: SerialBuffer>( live_preload_indices.sort_unstable(); live_preload_indices.dedup(); - let mut live_iter = live_preload_indices.iter(); + let mut live_iter = live_preload_indices + .into_iter() + .skip_while(|f| (*f as usize) < preload_idx_start); let mut next_live_idx = live_iter.next(); for i in preload_idx_start..call_graph.num_preloads { match next_live_idx { - Some(live) if *live as usize == i => { + Some(live) if live as usize == i => { 
next_live_idx = live_iter.next(); let live_body_start = call_graph.code_offsets[i] as usize; let live_body_end = call_graph.code_offsets[i + 1] as usize; diff --git a/compiler/gen_wasm/src/wasm_module/mod.rs b/compiler/gen_wasm/src/wasm_module/mod.rs index 2ac492e620..ce89c4ee07 100644 --- a/compiler/gen_wasm/src/wasm_module/mod.rs +++ b/compiler/gen_wasm/src/wasm_module/mod.rs @@ -11,12 +11,10 @@ pub use linking::SymInfo; use roc_error_macros::internal_error; pub use sections::{ConstExpr, Export, ExportType, Global, GlobalType, Signature}; -use crate::wasm_module::serialize::SkipBytes; - use self::linking::{LinkingSection, RelocationSection}; use self::sections::{ - CodeSection, DataSection, ExportSection, FunctionSection, GlobalSection, ImportSection, - MemorySection, NameSection, OpaqueSection, Section, SectionId, TypeSection, + CodeSection, DataSection, ElementSection, ExportSection, FunctionSection, GlobalSection, + ImportSection, MemorySection, NameSection, OpaqueSection, Section, SectionId, TypeSection, }; use self::serialize::{SerialBuffer, Serialize}; @@ -32,7 +30,7 @@ pub struct WasmModule<'a> { pub global: GlobalSection<'a>, pub export: ExportSection<'a>, pub start: OpaqueSection<'a>, - pub element: OpaqueSection<'a>, + pub element: ElementSection<'a>, pub code: CodeSection<'a>, pub data: DataSection<'a>, pub names: NameSection<'a>, @@ -66,6 +64,7 @@ impl<'a> WasmModule<'a> { self.element.serialize(buffer); self.code.serialize(buffer); self.data.serialize(buffer); + self.names.serialize(buffer); } /// Serialize the module to bytes @@ -118,6 +117,7 @@ impl<'a> WasmModule<'a> { + self.element.size() + self.code.size() + self.data.size() + + self.names.size() } pub fn preload(arena: &'a Bump, bytes: &[u8]) -> Self { @@ -132,18 +132,33 @@ impl<'a> WasmModule<'a> { let mut types = TypeSection::preload(arena, bytes, &mut cursor); types.parse_offsets(); - let import = ImportSection::preload(arena, bytes, &mut cursor); + let mut import = ImportSection::preload(arena, bytes, &mut cursor); + let imported_fn_signatures = import.parse(arena); + let function = FunctionSection::preload(arena, bytes, &mut cursor); + let defined_fn_signatures = function.parse(arena); + let table = OpaqueSection::preload(SectionId::Table, arena, bytes, &mut cursor); + let memory = MemorySection::preload(arena, bytes, &mut cursor); + let global = GlobalSection::preload(arena, bytes, &mut cursor); - ExportSection::skip_bytes(bytes, &mut cursor); - let export = ExportSection::empty(arena); + let export = ExportSection::preload_globals(arena, bytes, &mut cursor); let start = OpaqueSection::preload(SectionId::Start, arena, bytes, &mut cursor); - let element = OpaqueSection::preload(SectionId::Element, arena, bytes, &mut cursor); - let code = CodeSection::preload(arena, bytes, &mut cursor, import.function_count); + + let element = ElementSection::preload(arena, bytes, &mut cursor); + let indirect_callees = element.indirect_callees(arena); + + let code = CodeSection::preload( + arena, + bytes, + &mut cursor, + &imported_fn_signatures, + &defined_fn_signatures, + &indirect_callees, + ); let data = DataSection::preload(arena, bytes, &mut cursor); diff --git a/compiler/gen_wasm/src/wasm_module/sections.rs b/compiler/gen_wasm/src/wasm_module/sections.rs index ea8ca5ec62..f64db3591b 100644 --- a/compiler/gen_wasm/src/wasm_module/sections.rs +++ b/compiler/gen_wasm/src/wasm_module/sections.rs @@ -1,3 +1,5 @@ +use std::fmt::Debug; + use bumpalo::collections::vec::Vec; use bumpalo::Bump; use roc_collections::all::MutMap; @@ 
-10,7 +12,8 @@ use super::dead_code::{ use super::linking::RelocationEntry; use super::opcodes::OpCode; use super::serialize::{ - parse_u32_or_panic, SerialBuffer, Serialize, SkipBytes, MAX_SIZE_ENCODED_U32, + parse_string_bytes, parse_u32_or_panic, SerialBuffer, Serialize, SkipBytes, + MAX_SIZE_ENCODED_U32, }; use super::{CodeBuilder, ValueType}; @@ -377,20 +380,19 @@ impl<'a> ImportSection<'a> { self.count += 1; } - fn update_function_count(&mut self) { - let mut f_count = 0; + pub fn parse(&mut self, arena: &'a Bump) -> Vec<'a, u32> { + let mut fn_signatures = bumpalo::vec![in arena]; let mut cursor = 0; while cursor < self.bytes.len() { - String::skip_bytes(&self.bytes, &mut cursor); - String::skip_bytes(&self.bytes, &mut cursor); + String::skip_bytes(&self.bytes, &mut cursor); // import namespace + String::skip_bytes(&self.bytes, &mut cursor); // import name let type_id = ImportTypeId::from(self.bytes[cursor]); cursor += 1; match type_id { ImportTypeId::Func => { - f_count += 1; - u32::skip_bytes(&self.bytes, &mut cursor); + fn_signatures.push(parse_u32_or_panic(&self.bytes, &mut cursor)); } ImportTypeId::Table => { TableType::skip_bytes(&self.bytes, &mut cursor); @@ -404,17 +406,16 @@ impl<'a> ImportSection<'a> { } } - self.function_count = f_count; + self.function_count = fn_signatures.len() as u32; + fn_signatures } pub fn from_count_and_bytes(count: u32, bytes: Vec<'a, u8>) -> Self { - let mut created = ImportSection { + ImportSection { bytes, count, function_count: 0, - }; - created.update_function_count(); - created + } } } @@ -442,6 +443,16 @@ impl<'a> FunctionSection<'a> { self.bytes.encode_u32(sig_id); self.count += 1; } + + pub fn parse(&self, arena: &'a Bump) -> Vec<'a, u32> { + let count = self.count as usize; + let mut signatures = Vec::with_capacity_in(count, arena); + let mut cursor = 0; + for _ in 0..count { + signatures.push(parse_u32_or_panic(&self.bytes, &mut cursor)); + } + signatures + } } section_impl!(FunctionSection, SectionId::Function); @@ -555,6 +566,26 @@ pub enum ConstExpr { F64(f64), } +impl ConstExpr { + fn parse_u32(bytes: &[u8], cursor: &mut usize) -> u32 { + let err = || internal_error!("Invalid ConstExpr. 
Expected i32."); + + if bytes[*cursor] != OpCode::I32CONST as u8 { + err(); + } + *cursor += 1; + + let value = parse_u32_or_panic(bytes, cursor); + + if bytes[*cursor] != OpCode::END as u8 { + err(); + } + *cursor += 1; + + value + } +} + impl Serialize for ConstExpr { fn serialize(&self, buffer: &mut T) { match self { @@ -579,6 +610,15 @@ impl Serialize for ConstExpr { } } +impl SkipBytes for ConstExpr { + fn skip_bytes(bytes: &[u8], cursor: &mut usize) { + while bytes[*cursor] != OpCode::END as u8 { + OpCode::skip_bytes(bytes, cursor); + } + *cursor += 1; + } +} + #[derive(Debug)] pub struct Global { /// Type and mutability of the global @@ -601,16 +641,14 @@ pub struct GlobalSection<'a> { } impl<'a> GlobalSection<'a> { - pub fn new(arena: &'a Bump, globals: &[Global]) -> Self { - let capacity = 13 * globals.len(); - let mut bytes = Vec::with_capacity_in(capacity, arena); - for global in globals { - global.serialize(&mut bytes); - } - GlobalSection { - count: globals.len() as u32, - bytes, + pub fn parse_u32_at_index(&self, index: u32) -> u32 { + let mut cursor = 0; + for _ in 0..index { + GlobalType::skip_bytes(&self.bytes, &mut cursor); + ConstExpr::skip_bytes(&self.bytes, &mut cursor); } + GlobalType::skip_bytes(&self.bytes, &mut cursor); + ConstExpr::parse_u32(&self.bytes, &mut cursor) } pub fn append(&mut self, global: Global) { @@ -636,6 +674,18 @@ pub enum ExportType { Global = 3, } +impl From for ExportType { + fn from(x: u8) -> Self { + match x { + 0 => Self::Func, + 1 => Self::Table, + 2 => Self::Mem, + 3 => Self::Global, + _ => internal_error!("invalid ExportType {:2x?}", x), + } + } +} + #[derive(Debug)] pub struct Export<'a> { pub name: &'a [u8], @@ -643,6 +693,19 @@ pub struct Export<'a> { pub index: u32, } +impl<'a> Export<'a> { + fn parse(arena: &'a Bump, bytes: &[u8], cursor: &mut usize) -> Self { + let name = parse_string_bytes(arena, bytes, cursor); + + let ty = ExportType::from(bytes[*cursor]); + *cursor += 1; + + let index = parse_u32_or_panic(bytes, cursor); + + Export { name, ty, index } + } +} + impl Serialize for Export<'_> { fn serialize(&self, buffer: &mut T) { self.name.serialize(buffer); @@ -655,7 +718,10 @@ impl Serialize for Export<'_> { pub struct ExportSection<'a> { pub count: u32, pub bytes: Vec<'a, u8>, + /// List of exported functions to keep during dead-code-elimination pub function_indices: Vec<'a, u32>, + /// name -> index + pub globals_lookup: MutMap<&'a [u8], u32>, } impl<'a> ExportSection<'a> { @@ -673,18 +739,36 @@ impl<'a> ExportSection<'a> { section_size(&self.bytes) } - pub fn empty(arena: &'a Bump) -> Self { + fn empty(arena: &'a Bump) -> Self { ExportSection { count: 0, bytes: Vec::with_capacity_in(256, arena), function_indices: Vec::with_capacity_in(4, arena), + globals_lookup: MutMap::default(), } } -} -impl SkipBytes for ExportSection<'_> { - fn skip_bytes(bytes: &[u8], cursor: &mut usize) { - parse_section(Self::ID, bytes, cursor); + /// Preload from object file. Keep only the Global exports, ignore the rest. 
+ pub fn preload_globals(arena: &'a Bump, module_bytes: &[u8], cursor: &mut usize) -> Self { + let (num_exports, body_bytes) = parse_section(Self::ID, module_bytes, cursor); + + let mut export_section = ExportSection::empty(arena); + + let mut body_cursor = 0; + for _ in 0..num_exports { + let export_start = body_cursor; + let export = Export::parse(arena, body_bytes, &mut body_cursor); + if matches!(export.ty, ExportType::Global) { + let global_bytes = &body_bytes[export_start..body_cursor]; + export_section.bytes.extend_from_slice(global_bytes); + export_section.count += 1; + export_section + .globals_lookup + .insert(export.name, export.index); + } + } + + export_section } } @@ -699,6 +783,122 @@ impl<'a> Serialize for ExportSection<'a> { } } +/******************************************************************* + * + * Element section + * + * Elements are entries in tables (see Table section) + * For example, Wasm uses a function table instead of function pointers, + * and each entry in that function table is an element. + * The call_indirect instruction uses element indices to refer to functions. + * This section therefore enumerates all indirectly-called functions. + * + *******************************************************************/ + +#[repr(u8)] +enum ElementSegmentFormatId { + /// Currently only supporting the original Wasm MVP format since it's the only one in wide use. + /// There are newer formats for other table types, with complex encodings to preserve backward compatibility + /// (Already going down the same path as x86!) + ActiveImplicitTableIndex = 0x00, +} + +#[derive(Debug)] +struct ElementSegment<'a> { + offset: ConstExpr, + fn_indices: Vec<'a, u32>, +} + +impl<'a> ElementSegment<'a> { + fn parse(arena: &'a Bump, bytes: &[u8], cursor: &mut usize) -> Self { + // In practice we only need the original MVP format + let format_id = bytes[*cursor]; + debug_assert!(format_id == ElementSegmentFormatId::ActiveImplicitTableIndex as u8); + *cursor += 1; + + // The table index offset is encoded as a ConstExpr, but only I32 makes sense + let const_expr_opcode = bytes[*cursor]; + debug_assert!(const_expr_opcode == OpCode::I32CONST as u8); + *cursor += 1; + let offset = parse_u32_or_panic(bytes, cursor); + debug_assert!(bytes[*cursor] == OpCode::END as u8); + *cursor += 1; + + let num_elems = parse_u32_or_panic(bytes, cursor); + let mut fn_indices = Vec::with_capacity_in(num_elems as usize, arena); + for _ in 0..num_elems { + let fn_idx = parse_u32_or_panic(bytes, cursor); + + fn_indices.push(fn_idx); + } + + ElementSegment { + offset: ConstExpr::I32(offset as i32), + fn_indices, + } + } + + fn size(&self) -> usize { + let variant_id = 1; + let constexpr_opcode = 1; + let constexpr_value = MAX_SIZE_ENCODED_U32; + let vec_len = MAX_SIZE_ENCODED_U32; + let vec_contents = MAX_SIZE_ENCODED_U32 * self.fn_indices.len(); + variant_id + constexpr_opcode + constexpr_value + vec_len + vec_contents + } +} + +impl<'a> Serialize for ElementSegment<'a> { + fn serialize(&self, buffer: &mut T) { + buffer.append_u8(ElementSegmentFormatId::ActiveImplicitTableIndex as u8); + self.offset.serialize(buffer); + self.fn_indices.serialize(buffer); + } +} + +#[derive(Debug)] +pub struct ElementSection<'a> { + segments: Vec<'a, ElementSegment<'a>>, +} + +impl<'a> ElementSection<'a> { + const ID: SectionId = SectionId::Element; + + pub fn preload(arena: &'a Bump, module_bytes: &[u8], cursor: &mut usize) -> Self { + let (num_segments, body_bytes) = parse_section(Self::ID, module_bytes, cursor); + + let mut 
segments = Vec::with_capacity_in(num_segments as usize, arena); + + let mut body_cursor = 0; + for _ in 0..num_segments { + let seg = ElementSegment::parse(arena, body_bytes, &mut body_cursor); + segments.push(seg); + } + + ElementSection { segments } + } + + pub fn size(&self) -> usize { + self.segments.iter().map(|seg| seg.size()).sum() + } + + pub fn indirect_callees(&self, arena: &'a Bump) -> Vec<'a, u32> { + let mut result = bumpalo::vec![in arena]; + for segment in self.segments.iter() { + result.extend_from_slice(&segment.fn_indices); + } + result + } +} + +impl<'a> Serialize for ElementSection<'a> { + fn serialize(&self, buffer: &mut T) { + let header_indices = write_section_header(buffer, Self::ID); + self.segments.serialize(buffer); + update_section_size(buffer, header_indices); + } +} + /******************************************************************* * * Code section (see also code_builder.rs) @@ -742,14 +942,21 @@ impl<'a> CodeSection<'a> { arena: &'a Bump, module_bytes: &[u8], cursor: &mut usize, - import_fn_count: u32, + import_signatures: &[u32], + function_signatures: &[u32], + indirect_callees: &[u32], ) -> Self { let (preloaded_count, initial_bytes) = parse_section(SectionId::Code, module_bytes, cursor); let preloaded_bytes = arena.alloc_slice_copy(initial_bytes); // TODO: Try to move this call_graph preparation to platform build time - let dead_code_metadata = - parse_preloads_call_graph(arena, preloaded_count, initial_bytes, import_fn_count); + let dead_code_metadata = parse_preloads_call_graph( + arena, + initial_bytes, + import_signatures, + function_signatures, + indirect_callees, + ); CodeSection { preloaded_count, @@ -850,7 +1057,7 @@ impl Serialize for DataSegment<'_> { #[derive(Debug)] pub struct DataSection<'a> { count: u32, - bytes: Vec<'a, u8>, + pub bytes: Vec<'a, u8>, // public so backend.rs can calculate addr of first string } impl<'a> DataSection<'a> { @@ -928,8 +1135,8 @@ enum NameSubSections { LocalNames = 2, } -#[derive(Debug, Default)] pub struct NameSection<'a> { + pub bytes: Vec<'a, u8>, pub functions: MutMap<&'a [u8], u32>, } @@ -938,13 +1145,6 @@ impl<'a> NameSection<'a> { const NAME: &'static str = "name"; pub fn parse(arena: &'a Bump, module_bytes: &[u8], cursor: &mut usize) -> Self { - let functions = MutMap::default(); - let mut section = NameSection { functions }; - section.parse_help(arena, module_bytes, cursor); - section - } - - fn parse_help(&mut self, arena: &'a Bump, module_bytes: &[u8], cursor: &mut usize) { // Custom section ID let section_id_byte = module_bytes[*cursor]; if section_id_byte != Self::ID as u8 { @@ -958,21 +1158,37 @@ impl<'a> NameSection<'a> { *cursor += 1; // Section size - let section_size = parse_u32_or_panic(module_bytes, cursor); - let section_end = *cursor + section_size as usize; + let section_size = parse_u32_or_panic(module_bytes, cursor) as usize; + let section_end = *cursor + section_size; - // Custom section name - let section_name_len = parse_u32_or_panic(module_bytes, cursor); - let section_name_end = *cursor + section_name_len as usize; - let section_name = &module_bytes[*cursor..section_name_end]; + let mut bytes = Vec::with_capacity_in(section_size, arena); + bytes.extend_from_slice(&module_bytes[*cursor..section_end]); + let functions = MutMap::default(); + let mut section = NameSection { bytes, functions }; + + section.parse_body(arena, module_bytes, cursor, section_end); + section + } + + pub fn size(&self) -> usize { + self.bytes.len() + } + + fn parse_body( + &mut self, + arena: &'a Bump, + 
module_bytes: &[u8], + cursor: &mut usize, + section_end: usize, + ) { + let section_name = parse_string_bytes(arena, module_bytes, cursor); if section_name != Self::NAME.as_bytes() { internal_error!( - "Expected Custon section {:?}, found {:?}", + "Expected Custom section {:?}, found {:?}", Self::NAME, std::str::from_utf8(section_name) ); } - *cursor = section_name_end; // Find function names subsection let mut found_function_names = false; @@ -997,10 +1213,7 @@ impl<'a> NameSection<'a> { let num_entries = parse_u32_or_panic(module_bytes, cursor) as usize; for _ in 0..num_entries { let fn_index = parse_u32_or_panic(module_bytes, cursor); - let name_len = parse_u32_or_panic(module_bytes, cursor); - let name_end = *cursor + name_len as usize; - let name_bytes: &[u8] = &module_bytes[*cursor..name_end]; - *cursor = name_end; + let name_bytes = parse_string_bytes(arena, module_bytes, cursor); self.functions .insert(arena.alloc_slice_copy(name_bytes), fn_index); @@ -1008,6 +1221,37 @@ impl<'a> NameSection<'a> { } } +impl<'a> Serialize for NameSection<'a> { + fn serialize(&self, buffer: &mut T) { + if !self.bytes.is_empty() { + let header_indices = write_section_header(buffer, Self::ID); + buffer.append_slice(&self.bytes); + update_section_size(buffer, header_indices); + } + } +} + +impl<'a> Debug for NameSection<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "NameSection")?; + + // We want to display index->name because it matches the binary format and looks nicer. + // But our hashmap is name->index because that's what code gen wants to look up. + let mut by_index = std::vec::Vec::with_capacity(self.functions.len()); + for (name, index) in self.functions.iter() { + by_index.push((*index, name)); + } + by_index.sort_unstable(); + + for (index, name) in by_index.iter() { + let name_str = unsafe { std::str::from_utf8_unchecked(name) }; + writeln!(f, " {:4}: {}", index, name_str)?; + } + + Ok(()) + } +} + /******************************************************************* * * Unit tests diff --git a/compiler/gen_wasm/src/wasm_module/serialize.rs b/compiler/gen_wasm/src/wasm_module/serialize.rs index 99561309c5..99abd64018 100644 --- a/compiler/gen_wasm/src/wasm_module/serialize.rs +++ b/compiler/gen_wasm/src/wasm_module/serialize.rs @@ -1,6 +1,6 @@ use std::{fmt::Debug, iter::FromIterator}; -use bumpalo::collections::vec::Vec; +use bumpalo::{collections::vec::Vec, Bump}; use roc_error_macros::internal_error; /// In the WebAssembly binary format, all integers are variable-length encoded (using LEB-128) @@ -262,6 +262,15 @@ pub fn parse_u32_or_panic(bytes: &[u8], cursor: &mut usize) -> u32 { value } +pub fn parse_string_bytes<'a>(arena: &'a Bump, bytes: &[u8], cursor: &mut usize) -> &'a [u8] { + let len = parse_u32_or_panic(bytes, cursor); + let end = *cursor + len as usize; + let bytes: &[u8] = &bytes[*cursor..end]; + let copy = arena.alloc_slice_copy(bytes); + *cursor = end; + copy +} + /// Skip over serialized bytes for a type /// This may, or may not, require looking at the byte values pub trait SkipBytes { diff --git a/compiler/test_gen/src/gen_primitives.rs b/compiler/test_gen/src/gen_primitives.rs index a720176ad3..30df72816a 100644 --- a/compiler/test_gen/src/gen_primitives.rs +++ b/compiler/test_gen/src/gen_primitives.rs @@ -3,8 +3,6 @@ use crate::helpers::llvm::assert_evals_to; #[cfg(feature = "gen-llvm")] use crate::helpers::llvm::assert_expect_failed; #[cfg(feature = "gen-llvm")] -use crate::helpers::llvm::assert_llvm_evals_to; -#[cfg(feature 
= "gen-llvm")] use crate::helpers::llvm::assert_non_opt_evals_to; #[cfg(feature = "gen-dev")] @@ -2470,10 +2468,10 @@ fn function_malformed_pattern() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] #[should_panic(expected = "Hit an erroneous type when creating a layout for")] fn call_invalid_layout() { - assert_llvm_evals_to!( + assert_evals_to!( indoc!( r#" f : I64 -> I64 diff --git a/compiler/test_gen/src/gen_records.rs b/compiler/test_gen/src/gen_records.rs index d88d71b08d..0397957edc 100644 --- a/compiler/test_gen/src/gen_records.rs +++ b/compiler/test_gen/src/gen_records.rs @@ -1,14 +1,11 @@ #[cfg(feature = "gen-llvm")] -use crate::helpers::llvm::assert_evals_to; - -#[cfg(feature = "gen-llvm")] -use crate::helpers::llvm::expect_runtime_error_panic; +use crate::helpers::llvm::{assert_evals_to, expect_runtime_error_panic}; #[cfg(feature = "gen-dev")] use crate::helpers::dev::assert_evals_to; #[cfg(feature = "gen-wasm")] -use crate::helpers::wasm::assert_evals_to; +use crate::helpers::wasm::{assert_evals_to, expect_runtime_error_panic}; // use crate::assert_wasm_evals_to as assert_evals_to; use indoc::indoc; @@ -1044,6 +1041,7 @@ fn different_proc_types_specialized_to_same_layout() { #[cfg(any(feature = "gen-llvm"))] #[should_panic( // TODO: something upstream is escaping the ' + // NOTE: Are we sure it's upstream? It's not escaped in gen-wasm version below! expected = r#"Roc failed with message: "Can\'t create record with improper layout""# )] fn call_with_bad_record_runtime_error() { @@ -1059,6 +1057,22 @@ fn call_with_bad_record_runtime_error() { )) } +#[test] +#[cfg(any(feature = "gen-wasm"))] +#[should_panic(expected = r#"Can't create record with improper layout"#)] +fn call_with_bad_record_runtime_error() { + expect_runtime_error_panic!(indoc!( + r#" + app "test" provides [ main ] to "./platform" + + main = + get : {a: Bool} -> Bool + get = \{a} -> a + get {b: ""} + "# + )) +} + #[test] #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn generalized_accessor() { diff --git a/compiler/test_gen/src/gen_refcount.rs b/compiler/test_gen/src/gen_refcount.rs index e16cad765b..25ade5e934 100644 --- a/compiler/test_gen/src/gen_refcount.rs +++ b/compiler/test_gen/src/gen_refcount.rs @@ -1,5 +1,5 @@ #[cfg(feature = "gen-wasm")] -use crate::helpers::wasm::assert_refcounts; +use crate::helpers::{wasm::assert_refcounts, RefCount::*}; #[allow(unused_imports)] use indoc::indoc; @@ -25,8 +25,8 @@ fn str_inc() { ), RocList, &[ - 3, // s - 1 // result + Live(3), // s + Live(1) // result ] ); } @@ -43,7 +43,7 @@ fn str_dealloc() { "# ), bool, - &[0] + &[Deallocated] ); } @@ -60,10 +60,10 @@ fn list_int_inc() { RocList>, &[ // TODO be smarter about coalescing polymorphic list values - 1, // list0 - 1, // list1 - 1, // list2 - 1 // result + Live(1), // list0 + Live(1), // list1 + Live(1), // list2 + Live(1) // result ] ); } @@ -81,10 +81,10 @@ fn list_int_dealloc() { usize, &[ // TODO be smarter about coalescing polymorphic list values - 0, // list0 - 0, // list1 - 0, // list2 - 0 // result + Deallocated, // list0 + Deallocated, // list1 + Deallocated, // list2 + Deallocated // result ] ); } @@ -102,9 +102,9 @@ fn list_str_inc() { ), RocList>, &[ - 6, // s - 2, // list - 1 // result + Live(6), // s + Live(2), // list + Live(1) // result ] ); } @@ -122,9 +122,9 @@ fn list_str_dealloc() { ), usize, &[ - 0, // s - 0, // list - 0 // result + Deallocated, // s + Deallocated, // list + Deallocated // result ] ); } @@ -142,7 +142,7 @@ fn 
struct_inc() { "# ), [(i64, RocStr, RocStr); 2], - &[4] // s + &[Live(4)] // s ); } @@ -160,7 +160,7 @@ fn struct_dealloc() { "# ), i64, - &[0] // s + &[Deallocated] // s ); } @@ -186,7 +186,7 @@ fn union_nonrecursive_inc() { "# ), (TwoStr, TwoStr, i64), - &[4] + &[Live(4)] ); } @@ -209,7 +209,7 @@ fn union_nonrecursive_dec() { "# ), RocStr, - &[1] // s + &[Live(1)] // s ); } @@ -234,9 +234,9 @@ fn union_recursive_inc() { ), (Pointer, Pointer), &[ - 4, // s - 4, // sym - 2, // e + Live(4), // s + Live(4), // sym + Live(2), // e ] ); } @@ -264,9 +264,9 @@ fn union_recursive_dec() { ), Pointer, &[ - 1, // s - 1, // sym - 0 // e + Live(1), // s + Live(1), // sym + Deallocated // e ] ); } @@ -300,13 +300,13 @@ fn refcount_different_rosetrees_inc() { ), (Pointer, Pointer), &[ - 2, // s - 3, // i1 - 2, // s1 - 1, // [i1, i1] - 1, // i2 - 1, // [s1, s1] - 1 // s2 + Live(2), // s + Live(3), // i1 + Live(2), // s1 + Live(1), // [i1, i1] + Live(1), // i2 + Live(1), // [s1, s1] + Live(1) // s2 ] ); } @@ -341,13 +341,13 @@ fn refcount_different_rosetrees_dec() { ), i64, &[ - 0, // s - 0, // i1 - 0, // s1 - 0, // [i1, i1] - 0, // i2 - 0, // [s1, s1] - 0, // s2 + Deallocated, // s + Deallocated, // i1 + Deallocated, // s1 + Deallocated, // [i1, i1] + Deallocated, // i2 + Deallocated, // [s1, s1] + Deallocated, // s2 ] ); } @@ -370,10 +370,10 @@ fn union_linked_list_inc() { ), (Pointer, Pointer), &[ - 6, // s - 2, // Cons - 2, // Cons - 2, // Cons + Live(6), // s + Live(2), // Cons + Live(2), // Cons + Live(2), // Cons ] ); } @@ -398,10 +398,10 @@ fn union_linked_list_dec() { ), RocStr, &[ - 1, // s - 0, // Cons - 0, // Cons - 0, // Cons + Live(1), // s + Deallocated, // Cons + Deallocated, // Cons + Deallocated, // Cons ] ); } @@ -434,6 +434,6 @@ fn union_linked_list_long_dec() { "# ), i64, - &[0; 1_000] + &[Deallocated; 1_000] ); } diff --git a/compiler/test_gen/src/gen_tags.rs b/compiler/test_gen/src/gen_tags.rs index f3ce70ea69..4cab64ec87 100644 --- a/compiler/test_gen/src/gen_tags.rs +++ b/compiler/test_gen/src/gen_tags.rs @@ -1222,7 +1222,7 @@ fn applied_tag_function_linked_list() { #[test] #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] -#[should_panic(expected = "")] +#[should_panic(expected = "")] // TODO: this only panics because it returns 0 instead of 1! 
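+// (An empty "expected" string matches any panic message, because should_panic only checks that the message contains the expected text.)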
fn tag_must_be_its_own_type() { assert_evals_to!( indoc!( diff --git a/compiler/test_gen/src/helpers/debug-wasm-test.html b/compiler/test_gen/src/helpers/debug-wasm-test.html index caf1b01901..ba92959499 100644 --- a/compiler/test_gen/src/helpers/debug-wasm-test.html +++ b/compiler/test_gen/src/helpers/debug-wasm-test.html @@ -258,6 +258,7 @@ function fd_write(fd, iovs_ptr, iovs_len, nwritten_mut_ptr) { let string_buffer = ""; let nwritten = 0; + const STDOUT = 1; for (let i = 0; i < iovs_len; i++) { const index32 = iovs_ptr >> 2; @@ -282,16 +283,18 @@ } wasiLinkObject.memory32[nwritten_mut_ptr >> 2] = nwritten; if (string_buffer) { - console.log(string_buffer); + if (fd === STDOUT) { + console.log(string_buffer); + } else { + console.error(string_buffer); + } } return 0; } // proc_exit : (i32) -> nil function proc_exit(exit_code) { - if (exit_code) { - throw new Error(`Wasm exited with code ${exit_code}`); - } + throw new Error(`Wasm exited with code ${exit_code}`); } // Signatures from wasm_test_platform.o diff --git a/compiler/test_gen/src/helpers/llvm.rs b/compiler/test_gen/src/helpers/llvm.rs index 29854fbcde..35c4e7ab72 100644 --- a/compiler/test_gen/src/helpers/llvm.rs +++ b/compiler/test_gen/src/helpers/llvm.rs @@ -579,19 +579,31 @@ macro_rules! assert_llvm_evals_to { #[allow(unused_macros)] macro_rules! assert_evals_to { ($src:expr, $expected:expr, $ty:ty) => {{ - assert_evals_to!($src, $expected, $ty, $crate::helpers::llvm::identity); + assert_evals_to!($src, $expected, $ty, $crate::helpers::llvm::identity, false); }}; - ($src:expr, $expected:expr, $ty:ty, $transform:expr) => { + ($src:expr, $expected:expr, $ty:ty, $transform:expr) => {{ // same as above, except with an additional transformation argument. - { - #[cfg(feature = "wasm-cli-run")] - $crate::helpers::llvm::assert_wasm_evals_to!( - $src, $expected, $ty, $transform, false, false - ); + assert_evals_to!($src, $expected, $ty, $transform, false); + }}; + ($src:expr, $expected:expr, $ty:ty, $transform:expr, $ignore_problems: expr) => {{ + // same as above, except with ignore_problems. 
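+        // The two shorter arms above forward here, so every assert_evals_to call can run
+        // on both backends: Wasm first (when "wasm-cli-run" is enabled), then LLVM.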
+ #[cfg(feature = "wasm-cli-run")] + $crate::helpers::llvm::assert_wasm_evals_to!( + $src, + $expected, + $ty, + $transform, + $ignore_problems + ); - $crate::helpers::llvm::assert_llvm_evals_to!($src, $expected, $ty, $transform, false); - } - }; + $crate::helpers::llvm::assert_llvm_evals_to!( + $src, + $expected, + $ty, + $transform, + $ignore_problems + ); + }}; } #[allow(unused_macros)] diff --git a/compiler/test_gen/src/helpers/mod.rs b/compiler/test_gen/src/helpers/mod.rs index 10e3c70e8b..53b642e3f6 100644 --- a/compiler/test_gen/src/helpers/mod.rs +++ b/compiler/test_gen/src/helpers/mod.rs @@ -55,3 +55,10 @@ where { run_test() } + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum RefCount { + Live(u32), + Deallocated, + Constant, +} diff --git a/compiler/test_gen/src/helpers/wasm.rs b/compiler/test_gen/src/helpers/wasm.rs index 6813459933..02a02bb8d7 100644 --- a/compiler/test_gen/src/helpers/wasm.rs +++ b/compiler/test_gen/src/helpers/wasm.rs @@ -6,6 +6,7 @@ use std::marker::PhantomData; use std::path::{Path, PathBuf}; use wasmer::{Memory, WasmPtr}; +use super::RefCount; use crate::helpers::from_wasmer_memory::FromWasmerMemory; use roc_collections::all::{MutMap, MutSet}; use roc_gen_wasm::wasm32_result::Wasm32Result; @@ -17,6 +18,7 @@ const OUT_DIR_VAR: &str = "TEST_GEN_OUT"; const TEST_WRAPPER_NAME: &str = "test_wrapper"; const INIT_REFCOUNT_NAME: &str = "init_refcount_test"; +const PANIC_MSG_NAME: &str = "panic_msg"; fn promote_expr_to_module(src: &str) -> String { let mut buffer = String::from("app \"test\" provides [ main ] to \"./platform\"\n\nmain =\n"); @@ -176,7 +178,7 @@ fn load_bytes_into_runtime(bytes: Vec<u8>) -> wasmer::Instance { } #[allow(dead_code)] -pub fn assert_wasm_evals_to_help<T>(src: &str, phantom: PhantomData<T>) -> Result<T, String> +pub fn assert_evals_to_help<T>(src: &str, phantom: PhantomData<T>) -> Result<T, String> where T: FromWasmerMemory + Wasm32Result, { @@ -192,7 +194,13 @@ where let test_wrapper = instance.exports.get_function(TEST_WRAPPER_NAME).unwrap(); match test_wrapper.call(&[]) { - Err(e) => Err(format!("{:?}", e)), + Err(e) => { + if let Some(msg) = get_roc_panic_msg(&instance, memory) { + Err(msg) + } else { + Err(e.to_string()) + } + } Ok(result) => { let address = result[0].unwrap_i32(); @@ -213,12 +221,37 @@ where } } +/// Our test roc_panic stores a pointer to its message in a global variable so we can find it. +fn get_roc_panic_msg(instance: &wasmer::Instance, memory: &Memory) -> Option<String> { + let memory_bytes = unsafe { memory.data_unchecked() }; + + // We need to dereference twice! + // The Wasm Global only points at the memory location of the C global value + let panic_msg_global = instance.exports.get_global(PANIC_MSG_NAME).unwrap(); + let global_addr = panic_msg_global.get().unwrap_i32() as usize; + let global_ptr = memory_bytes[global_addr..].as_ptr() as *const u32; + + // Dereference again to find the bytes of the message string + let msg_addr = unsafe { *global_ptr }; + if msg_addr == 0 { + return None; + } + let msg_index = msg_addr as usize; + let msg_len = memory_bytes[msg_index..] 
+ .iter() + .position(|c| *c == 0) + .unwrap(); + let msg_bytes = memory_bytes[msg_index..][..msg_len].to_vec(); + let msg = unsafe { String::from_utf8_unchecked(msg_bytes) }; + Some(msg) +} + #[allow(dead_code)] pub fn assert_wasm_refcounts_help( src: &str, phantom: PhantomData, num_refcounts: usize, -) -> Result, String> +) -> Result, String> where T: FromWasmerMemory + Wasm32Result, { @@ -264,12 +297,15 @@ where for i in 0..num_refcounts { let rc_ptr = refcount_ptrs[i].get(); let rc = if rc_ptr.offset() == 0 { - // RC pointer has been set to null, which means the value has been freed. - // In tests, we simply represent this as zero refcount. - 0 + RefCount::Deallocated } else { - let rc_encoded = rc_ptr.deref(memory).unwrap().get(); - (rc_encoded - i32::MIN + 1) as u32 + let rc_encoded: i32 = rc_ptr.deref(memory).unwrap().get(); + if rc_encoded == 0 { + RefCount::Constant + } else { + let rc = rc_encoded - i32::MIN + 1; + RefCount::Live(rc as u32) + } }; refcounts.push(rc); } @@ -307,42 +343,44 @@ pub fn debug_memory_hex(memory: &Memory, address: i32, size: usize) { } #[allow(unused_macros)] -macro_rules! assert_wasm_evals_to { +macro_rules! assert_evals_to { + ($src:expr, $expected:expr, $ty:ty) => { + $crate::helpers::wasm::assert_evals_to!( + $src, + $expected, + $ty, + $crate::helpers::wasm::identity, + false + ) + }; + ($src:expr, $expected:expr, $ty:ty, $transform:expr) => { + $crate::helpers::wasm::assert_evals_to!($src, $expected, $ty, $transform, false); + }; + + ($src:expr, $expected:expr, $ty:ty, $transform:expr, $ignore_problems: expr) => {{ let phantom = std::marker::PhantomData; - match $crate::helpers::wasm::assert_wasm_evals_to_help::<$ty>($src, phantom) { - Err(msg) => panic!("{:?}", msg), + let _ = $ignore_problems; // Always ignore "problems"! One backend (LLVM) is enough to cover them. + match $crate::helpers::wasm::assert_evals_to_help::<$ty>($src, phantom) { + Err(msg) => panic!("{}", msg), Ok(actual) => { assert_eq!($transform(actual), $expected) } } - }; - - ($src:expr, $expected:expr, $ty:ty) => { - $crate::helpers::wasm::assert_wasm_evals_to!( - $src, - $expected, - $ty, - $crate::helpers::wasm::identity - ); - }; - - ($src:expr, $expected:expr, $ty:ty, $transform:expr) => { - $crate::helpers::wasm::assert_wasm_evals_to!($src, $expected, $ty, $transform); - }; + }}; } #[allow(unused_macros)] -macro_rules! assert_evals_to { - ($src:expr, $expected:expr, $ty:ty) => {{ - assert_evals_to!($src, $expected, $ty, $crate::helpers::wasm::identity); +macro_rules! expect_runtime_error_panic { + ($src:expr) => {{ + $crate::helpers::wasm::assert_evals_to!( + $src, + false, // fake value/type for eval + bool, + $crate::helpers::wasm::identity, + true // ignore problems + ); }}; - ($src:expr, $expected:expr, $ty:ty, $transform:expr) => { - // Same as above, except with an additional transformation argument. - { - $crate::helpers::wasm::assert_wasm_evals_to!($src, $expected, $ty, $transform); - } - }; } #[allow(dead_code)] @@ -371,8 +409,9 @@ macro_rules! 
assert_refcounts { #[allow(unused_imports)] pub(crate) use assert_evals_to; + #[allow(unused_imports)] -pub(crate) use assert_wasm_evals_to; +pub(crate) use expect_runtime_error_panic; #[allow(unused_imports)] pub(crate) use assert_refcounts; diff --git a/compiler/test_gen/src/helpers/wasm_test_platform.c b/compiler/test_gen/src/helpers/wasm_test_platform.c index b01543d0de..2a70f8dc65 100644 --- a/compiler/test_gen/src/helpers/wasm_test_platform.c +++ b/compiler/test_gen/src/helpers/wasm_test_platform.c @@ -1,4 +1,5 @@ #include +#include // Makes test runs take 50% longer, due to linking #define ENABLE_PRINTF 0 @@ -121,14 +122,18 @@ void roc_dealloc(void *ptr, unsigned int alignment) //-------------------------- -void roc_panic(void *ptr, unsigned int alignment) +// Allow the test to probe the panic message +char* panic_msg; + +void roc_panic(char *msg, unsigned int tag_id) { -#if ENABLE_PRINTF - char *msg = (char *)ptr; - fprintf(stderr, - "Application crashed with message\n\n %s\n\nShutting down\n", msg); -#endif - abort(); + panic_msg = msg; + + // Note: no dynamic string formatting + fputs("Application crashed with message\n\n ", stderr); + fputs(msg, stderr); + fputs("\n\nShutting down\n", stderr); + exit(101); } //--------------------------
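Aside (not part of the diff): every parser added above (parse_u32_or_panic, parse_string_bytes, the SkipBytes impls) leans on Wasm's unsigned LEB128 integer encoding. The sketch below shows roughly what that decoding is assumed to look like; the function name and example values are illustrative only, not code from this PR.

    // Minimal unsigned LEB128 decoder, for illustration only.
    fn decode_u32_leb128(bytes: &[u8], cursor: &mut usize) -> u32 {
        let mut value: u32 = 0;
        let mut shift: u32 = 0;
        loop {
            let byte = bytes[*cursor];
            *cursor += 1;
            value |= ((byte & 0x7f) as u32) << shift; // low 7 bits carry the payload
            if byte & 0x80 == 0 {
                return value; // high bit clear means this was the last byte
            }
            shift += 7;
            assert!(shift < 35, "a u32 needs at most 5 LEB128 bytes");
        }
    }

    fn main() {
        // 1024 (the old CONST_SEGMENT_BASE_ADDR) encodes as [0x80, 0x08]
        let mut cursor = 0;
        assert_eq!(decode_u32_leb128(&[0x80, 0x08], &mut cursor), 1024);
        assert_eq!(cursor, 2);
    }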