Mirror of https://github.com/roc-lang/roc.git
Remove the trick of pushing an extra N+1 entry to function_offsets. It's bug-prone.
parent 0c81063c68
commit 24e6e8445d

2 changed files with 24 additions and 20 deletions
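The hunks below replace each unchecked `function_offsets[i + 1]` lookup with a bounds check that falls back to the end of the code bytes, so the offsets table no longer needs a sentinel end entry. A minimal, self-contained sketch of the before/after pattern (the helper names are invented for illustration, not taken from roc):

use std::ops::Range;

// Old approach: parsing pushed one extra "end" entry, so every body,
// including the last, was offsets[i]..offsets[i + 1]. The cost: every
// code path touching the table had to remember the phantom N+1th entry.
fn body_range_with_sentinel(offsets: &[u32], i: usize) -> Range<usize> {
    offsets[i] as usize..offsets[i + 1] as usize
}

// New approach: offsets holds exactly one entry per function, and the
// last body simply runs to the end of the section bytes, the same check
// the hunks below add around `code_end`.
fn body_range(offsets: &[u32], bytes_len: usize, i: usize) -> Range<usize> {
    let start = offsets[i] as usize;
    let end = if i < offsets.len() - 1 {
        offsets[i + 1] as usize
    } else {
        bytes_len
    };
    start..end
}

fn main() {
    let bytes = [0u8; 12]; // pretend: 12 bytes of code, 3 function bodies
    let with_sentinel = [0u32, 4, 9, 12]; // old: N + 1 entries
    let without = [0u32, 4, 9]; // new: exactly N entries
    for i in 0..3 {
        assert_eq!(
            body_range_with_sentinel(&with_sentinel, i),
            body_range(&without, bytes.len(), i)
        );
    }
}

The sentinel made slicing uniform, but anything that rebuilt or appended to `function_offsets` had to maintain the extra-entry invariant; moving the check into the lookup removes that invariant entirely.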
@@ -267,7 +267,7 @@ impl<'a> WasmModule<'a> {
             self.names.function_names[old_index].1 = new_name;
         }
 
-        // Relocate calls from to JS imports
+        // Relocate calls to JS imports
         // This must happen *before* we run dead code elimination on the code section,
         // so that byte offsets in the linking data will still be valid.
         for (new_index, &old_index) in live_import_fns.iter().enumerate() {
@@ -291,7 +291,11 @@ impl<'a> WasmModule<'a> {
         for (i, fn_index) in (fn_index_min..fn_index_max).enumerate() {
             if live_flags[fn_index as usize] {
                 let code_start = self.code.function_offsets[i] as usize;
-                let code_end = self.code.function_offsets[i + 1] as usize;
+                let code_end = if i < self.code.function_offsets.len() - 1 {
+                    self.code.function_offsets[i + 1] as usize
+                } else {
+                    self.code.bytes.len()
+                };
                 buffer.extend_from_slice(&self.code.bytes[code_start..code_end]);
             } else {
                 DUMMY_FUNCTION.serialize(&mut buffer);
@@ -367,7 +371,11 @@ impl<'a> WasmModule<'a> {
            // Find where the function body is
            let offset_index = fn_index - fn_index_min as usize;
            let code_start = self.code.function_offsets[offset_index];
-            let code_end = self.code.function_offsets[offset_index + 1];
+            let code_end = if offset_index < self.code.function_offsets.len() - 1 {
+                self.code.function_offsets[offset_index + 1]
+            } else {
+                self.code.bytes.len() as u32
+            };
 
            // For each call in the body
            for (offset, symbol) in call_offsets_and_symbols.iter() {
@@ -1195,37 +1195,33 @@ impl<'a> CodeSection<'a> {
            });
        }
        *cursor += 1;
-        let section_size = u32::parse((), module_bytes, cursor)?;
+        let section_size = u32::parse((), module_bytes, cursor)? as usize;
        let section_body_start = *cursor;
-        let count = u32::parse((), module_bytes, cursor)?;
-        let function_bodies_start = *cursor;
-        let next_section_start = section_body_start + section_size as usize;
+        let function_count = u32::parse((), module_bytes, cursor)?;
+        let next_section_start = section_body_start + section_size;
 
-        // preloaded_bytes starts at the function count, since that's considered the zero offset in the linker data.
-        // But when we finally write to file, we'll exclude the function count and write our own, including app fns.
-        let mut preloaded_bytes =
-            Vec::with_capacity_in(next_section_start - function_bodies_start, arena);
-        preloaded_bytes.extend_from_slice(&module_bytes[section_body_start..*cursor]);
+        // `bytes` must include the function count for linker offsets to be correct.
+        let mut bytes = Vec::with_capacity_in(section_size + section_size / 2, arena);
+        bytes.extend_from_slice(&module_bytes[section_body_start..*cursor]);
 
-        let mut preloaded_offsets = Vec::with_capacity_in(count as usize, arena);
+        let mut function_offsets = Vec::with_capacity_in(function_count as usize, arena);
 
        // While copying the code bytes, also note where each function starts & ends
        // Later we will use this for dead code elimination
        while *cursor < next_section_start {
            let fn_start = *cursor;
-            preloaded_offsets.push((fn_start - section_body_start) as u32);
+            function_offsets.push((fn_start - section_body_start) as u32);
            let fn_length = u32::parse((), module_bytes, cursor)? as usize;
            *cursor += fn_length;
-            preloaded_bytes.extend_from_slice(&module_bytes[fn_start..*cursor]);
+            bytes.extend_from_slice(&module_bytes[fn_start..*cursor]);
        }
-        preloaded_offsets.push((next_section_start - section_body_start) as u32);
 
-        debug_assert_eq!(preloaded_offsets.len(), 1 + count as usize);
+        debug_assert_eq!(function_offsets.len(), function_count as usize);
 
        Ok(CodeSection {
-            function_count: count,
-            bytes: preloaded_bytes,
-            function_offsets: preloaded_offsets,
+            function_count,
+            bytes,
+            function_offsets,
            dead_import_dummy_count: 0,
        })
    }
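For context on the parse loop above, here is a simplified, runnable model of the scan. `read_u32` is a stand-in for roc's `u32::parse`, assumed here to decode unsigned LEB128 as the WebAssembly binary format requires, and the section bytes are a toy example, not real module data:

// Stand-in LEB128 reader; the real `u32::parse` also reports errors.
fn read_u32(bytes: &[u8], cursor: &mut usize) -> u32 {
    let mut result = 0u32;
    let mut shift = 0;
    loop {
        let byte = bytes[*cursor];
        *cursor += 1;
        result |= ((byte & 0x7f) as u32) << shift;
        if byte & 0x80 == 0 {
            return result;
        }
        shift += 7;
    }
}

fn main() {
    // Toy code-section body: function count = 2, then two length-prefixed
    // bodies of 3 and 1 bytes.
    let section_body: &[u8] = &[2, 3, 0xAA, 0xBB, 0xCC, 1, 0xDD];

    let mut cursor = 0;
    let function_count = read_u32(section_body, &mut cursor);

    // One offset per function, relative to the start of the section body
    // (i.e. counting the function-count bytes, which is what the linker
    // data treats as offset zero), pointing at each length prefix.
    let mut function_offsets = Vec::with_capacity(function_count as usize);
    while cursor < section_body.len() {
        function_offsets.push(cursor as u32);
        let fn_length = read_u32(section_body, &mut cursor) as usize;
        cursor += fn_length;
    }

    // No sentinel entry: exactly one offset per function.
    assert_eq!(function_offsets.len(), function_count as usize);
    assert_eq!(function_offsets, [1, 5]);
}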