all gen-dev tests work again

Folkert 2023-09-13 18:55:28 +02:00
parent 5e4f43e1d8
commit fdacfce108
GPG key ID: 1F17F6FFD112B97C
2 changed files with 106 additions and 26 deletions


@@ -3,9 +3,9 @@ const builtin = @import("builtin");
const always_inline = std.builtin.CallOptions.Modifier.always_inline;
const Monotonic = std.builtin.AtomicOrder.Monotonic;
-const DEBUG_INCDEC = true;
+const DEBUG_INCDEC = false;
const DEBUG_TESTING_ALLOC = false;
-const DEBUG_ALLOC = true;
+const DEBUG_ALLOC = false;
pub fn WithOverflow(comptime T: type) type {
return extern struct { value: T, has_overflowed: bool };


@@ -1318,7 +1318,7 @@ impl<
size: u32,
alignment: u32,
) -> i32 {
-let base_offset = self.claim_stack_size_with_alignment(size, Ord::min(alignment, 8));
+let base_offset = self.claim_stack_size_with_alignment(size, alignment);
self.symbol_storage_map
.insert(sym, Stack(Complex { base_offset, size }));
self.allocation_map
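
Note: the hunk above stops clamping the alignment to 8 before claiming stack space. Below is a minimal standalone sketch of why the clamp was a problem for a 16-byte aligned value; it uses the standard u32::next_multiple_of instead of the file's own helper and is an illustration only, not the backend's API.

fn main() {
    // assume 8 bytes are already claimed on the stack
    let stack_size: u32 = 8;
    // and we now claim a 16-byte value that must be 16-byte aligned
    let amount: u32 = 16;

    // old behaviour: alignment clamped to Ord::min(16, 8) == 8, so no padding is inserted
    let old_size = stack_size.next_multiple_of(8) + amount; // 24
    // new behaviour: pad the stack up to the real 16-byte alignment first
    let new_size = stack_size.next_multiple_of(16) + amount; // 32

    // offsets are base-pointer relative, so the value lands at -stack_size
    assert_eq!(-(old_size as i32) % 16, -8); // not 16-byte aligned
    assert_eq!(-(new_size as i32) % 16, 0); // properly aligned
}
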
@@ -1341,10 +1341,12 @@ impl<
base_offset
}
/// claim_stack_size claims `amount` bytes from the stack
/// This may be free space in the stack or result in increasing the stack size.
/// It returns the base-pointer-relative offset of the new data.
-fn claim_stack_size_with_alignment(&mut self, amount: u32, alignment: u32) -> i32 {
+fn claim_stack_size_with_alignment_help(
+free_stack_chunks: &mut Vec<'a, (i32, u32)>,
+stack_size: &mut u32,
+amount: u32,
+alignment: u32,
+) -> i32 {
debug_assert_ne!(amount, 0);
pub const fn next_multiple_of(lhs: u32, rhs: u32) -> u32 {
@@ -1354,55 +1356,69 @@ impl<
}
}
-pub const fn is_multiple_of(lhs: u32, rhs: u32) -> bool {
+pub const fn is_multiple_of(lhs: i32, rhs: i32) -> bool {
match rhs {
0 => false,
_ => lhs % rhs == 0,
}
}
-// round value to alignment.
+// round value to the alignment.
let amount = next_multiple_of(amount, alignment);
-let alignment_correction = next_multiple_of(self.stack_size, alignment) - self.stack_size;
+let chunk_fits = |(offset, size): &(i32, u32)| {
+*size >= amount && is_multiple_of(*offset, alignment as i32)
+};
-if let Some(fitting_chunk) = self
-.free_stack_chunks
+// padding on the stack to make sure an allocation is aligned
+let padding = next_multiple_of(*stack_size, alignment) - *stack_size;
+if let Some(fitting_chunk) = free_stack_chunks
.iter()
.enumerate()
-.filter(|(_, (offset, size))| {
-*size >= amount && is_multiple_of((*offset).abs() as u32, alignment)
-})
+.filter(|(_, chunk)| chunk_fits(chunk))
.min_by_key(|(_, (_, size))| size)
{
let (pos, (offset, size)) = fitting_chunk;
let (offset, size) = (*offset, *size);
if size == amount {
-self.free_stack_chunks.remove(pos);
-debug_assert_eq!(offset % alignment as i32, 0);
+// fill the whole chunk precisely
+free_stack_chunks.remove(pos);
offset
} else {
-let (prev_offset, prev_size) = self.free_stack_chunks[pos];
-self.free_stack_chunks[pos] = (prev_offset + amount as i32, prev_size - amount);
-debug_assert_eq!(prev_offset % alignment as i32, 0);
+// use part of this chunk
+let (prev_offset, prev_size) = free_stack_chunks[pos];
+free_stack_chunks[pos] = (prev_offset + amount as i32, prev_size - amount);
prev_offset
}
-} else if let Some(new_size) = self.stack_size.checked_add(alignment_correction + amount) {
+} else if let Some(new_size) = stack_size.checked_add(padding + amount) {
// Since stack size is u32, but the max offset is i32, if we pass i32 max, we have overflowed.
if new_size > i32::MAX as u32 {
internal_error!("Ran out of stack space");
} else {
-self.stack_size = new_size;
-let offset = -(self.stack_size as i32);
-debug_assert_eq!(offset % alignment as i32, 0);
-offset
+*stack_size = new_size;
+-(*stack_size as i32)
}
} else {
internal_error!("Ran out of stack space");
}
}
+fn claim_stack_size_with_alignment(&mut self, amount: u32, alignment: u32) -> i32 {
+// because many instructions work on 8 bytes, we play it safe and make all stack
+// allocations a multiple of 8 bytes wide. This is of course slightly suboptimal;
+// if you want to improve this, good luck!
+let alignment = Ord::max(8, alignment);
+// the helper is just for testing in this case
+Self::claim_stack_size_with_alignment_help(
+&mut self.free_stack_chunks,
+&mut self.stack_size,
+amount,
+alignment,
+)
+}
pub fn free_symbol(&mut self, sym: &Symbol) {
if self.join_param_map.remove(&JoinPointId(*sym)).is_some() {
// This is a join point and will not be in the storage map.
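
Note: the grow-the-stack branch above compresses several steps into a few lines. Below is a minimal self-contained sketch of the same arithmetic, assuming a hypothetical claim_by_growing function (not the backend's API) and the standard u32::next_multiple_of: round the request up, pad the stack to the alignment, refuse to grow past i32::MAX because offsets are i32, and return the new negative base-pointer-relative offset.

fn claim_by_growing(stack_size: &mut u32, amount: u32, alignment: u32) -> i32 {
    // like the claim_stack_size_with_alignment wrapper above, never align to less than 8 bytes
    let alignment = Ord::max(8, alignment);
    let amount = amount.next_multiple_of(alignment);
    // padding so the new allocation starts at an aligned offset
    let padding = stack_size.next_multiple_of(alignment) - *stack_size;
    let new_size = stack_size
        .checked_add(padding + amount)
        .filter(|size| *size <= i32::MAX as u32) // offsets are i32, so the stack must stay below i32::MAX
        .expect("ran out of stack space");
    *stack_size = new_size;
    -(new_size as i32)
}

fn main() {
    let mut stack_size = 24;
    // 8 bytes at 8-byte alignment: no padding, the stack grows to 32, the data sits at -32
    assert_eq!(claim_by_growing(&mut stack_size, 8, 8), -32);
    // 4 bytes at 4-byte alignment: rounded up to 8 bytes at 8-byte alignment, so -40
    assert_eq!(claim_by_growing(&mut stack_size, 4, 4), -40);
    assert_eq!(stack_size, 40);
}
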
@@ -1569,3 +1585,67 @@ fn is_primitive(layout_interner: &mut STLayoutInterner<'_>, layout: InLayout<'_>
_ => false,
}
}
+#[cfg(test)]
+mod tests {
+use crate::generic64::x86_64::{
+X86_64Assembler, X86_64FloatReg, X86_64GeneralReg, X86_64SystemV,
+};
+use super::*;
+type SystemVStorageManager<'a, 'r> =
+StorageManager<'a, 'r, X86_64GeneralReg, X86_64FloatReg, X86_64Assembler, X86_64SystemV>;
+fn claim_helper(
+mut free_stack_chunks: Vec<'_, (i32, u32)>,
+mut stack_size: u32,
+amount: u32,
+alignment: u32,
+) -> (u32, i32, Vec<'_, (i32, u32)>) {
+let offset = SystemVStorageManager::claim_stack_size_with_alignment_help(
+&mut free_stack_chunks,
+&mut stack_size,
+amount,
+alignment,
+);
+(stack_size, offset, free_stack_chunks)
+}
+#[test]
+fn claim_stack_memory() {
+use bumpalo::vec;
+let arena = bumpalo::Bump::new();
+assert_eq!(
+claim_helper(vec![in &arena;], 24, 8, 8),
+(32, -32, vec![in &arena;]),
+);
+assert_eq!(
+claim_helper(vec![in &arena;], 8, 8, 8),
+(16, -16, vec![in &arena;]),
+);
+assert_eq!(
+claim_helper(vec![in &arena; (-16, 8)], 56, 4, 4),
+(56, -16, vec![in &arena; (-12, 4)])
+);
+assert_eq!(
+claim_helper(vec![in &arena; (-24, 16)], 32, 8, 8),
+(32, -24, vec![in &arena; (-16, 8)])
+);
+assert_eq!(
+claim_helper(vec![in &arena;], 0, 2, 1),
+(2, -2, vec![in &arena;])
+);
+assert_eq!(
+claim_helper(vec![in &arena; (-8, 2)], 16, 2, 1),
+(16, -8, vec![in &arena;])
+);
+}
+}
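
Note: the tests above cover both reusing a free chunk and growing the stack. The best-fit selection itself can be shown in isolation; the sketch below is a hypothetical standalone version of the chunk_fits plus min_by_key logic over plain slices instead of bumpalo vectors, not the backend's API, and it assumes a non-zero alignment.

fn best_fit(free_chunks: &[(i32, u32)], amount: u32, alignment: u32) -> Option<usize> {
    free_chunks
        .iter()
        .enumerate()
        // a chunk fits if it is big enough and its offset is already aligned
        .filter(|(_, (offset, size))| *size >= amount && *offset % alignment as i32 == 0)
        // among the fitting chunks, prefer the smallest one
        .min_by_key(|(_, (_, size))| *size)
        .map(|(pos, _)| pos)
}

fn main() {
    let free_chunks = [(-24, 16), (-8, 8)];
    // an 8-byte, 8-aligned request: both chunks fit, the smaller one at -8 wins
    assert_eq!(best_fit(&free_chunks, 8, 8), Some(1));
    // a 16-byte, 16-aligned request: -24 is not 16-aligned and -8 is too small,
    // so the caller has to grow the stack instead
    assert_eq!(best_fit(&free_chunks, 16, 16), None);
}

Picking the smallest fitting chunk keeps larger free chunks available for later, larger requests.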