misc cleanup

Brendan Hansknecht 2022-03-01 15:52:34 -08:00
parent 3bada97067
commit beeff5ca72
4 changed files with 18 additions and 14 deletions


@@ -558,17 +558,18 @@ impl<
             .load_to_general_reg(&mut self.buf, cond_symbol);

         let mut base_storage = self.storage_manager.clone();
+        let mut max_branch_stack_size = 0;
         let mut ret_jumps = bumpalo::vec![in self.env.arena];
         let mut tmp = bumpalo::vec![in self.env.arena];
         for (val, _branch_info, stmt) in branches.iter() {
-            // TODO: look inot branch info and if it matters here.
+            // TODO: look into branch info and if it matters here.
             tmp.clear();

-            // Create jump to next branch if not cond_sym not equal to value.
+            // Create jump to next branch if cond_sym not equal to value.
             // Since we don't know the offset yet, set it to 0 and overwrite later.
             let jne_location = self.buf.len();
             let start_offset = ASM::jne_reg64_imm64_imm32(&mut self.buf, cond_reg, *val, 0);

-            // Build all statements in this branch.
+            // Build all statements in this branch. Using storage as from before any branch.
             self.storage_manager = base_storage.clone();
             self.build_stmt(stmt, ret_layout);
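Note on the jump handling here: the `jne` is emitted with a placeholder offset of 0 because the branch body has not been generated yet; once the body's length is known, the real relative offset is written back over the placeholder, which is what the `self.buf[jne_location + i] = *byte` loop in the next hunk does. A minimal standalone sketch of that back-patching pattern, with an illustrative opcode and hypothetical function name (the real code goes through ASM::jne_reg64_imm64_imm32, which also emits the compare):

    // Emit a conditional jump with a placeholder rel32, emit the body, then patch
    // the placeholder once the body length is known.
    fn emit_guarded_block(buf: &mut Vec<u8>, emit_body: impl FnOnce(&mut Vec<u8>)) {
        buf.extend_from_slice(&[0x0F, 0x85]); // jne rel32 opcode
        let patch_at = buf.len();
        buf.extend_from_slice(&0i32.to_le_bytes()); // placeholder offset

        let body_start = buf.len(); // the offset is relative to the end of the jump
        emit_body(buf);

        let rel = (buf.len() - body_start) as i32;
        buf[patch_at..patch_at + 4].copy_from_slice(&rel.to_le_bytes());
    }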
@@ -586,10 +587,14 @@ impl<
                 self.buf[jne_location + i] = *byte;
             }

-            base_storage.update_stack_size(self.storage_manager.stack_size());
+            // Update important storage information to avoid overwrites.
+            max_branch_stack_size =
+                std::cmp::max(max_branch_stack_size, self.storage_manager.stack_size());
             base_storage.update_fn_call_stack_size(self.storage_manager.fn_call_stack_size());
         }
         self.storage_manager = base_storage;
+        self.storage_manager
+            .update_stack_size(max_branch_stack_size);

         let (_branch_info, stmt) = default_branch;
         self.build_stmt(stmt, ret_layout);
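This storage change is the substance of the cleanup: instead of folding every branch's stack size back into `base_storage` on each loop iteration, the code now remembers only the largest branch stack size and applies it once after all branches are built. Since the branches are alternatives, the frame only has to be large enough for the biggest one. A small standalone illustration of that sizing rule (not the backend's API; the exact semantics of `update_stack_size` are assumed):

    // Each branch is built from the same base storage, so the frame needs the
    // maximum branch stack size, not the sum of all branches.
    fn required_stack_size(branch_stack_sizes: &[u32]) -> u32 {
        branch_stack_sizes.iter().copied().max().unwrap_or(0)
    }

    fn main() {
        // Branches needing 32, 8, and 24 bytes only require 32 bytes in total,
        // because exactly one branch runs.
        assert_eq!(required_stack_size(&[32, 8, 24]), 32);
    }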
@@ -613,7 +618,7 @@ impl<
         remainder: &'a Stmt<'a>,
         ret_layout: &Layout<'a>,
     ) {
-        // Free everything to the stack to make sure they don't get messed with in the branch.
+        // Free everything to the stack to make sure they don't get messed up when looping back to this point.
         // TODO: look into a nicer solution.
         self.storage_manager.free_all_to_stack(&mut self.buf);

@@ -625,7 +630,6 @@ impl<
         self.join_map.insert(*id, bumpalo::vec![in self.env.arena]);

         // Build remainder of function first. It is what gets run and jumps to join.
-        // self.storage_manager = base_storage;
         self.build_stmt(remainder, ret_layout);

         let join_location = self.buf.len() as u64;
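For context, `join_map` appears to map each join point id to the jump sites that target it: the remainder of the function is built first, recording any jumps to the not-yet-emitted join point, and once `join_location` is known those jumps can be patched. A rough standalone sketch of that bookkeeping, with hypothetical names:

    use std::collections::HashMap;

    // join point id -> byte offsets of rel32 jump operands waiting to be patched
    struct JoinPatches {
        pending: HashMap<u64, Vec<usize>>,
    }

    impl JoinPatches {
        // Called while building code that jumps to a join point not yet emitted.
        fn record_jump(&mut self, id: u64, operand_at: usize) {
            self.pending.entry(id).or_default().push(operand_at);
        }

        // Called once the join point's byte offset in the buffer is known.
        fn patch_all(&mut self, id: u64, buf: &mut [u8], join_location: usize) {
            for operand_at in self.pending.remove(&id).unwrap_or_default() {
                // rel32 is relative to the end of the 4-byte operand.
                let rel = join_location as i64 - (operand_at as i64 + 4);
                buf[operand_at..operand_at + 4].copy_from_slice(&(rel as i32).to_le_bytes());
            }
        }
    }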
@@ -959,7 +963,7 @@ impl<
         let list_alignment = list_layout.alignment_bytes(self.storage_manager.target_info());
         self.load_literal(
             &Symbol::DEV_TMP,
-            &u32_layout,
+            u32_layout,
             &Literal::Int(list_alignment as i128),
         );

@@ -978,13 +982,13 @@ impl<
         let elem_stack_size = elem_layout.stack_size(self.storage_manager.target_info());
         self.load_literal(
             &Symbol::DEV_TMP3,
-            &u64_layout,
+            u64_layout,
             &Literal::Int(elem_stack_size as i128),
         );

         // Setup the return location.
         let base_offset = self.storage_manager.claim_stack_area(
-            &dst,
+            dst,
             ret_layout.stack_size(self.storage_manager.target_info()),
         );


@@ -713,7 +713,7 @@ impl<
     }

     /// Copies a complex symbol on the stack to the arg pointer.
-    pub fn copy_symbol_to_arg_pionter(
+    pub fn copy_symbol_to_arg_pointer(
         &mut self,
         buf: &mut Vec<'a, u8>,
         sym: &Symbol,


@@ -440,7 +440,7 @@ impl CallConv<X86_64GeneralReg, X86_64FloatReg, X86_64Assembler> for X86_64Syste
             }
             _ => {
                 // This is a large type returned via the arg pointer.
-                storage_manager.copy_symbol_to_arg_pionter(buf, sym, layout);
+                storage_manager.copy_symbol_to_arg_pointer(buf, sym, layout);
                 // Also set the return reg to the arg pointer.
                 storage_manager.load_to_specified_general_reg(
                     buf,

@@ -1599,7 +1599,7 @@ fn movsd_freg64_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset:
     buf.extend(&offset.to_le_bytes());
 }

-/// `MOVSD r/m64,xmm1` -> Move xmm1 to r/m64. where m64 references the base pionter.
+/// `MOVSD r/m64,xmm1` -> Move xmm1 to r/m64. where m64 references the base pointer.
 #[inline(always)]
 fn movsd_base64_offset32_freg64(
     buf: &mut Vec<'_, u8>,

@@ -1622,7 +1622,7 @@ fn movsd_base64_offset32_freg64(
     buf.extend(&offset.to_le_bytes());
 }

-/// `MOVSD xmm1,r/m64` -> Move r/m64 to xmm1. where m64 references the base pionter.
+/// `MOVSD xmm1,r/m64` -> Move r/m64 to xmm1. where m64 references the base pointer.
 #[inline(always)]
 fn movsd_freg64_base64_offset32(
     buf: &mut Vec<'_, u8>,
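The two doc comments touched above describe the store and load forms of MOVSD against a base-pointer-relative memory operand. For orientation, a simplified sketch of what the store form typically encodes (F2 0F 11 /r with a ModRM byte selecting [rbp + disp32]); it assumes xmm0-xmm7 so no REX prefix is needed, whereas the real encoders also handle the extended registers and other layouts:

    // `MOVSD [rbp + offset], xmmN`, store form only, xmm0-xmm7 only (no REX).
    fn movsd_rbp_offset32_xmm(buf: &mut Vec<u8>, src_xmm: u8, offset: i32) {
        assert!(src_xmm < 8, "sketch only covers xmm0-xmm7");
        buf.push(0xF2); // MOVSD mandatory prefix
        buf.push(0x0F);
        buf.push(0x11); // store direction: MOVSD r/m64, xmm
        // ModRM: mod=10 (disp32), reg=src_xmm, rm=101 (rbp as the base pointer)
        buf.push(0b10_000_101 | (src_xmm << 3));
        buf.extend_from_slice(&offset.to_le_bytes());
    }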


@@ -336,7 +336,7 @@ trait Backend<'a> {
                 ..
             } => {
                 self.load_literal_symbols(arguments);
-                self.tag(sym, &arguments, tag_layout, *tag_id);
+                self.tag(sym, arguments, tag_layout, *tag_id);
             }
             x => todo!("the expression, {:?}", x),
         }