diff --git a/compiler/gen_dev/src/generic64/storage.rs b/compiler/gen_dev/src/generic64/storage.rs index 0bb48abd2f..551dd8974f 100644 --- a/compiler/gen_dev/src/generic64/storage.rs +++ b/compiler/gen_dev/src/generic64/storage.rs @@ -294,6 +294,17 @@ impl< ); reg } + Stack(ReferencedPrimitive { base_offset, size }) + if base_offset % 8 == 0 && size == 8 => + { + // The primitive is aligned and the data is exactly 8 bytes, treat it like regular stack. + let reg = self.get_general_reg(buf); + ASM::mov_reg64_base32(buf, reg, base_offset); + self.general_used_regs.push((reg, *sym)); + self.symbol_storage_map.insert(*sym, Reg(General(reg))); + self.free_reference(sym); + reg + } Stack(ReferencedPrimitive { .. }) => { todo!("loading referenced primitives") } @@ -349,6 +360,17 @@ impl< ); reg } + Stack(ReferencedPrimitive { base_offset, size }) + if base_offset % 8 == 0 && size == 8 => + { + // The primitive is aligned and the data is exactly 8 bytes, treat it like regular stack. + let reg = self.get_float_reg(buf); + ASM::mov_freg64_base32(buf, reg, base_offset); + self.float_used_regs.push((reg, *sym)); + self.symbol_storage_map.insert(*sym, Reg(Float(reg))); + self.free_reference(sym); + reg + } Stack(ReferencedPrimitive { .. }) => { todo!("loading referenced primitives") } @@ -402,6 +424,12 @@ impl< debug_assert_eq!(base_offset % 8, 0); ASM::mov_reg64_base32(buf, reg, *base_offset); } + Stack(ReferencedPrimitive { base_offset, size }) + if base_offset % 8 == 0 && *size == 8 => + { + // The primitive is aligned and the data is exactly 8 bytes, treat it like regular stack. + ASM::mov_reg64_base32(buf, reg, *base_offset); + } Stack(ReferencedPrimitive { .. 
}) => { todo!("loading referenced primitives") } @@ -450,6 +478,12 @@ impl< debug_assert_eq!(base_offset % 8, 0); ASM::mov_freg64_base32(buf, reg, *base_offset); } + Stack(ReferencedPrimitive { base_offset, size }) + if base_offset % 8 == 0 && *size == 8 => + { + // The primitive is aligned and the data is exactly 8 bytes, treat it like regular stack. + ASM::mov_freg64_base32(buf, reg, *base_offset); + } Stack(ReferencedPrimitive { .. }) => { todo!("loading referenced primitives") } @@ -766,14 +800,7 @@ impl< self.free_stack_chunk(base_offset, 8); } Stack(Complex { .. } | ReferencedPrimitive { .. }) => { - let owned_data = if let Some(owned_data) = self.allocation_map.remove(sym) { - owned_data - } else { - internal_error!("Unknown symbol: {}", sym); - }; - if Rc::strong_count(&owned_data) == 1 { - self.free_stack_chunk(owned_data.0, owned_data.1); - } + self.free_reference(sym); } _ => {} } @@ -795,6 +822,18 @@ impl< } } + // Frees a reference and releases an allocation if it is no longer used. + fn free_reference(&mut self, sym: &Symbol) { + let owned_data = if let Some(owned_data) = self.allocation_map.remove(sym) { + owned_data + } else { + internal_error!("Unknown symbol: {}", sym); + }; + if Rc::strong_count(&owned_data) == 1 { + self.free_stack_chunk(owned_data.0, owned_data.1); + } + } + fn free_stack_chunk(&mut self, base_offset: i32, size: u32) { let loc = (base_offset, size); // Note: this position current points to the offset following the specified location.