From a39f610395eabc96e7e18d5bcb24b2dbb77d883d Mon Sep 17 00:00:00 2001 From: Brendan Hansknecht Date: Sat, 19 Feb 2022 15:53:31 -0800 Subject: [PATCH] add copying to a return pointer --- compiler/gen_dev/src/generic64/aarch64.rs | 33 +++++++++++++++++++++++ compiler/gen_dev/src/generic64/mod.rs | 13 +++++++++ compiler/gen_dev/src/generic64/storage.rs | 19 +++++++++++++ compiler/gen_dev/src/generic64/x86_64.rs | 24 ++++++++++++++++- 4 files changed, 88 insertions(+), 1 deletion(-) diff --git a/compiler/gen_dev/src/generic64/aarch64.rs b/compiler/gen_dev/src/generic64/aarch64.rs index 45c5bd514b..35657e2983 100644 --- a/compiler/gen_dev/src/generic64/aarch64.rs +++ b/compiler/gen_dev/src/generic64/aarch64.rs @@ -480,6 +480,39 @@ impl Assembler for AArch64Assembler { } } + #[inline(always)] + fn mov_reg64_mem64_offset32( + buf: &mut Vec<'_, u8>, + dst: AArch64GeneralReg, + src: AArch64GeneralReg, + offset: i32, + ) { + if offset < 0 { + todo!("negative mem offsets for AArch64"); + } else if offset < (0xFFF << 8) { + debug_assert!(offset % 8 == 0); + ldr_reg64_imm12(buf, dst, src, (offset as u16) >> 3); + } else { + todo!("mem offsets over 32k for AArch64"); + } + } + #[inline(always)] + fn mov_mem64_offset32_reg64( + buf: &mut Vec<'_, u8>, + dst: AArch64GeneralReg, + offset: i32, + src: AArch64GeneralReg, + ) { + if offset < 0 { + todo!("negative mem offsets for AArch64"); + } else if offset < (0xFFF << 8) { + debug_assert!(offset % 8 == 0); + str_reg64_imm12(buf, src, dst, (offset as u16) >> 3); + } else { + todo!("mem offsets over 32k for AArch64"); + } + } + #[inline(always)] fn movsx_reg64_base32(buf: &mut Vec<'_, u8>, dst: AArch64GeneralReg, offset: i32, size: u8) { debug_assert!(size <= 8); diff --git a/compiler/gen_dev/src/generic64/mod.rs b/compiler/gen_dev/src/generic64/mod.rs index fc916352f9..386978fd04 100644 --- a/compiler/gen_dev/src/generic64/mod.rs +++ b/compiler/gen_dev/src/generic64/mod.rs @@ -172,6 +172,19 @@ pub trait Assembler: Sized { fn 
/// Copies a complex symbol on the stack to the arg pointer.
diff --git a/compiler/gen_dev/src/generic64/x86_64.rs b/compiler/gen_dev/src/generic64/x86_64.rs index b7ac7737bc..856fa2be68 100644 --- a/compiler/gen_dev/src/generic64/x86_64.rs +++ b/compiler/gen_dev/src/generic64/x86_64.rs @@ -408,7 +408,10 @@ impl CallConv for X86_64Syste ); } } - x => todo!("returning complex type, {:?}", x), + _ => { + // This is a large type returned via the arg pointer. + storage_manager.copy_symbol_to_arg_pionter(buf, sym, layout); + } } } @@ -1033,6 +1036,25 @@ impl Assembler for X86_64Assembler { mov_base64_offset32_reg64(buf, X86_64GeneralReg::RBP, offset, src) } + #[inline(always)] + fn mov_reg64_mem64_offset32( + buf: &mut Vec<'_, u8>, + dst: X86_64GeneralReg, + src: X86_64GeneralReg, + offset: i32, + ) { + mov_reg64_base64_offset32(buf, dst, src, offset) + } + #[inline(always)] + fn mov_mem64_offset32_reg64( + buf: &mut Vec<'_, u8>, + dst: X86_64GeneralReg, + offset: i32, + src: X86_64GeneralReg, + ) { + mov_base64_offset32_reg64(buf, dst, offset, src) + } + #[inline(always)] fn movsx_reg64_base32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, offset: i32, size: u8) { debug_assert!(size <= 8);