diff --git a/compiler/arena_pool/src/pool.rs b/compiler/arena_pool/src/pool.rs
index b035aefd32..f5b497b430 100644
--- a/compiler/arena_pool/src/pool.rs
+++ b/compiler/arena_pool/src/pool.rs
@@ -38,7 +38,7 @@ pub struct ArenaVec<T> {
 }
 
 impl<T> ArenaVec<T> {
-    pub fn new_in<'a>(arena: &mut Arena<T>) -> Self {
+    pub fn new_in(arena: &mut Arena<T>) -> Self {
         // We can't start with a NonNull::dangling pointer because when we go
         // to push elements into this, they'll try to verify the dangling
         // pointer resides in the arena it was given, which will likely panic.
@@ -48,7 +48,7 @@ impl<T> ArenaVec<T> {
         Self::with_capacity_in(0, arena)
     }
 
-    pub fn with_capacity_in<'a>(capacity: usize, arena: &mut Arena<T>) -> Self {
+    pub fn with_capacity_in(capacity: usize, arena: &mut Arena<T>) -> Self {
         let ptr = arena.alloc_vec(capacity);
 
         Self {
@@ -88,7 +88,7 @@ impl<T> ArenaVec<T> {
         }
 
         // Store the element in the appropriate memory address.
-        let elem_ptr = unsafe { &mut *self.buffer_ptr.as_ptr().offset(self.len as isize) };
+        let elem_ptr = unsafe { &mut *self.buffer_ptr.as_ptr().add(self.len) };
 
         *elem_ptr = val;
 
@@ -104,7 +104,7 @@ impl<T> ArenaVec<T> {
             // deallocated once the pool where it was created gets deallocated
             // (along with all of the Arenas it detached), and we just verified that
             // this ArenaRef's ID matches a pool which has not yet been deallocated.
-            Some(unsafe { &*self.buffer_ptr.as_ptr().offset(index as isize) })
+            Some(unsafe { &*self.buffer_ptr.as_ptr().add(index) })
         } else {
             None
         }
@@ -119,7 +119,7 @@ impl<T> ArenaVec<T> {
             // deallocated once the pool where it was created gets deallocated
             // (along with all of the Arenas it detached), and we just verified that
             // this ArenaRef's ID matches a pool which has not yet been deallocated.
-            Some(unsafe { &mut *self.buffer_ptr.as_ptr().offset(index as isize) })
+            Some(unsafe { &mut *self.buffer_ptr.as_ptr().add(index) })
         } else {
             None
         }
@@ -239,7 +239,7 @@ impl<T> Iterator for ArenaIter<T> {
         if self.quantity_remaining != 0 {
             let first_chunk_ptr = self.ptr;
 
-            self.ptr = unsafe { self.ptr.offset(self.first_chunk_capacity as isize) };
+            self.ptr = unsafe { self.ptr.add(self.first_chunk_capacity) };
 
             self.quantity_remaining -= 1;
             Some(Arena {
@@ -269,7 +269,7 @@ impl<T> Arena<T> {
             self.first_chunk_len += 1;
 
             // Return a pointer to the next available slot.
-            unsafe { self.first_chunk_ptr.offset(self.first_chunk_len as isize) }
+            unsafe { self.first_chunk_ptr.add(self.first_chunk_len) }
         } else {
             // We ran out of space in the first chunk, so we turn to extra chunks.
             // First, ensure that we have an extra chunk with enough space in it.
@@ -309,7 +309,7 @@ impl<T> Arena<T> {
             self.first_chunk_len += num_elems;
 
             // Return a pointer to the next available element.
-            unsafe { self.first_chunk_ptr.offset(self.first_chunk_len as isize) }
+            unsafe { self.first_chunk_ptr.add(self.first_chunk_len) }
         } else {
             let new_chunk_cap = self.first_chunk_cap.max(num_elems);
 
@@ -367,7 +367,7 @@ impl<T> AsArena<T> for Arena<T> {
 fn verify_ownership<T>(
     first_chunk_ptr: *const T,
     first_chunk_cap: usize,
-    extra_chunks: &Vec<Vec<T>>,
+    extra_chunks: &[Vec<T>],
     ptr: *const T,
 ) {
     let addr = ptr as usize;
diff --git a/compiler/load/src/file.rs b/compiler/load/src/file.rs
index e7188f5322..7360cc5e89 100644
--- a/compiler/load/src/file.rs
+++ b/compiler/load/src/file.rs
@@ -1220,7 +1220,7 @@ fn parse_and_constrain<'a>(
     // SAFETY: By this point we've already incrementally verified that there
     // are no UTF-8 errors in these bytes. If there had been any UTF-8 errors,
     // we'd have bailed out before now.
-    let src = unsafe { from_utf8_unchecked(header.src.as_ref()) };
+    let src = unsafe { from_utf8_unchecked(header.src) };
 
     // Send the constraint to the main thread for processing.
     Ok(Msg::Constrained {