mirror of https://github.com/roc-lang/roc.git
synced 2025-10-02 16:21:11 +00:00

reimplement RocList and RocStr

commit 788c8a6af2 (parent 16e568be76)

19 changed files with 680 additions and 792 deletions
roc_std/src/lib.rs

@@ -1,14 +1,16 @@
 #![crate_type = "lib"]
 #![no_std]
 use core::convert::From;
 use core::ffi::c_void;
 use core::fmt;
 use core::mem::{ManuallyDrop, MaybeUninit};
 use core::ops::Drop;
 
+mod rc;
 mod roc_list;
 mod roc_str;
+mod storage;
 
+pub use rc::ReferenceCount;
 pub use roc_list::RocList;
 pub use roc_str::RocStr;

@@ -24,8 +26,6 @@ extern "C" {
     pub fn roc_dealloc(ptr: *mut c_void, alignment: u32);
 }
 
-const REFCOUNT_1: isize = isize::MIN;
-
 #[repr(u8)]
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum RocOrder {

@@ -34,13 +34,6 @@ pub enum RocOrder {
     Lt = 2,
 }
 
-#[derive(Clone, Copy, Debug)]
-pub enum Storage {
-    ReadOnly,
-    Refcounted(isize),
-    Capacity(usize),
-}
-
 /// Like a Rust `Result`, but following Roc's ABI instead of Rust's.
 /// (Using Rust's `Result` instead of this will not work properly with Roc code!)
 ///
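Note: the `extern "C"` block above leaves allocation to the host; `roc_alloc`, `roc_realloc`, and `roc_dealloc` are unresolved symbols that every platform must supply. A minimal sketch (not part of this commit) of one way a host could satisfy them, assuming it links against libc; the signatures come from the extern declarations, while the libc-backed bodies are purely illustrative:

    use core::ffi::c_void;

    #[no_mangle]
    pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
        // Illustrative only: a real host might use an arena or a tracking allocator.
        libc::malloc(size).cast()
    }

    #[no_mangle]
    pub unsafe extern "C" fn roc_realloc(
        ptr: *mut c_void,
        new_size: usize,
        _old_size: usize,
        _alignment: u32,
    ) -> *mut c_void {
        libc::realloc(ptr.cast(), new_size).cast()
    }

    #[no_mangle]
    pub unsafe extern "C" fn roc_dealloc(ptr: *mut c_void, _alignment: u32) {
        libc::free(ptr.cast())
    }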
roc_std/src/rc.rs (new file, 100 lines)

@@ -0,0 +1,100 @@
+/// A type which uses reference counting for its heap-allocated memory.
+///
+/// Note that if a type doesn't allocate any heap memory (e.g. `i32`), the
+/// `increment` and `decrement` methods don't need to do anything.
+///
+/// # Safety
+///
+/// It must be safe to memcpy this type to a new location after the reference count has been increased.
+pub unsafe trait ReferenceCount {
+    /// Increment the reference count.
+    fn increment(&self);
+
+    /// Decrement the reference count.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that `ptr` points to a value with a non-zero
+    /// reference count.
+    unsafe fn decrement(ptr: *const Self);
+}
+
+macro_rules! impl_reference_count_for_primitive {
+    ($ty:ty) => {
+        unsafe impl ReferenceCount for $ty {
+            fn increment(&self) {
+                // Do nothing.
+            }
+
+            unsafe fn decrement(_ptr: *const Self) {
+                // Do nothing.
+            }
+        }
+    };
+}
+
+impl_reference_count_for_primitive!(bool);
+impl_reference_count_for_primitive!(char);
+impl_reference_count_for_primitive!(u8);
+impl_reference_count_for_primitive!(i8);
+impl_reference_count_for_primitive!(u16);
+impl_reference_count_for_primitive!(i16);
+impl_reference_count_for_primitive!(u32);
+impl_reference_count_for_primitive!(i32);
+impl_reference_count_for_primitive!(u64);
+impl_reference_count_for_primitive!(i64);
+impl_reference_count_for_primitive!(u128);
+impl_reference_count_for_primitive!(i128);
+impl_reference_count_for_primitive!(f32);
+impl_reference_count_for_primitive!(f64);
+
+macro_rules! impl_reference_count_for_tuple {
+    ($($ty:ident: $field:tt,)*) => {
+        unsafe impl<$($ty),*> ReferenceCount for ($($ty,)*)
+        where
+            $($ty: ReferenceCount,)*
+        {
+            fn increment(&self) {
+                $(self.$field.increment();)*
+            }
+
+            #[allow(unused_variables, clippy::unused_unit)]
+            unsafe fn decrement(ptr: *const Self) {
+                let ptrs = {
+                    let this = &*ptr;
+                    ($(core::ptr::addr_of!(this.$field),)*)
+                };
+
+                $($ty::decrement(ptrs.$field);)*
+            }
+        }
+    };
+}
+
+impl_reference_count_for_tuple!();
+impl_reference_count_for_tuple!(A: 0,);
+impl_reference_count_for_tuple!(A: 0, B: 1,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8, J: 9,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8, J: 9, K: 10,);
+impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8, J: 9, K: 10, L: 11,);
+
+unsafe impl<T, const N: usize> ReferenceCount for [T; N]
+where
+    T: ReferenceCount,
+{
+    fn increment(&self) {
+        self.iter().for_each(T::increment)
+    }
+
+    unsafe fn decrement(ptr: *const Self) {
+        for i in 0..N {
+            T::decrement(ptr.cast::<T>().add(i));
+        }
+    }
+}
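To make the trait's contract concrete, here is a sketch (not part of this commit) of implementing `ReferenceCount` for a hypothetical aggregate type; the type and field names are made up for illustration. Each method simply forwards to the fields, exactly as the tuple and array impls above do:

    use roc_std::{ReferenceCount, RocStr};

    // Hypothetical glue type: one refcounted field, one plain field.
    #[repr(C)]
    struct Person {
        name: RocStr,
        age: u8,
    }

    unsafe impl ReferenceCount for Person {
        fn increment(&self) {
            self.name.increment();
            self.age.increment(); // no-op: u8 owns no heap memory
        }

        unsafe fn decrement(ptr: *const Self) {
            // Forward to each field through raw field pointers, mirroring the tuple impl.
            let this = &*ptr;
            RocStr::decrement(core::ptr::addr_of!(this.name));
            u8::decrement(core::ptr::addr_of!(this.age)); // no-op
        }
    }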
roc_std/src/roc_list.rs

@@ -1,419 +1,335 @@
-use core::ffi::c_void;
-use core::fmt;
-use core::ops::{Deref, DerefMut, Drop};
-use core::{mem, ptr};
-
-use crate::{roc_alloc, roc_dealloc, roc_realloc, Storage, REFCOUNT_1};
+#![deny(unsafe_op_in_unsafe_fn)]
+
+use core::{
+    cell::Cell, cmp, fmt::Debug, intrinsics::copy_nonoverlapping, ops::Deref, ptr::NonNull,
+};
+
+use crate::{rc::ReferenceCount, roc_alloc, roc_dealloc, roc_realloc, storage::Storage};
 
 #[repr(C)]
-pub struct RocList<T> {
-    elements: *mut T,
+pub struct RocList<T>
+where
+    T: ReferenceCount,
+{
+    elements: Option<NonNull<T>>,
     length: usize,
 }
 
-impl<T: Clone> Clone for RocList<T> {
-    fn clone(&self) -> Self {
-        Self::from_slice(self.as_slice())
-    }
-}
-
-impl<T> RocList<T> {
+impl<T> RocList<T>
+where
+    T: ReferenceCount,
+{
+    pub fn empty() -> Self {
+        RocList {
+            elements: None,
+            length: 0,
+        }
+    }
+
+    pub fn from_slice(slice: &[T]) -> Self {
+        let mut list = Self::empty();
+        list.extend_from_slice(slice);
+        list
+    }
+
     pub fn len(&self) -> usize {
         self.length
     }
 
     pub fn is_empty(&self) -> bool {
-        self.length == 0
+        self.len() == 0
     }
 
-    pub fn get(&self, index: usize) -> Option<&T> {
-        if index < self.len() {
-            Some(unsafe {
-                let raw = self.elements.add(index);
-
-                &*raw
-            })
-        } else {
-            None
-        }
-    }
-
-    pub fn storage(&self) -> Option<Storage> {
-        use core::cmp::Ordering::*;
-
-        if self.length == 0 {
-            return None;
-        }
-
-        unsafe {
-            let value = *self.get_storage_ptr();
-
-            // NOTE doesn't work with elements of 16 or more bytes
-            match isize::cmp(&value, &0) {
-                Equal => Some(Storage::ReadOnly),
-                Less => Some(Storage::Refcounted(value)),
-                Greater => Some(Storage::Capacity(value as usize)),
-            }
-        }
-    }
-
-    fn get_storage_ptr_help(elements: *mut T) -> *mut isize {
-        let ptr = elements as *mut isize;
-
-        unsafe { ptr.offset(-1) }
-    }
-
-    fn get_storage_ptr(&self) -> *const isize {
-        Self::get_storage_ptr_help(self.elements)
-    }
-
-    fn get_storage_ptr_mut(&mut self) -> *mut isize {
-        self.get_storage_ptr() as *mut isize
-    }
-
-    fn set_storage_ptr(&mut self, ptr: *const isize) {
-        self.elements = unsafe { ptr.offset(1) as *mut T };
-    }
-
-    fn get_element_ptr(elements: *const T) -> *const T {
-        let elem_alignment = core::mem::align_of::<T>();
-        let ptr = elements as *const usize;
-
-        unsafe {
-            if elem_alignment <= core::mem::align_of::<usize>() {
-                ptr.add(1) as *const T
-            } else {
-                // If elements have an alignment bigger than usize (e.g. an i128),
-                // we will have necessarily allocated two usize slots worth of
-                // space for the storage value (with the first usize slot being
-                // padding for alignment's sake), and we need to skip past both.
-                ptr.add(2) as *const T
-            }
-        }
-    }
-
-    pub fn from_slice_with_capacity(slice: &[T], capacity: usize) -> Self
-    where
-        T: Clone,
-    {
-        assert!(capacity > 0);
-        assert!(slice.len() <= capacity);
-
-        let element_bytes = capacity * core::mem::size_of::<T>();
-
-        let padding = {
-            if core::mem::align_of::<T>() <= core::mem::align_of::<usize>() {
-                // aligned on usize (8 bytes on 64-bit systems)
-                0
-            } else {
-                // aligned on 2*usize (16 bytes on 64-bit systems)
-                core::mem::size_of::<usize>()
-            }
-        };
-
-        let num_bytes = core::mem::size_of::<usize>() + padding + element_bytes;
-
-        let elements = unsafe {
-            let raw_ptr = roc_alloc(num_bytes, core::mem::size_of::<usize>() as u32) as *mut u8;
-
-            // pointer to the first element
-            let raw_ptr = Self::get_element_ptr(raw_ptr as *mut T) as *mut T;
-
-            // write the refcount
-            let refcount_ptr = raw_ptr as *mut isize;
-            *(refcount_ptr.offset(-1)) = isize::MIN;
-
-            // Clone the elements into the new array.
-            let target_ptr = raw_ptr;
-            for (i, value) in slice.iter().cloned().enumerate() {
-                let target_ptr = target_ptr.add(i);
-                target_ptr.write(value);
-            }
-
-            raw_ptr
-        };
-
-        Self {
-            length: slice.len(),
-            elements,
-        }
-    }
-
-    pub fn from_slice(slice: &[T]) -> Self
-    where
-        T: Clone,
-    {
-        // Avoid allocation with empty list.
-        if slice.is_empty() {
-            Self::default()
-        } else {
-            Self::from_slice_with_capacity(slice, slice.len())
-        }
-    }
-
     pub fn as_slice(&self) -> &[T] {
-        unsafe { core::slice::from_raw_parts(self.elements, self.length) }
-    }
-
-    pub fn as_mut_slice(&mut self) -> &mut [T] {
-        unsafe { core::slice::from_raw_parts_mut(self.elements, self.length) }
+        &*self
     }
-    /// Copy the contents of the given slice into the end of this list,
-    /// reallocating and resizing as necessary.
-    pub fn append_slice(&mut self, slice: &[T]) {
-        let new_len = self.len() + slice.len();
-        let storage_ptr = self.get_storage_ptr_mut();
-
-        // First, ensure that there's enough storage space.
-        unsafe {
-            let storage_val = *storage_ptr as isize;
-
-            // Check if this is refcounted, readonly, or has a capacity.
-            // (Capacity will be positive if it has a capacity.)
-            if storage_val > 0 {
-                let capacity = storage_val as usize;
-
-                // We don't have enough capacity, so we need to get some more.
-                if capacity < new_len {
-                    // Double our capacity using realloc
-                    let new_cap = 2 * capacity;
-                    let new_ptr = roc_realloc(
-                        storage_ptr as *mut c_void,
-                        new_cap,
-                        capacity,
-                        Self::align_of_storage_ptr(),
-                    ) as *mut isize;
-
-                    // Write the new capacity into the new memory
-                    *new_ptr = new_cap as isize;
-
-                    // Copy all the existing elements into the new allocation.
-                    ptr::copy_nonoverlapping(self.elements, new_ptr as *mut T, self.len());
-
-                    // Update our storage pointer to be the new one
-                    self.set_storage_ptr(new_ptr);
-                }
-            } else {
-                // If this was reference counted, decrement the refcount!
-                if storage_val < 0 {
-                    let refcount = storage_val;
-
-                    // Either deallocate or decrement.
-                    if refcount == REFCOUNT_1 {
-                        roc_dealloc(storage_ptr as *mut c_void, Self::align_of_storage_ptr());
-                    } else {
-                        *storage_ptr = refcount - 1;
-                    }
-                }
-
-                // This is either refcounted or readonly; either way, we need
-                // to clone the elements!
-
-                // Double the capacity we need, in case there are future additions.
-                let new_cap = new_len * 2;
-                let new_ptr = roc_alloc(new_cap, Self::align_of_storage_ptr()) as *mut isize;
-
-                // Write the new capacity into the new memory; this list is
-                // now unique, and gets its own capacity!
-                *new_ptr = new_cap as isize;
-
-                // Copy all the existing elements into the new allocation.
-                ptr::copy_nonoverlapping(self.elements, new_ptr as *mut T, self.len());
-
-                // Update our storage pointer to be the new one
-                self.set_storage_ptr(new_ptr);
-            }
-
-            // Since this is an append, we want to start writing new elements
-            // into the memory immediately after the current last element.
-            let dest = self.elements.add(self.len());
-
-            // There's now enough storage to append the contents of the slice
-            // in-place, so do that!
-            ptr::copy_nonoverlapping(slice.as_ptr(), dest, self.len());
-        }
-
-        self.length = new_len;
-    }
-
-    /// The alignment we need is either the alignment of T, or else
-    /// the alignment of usize, whichever is higher. That's because we need
-    /// to store both T values as well as the refcount/capacity storage slot.
-    fn align_of_storage_ptr() -> u32 {
-        mem::align_of::<T>().max(mem::align_of::<usize>()) as u32
-    }
-
-    unsafe fn drop_pointer_to_first_argument(ptr: *mut T) {
-        let storage_ptr = Self::get_storage_ptr_help(ptr);
-        let storage_val = *storage_ptr;
-
-        if storage_val == REFCOUNT_1 || storage_val > 0 {
-            // If we have no more references, or if this was unique,
-            // deallocate it.
-            roc_dealloc(storage_ptr as *mut c_void, Self::align_of_storage_ptr());
-        } else if storage_val < 0 {
-            // If this still has more references, decrement one.
-            *storage_ptr = storage_val - 1;
-        }
-
-        // The only remaining option is that this is in readonly memory,
-        // in which case we shouldn't attempt to do anything to it.
-    }
+    pub fn extend_from_slice(&mut self, slice: &[T]) {
+        // TODO: Can we do better for ZSTs? Alignment might be a problem.
+
+        if slice.is_empty() {
+            return;
+        }
+
+        let new_len = self.len() + slice.len();
+        let alignment = cmp::max(core::mem::align_of::<T>(), core::mem::align_of::<Storage>());
+        let elements_offset = alignment;
+
+        let new_size = elements_offset + core::mem::size_of::<T>() * (self.len() + slice.len());
+
+        let new_ptr = if let Some((elements, storage)) = self.elements_and_storage() {
+            // Decrement the list's reference count.
+            let mut copy = storage.get();
+            let is_unique = copy.decrease();
+
+            if is_unique {
+                // If the memory is not shared, we can reuse the memory.
+                let old_size = elements_offset + core::mem::size_of::<T>() * self.len();
+                unsafe {
+                    let ptr = elements.as_ptr().cast::<u8>().sub(alignment).cast();
+                    roc_realloc(ptr, new_size, old_size, alignment as u32).cast()
+                }
+            } else {
+                if !copy.is_readonly() {
+                    // Write the decremented reference count back.
+                    storage.set(copy);
+                }
+
+                // Allocate new memory.
+                let new_ptr = unsafe { roc_alloc(new_size, alignment as u32) };
+                let new_elements = unsafe { new_ptr.cast::<u8>().add(alignment).cast::<T>() };
+
+                // Initialize the reference count.
+                unsafe {
+                    let storage_ptr = new_elements.cast::<Storage>().sub(1);
+                    storage_ptr.write(Storage::new_reference_counted());
+                }
+
+                // Copy the old elements to the new allocation.
+                unsafe {
+                    copy_nonoverlapping(elements.as_ptr(), new_elements, self.length);
+                }
+
+                new_ptr
+            }
+        } else {
+            // Allocate new memory.
+            let new_ptr = unsafe { roc_alloc(new_size, alignment as u32) };
+            let new_elements = unsafe { new_ptr.cast::<u8>().add(elements_offset).cast::<T>() };
+
+            // Initialize the reference count.
+            unsafe {
+                let storage_ptr = new_elements.cast::<Storage>().sub(1);
+                storage_ptr.write(Storage::new_reference_counted());
+            }
+
+            new_ptr
+        };
+
+        let elements = unsafe { new_ptr.cast::<u8>().add(elements_offset).cast::<T>() };
+
+        let non_null_elements = NonNull::new(elements).unwrap();
+        self.elements = Some(non_null_elements);
+
+        let elements = self.elements.unwrap().as_ptr();
+
+        let append_ptr = unsafe { elements.add(self.len()) };
+        for (i, element) in slice.iter().enumerate() {
+            // Increment the element's reference count.
+            element.increment();
+
+            // Write the element into the slot.
+            unsafe {
+                let element = core::ptr::read(element);
+                append_ptr.add(i).write(element);
+            }
+
+            // It's important that the length is increased one by one, to
+            // make sure that we don't drop uninitialized elements, even when
+            // incrementing the reference count panics.
+            self.length += 1;
+        }
+    }
+
+    fn elements_and_storage(&self) -> Option<(NonNull<T>, &Cell<Storage>)> {
+        let elements = self.elements?;
+        let storage = unsafe { &*elements.as_ptr().cast::<Cell<Storage>>().sub(1) };
+        Some((elements, storage))
+    }
 }
 
-impl<T> Deref for RocList<T> {
+impl<T> Deref for RocList<T>
+where
+    T: ReferenceCount,
+{
     type Target = [T];
 
-    fn deref(&self) -> &[T] {
-        self.as_slice()
+    fn deref(&self) -> &Self::Target {
+        if let Some(elements) = self.elements {
+            let elements = core::ptr::slice_from_raw_parts(elements.as_ptr(), self.length);
+            unsafe { &*elements }
+        } else {
+            &[]
+        }
     }
 }
 
-impl<T> DerefMut for RocList<T> {
-    fn deref_mut(&mut self) -> &mut [T] {
-        self.as_mut_slice()
-    }
-}
-
-impl<'a, T> IntoIterator for &'a RocList<T> {
-    type Item = &'a T;
-
-    type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.as_slice().iter()
-    }
-}
-
-impl<T> IntoIterator for RocList<T> {
-    type Item = T;
-
-    type IntoIter = IntoIter<T>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        let remaining = self.len();
-
-        let buf = unsafe { NonNull::new_unchecked(self.elements as _) };
-        let ptr = self.elements;
-
-        IntoIter {
-            buf,
-            ptr,
-            remaining,
-        }
-    }
-}
-
-use core::ptr::NonNull;
-
-pub struct IntoIter<T> {
-    buf: NonNull<T>,
-    // pub cap: usize,
-    ptr: *const T,
-    remaining: usize,
-}
-
-impl<T> Iterator for IntoIter<T> {
-    type Item = T;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        next_help(self)
-    }
-}
-
-fn next_help<T>(this: &mut IntoIter<T>) -> Option<T> {
-    if this.remaining == 0 {
-        None
-    } else if mem::size_of::<T>() == 0 {
-        // purposefully don't use 'ptr.offset' because for
-        // vectors with 0-size elements this would return the
-        // same pointer.
-        this.remaining -= 1;
-
-        // Make up a value of this ZST.
-        Some(unsafe { mem::zeroed() })
-    } else {
-        let old = this.ptr;
-        this.ptr = unsafe { this.ptr.offset(1) };
-        this.remaining -= 1;
-
-        Some(unsafe { ptr::read(old) })
-    }
-}
-
-impl<T> Drop for IntoIter<T> {
-    fn drop(&mut self) {
-        // drop the elements that we have not yet returned.
-        while let Some(item) = next_help(self) {
-            drop(item);
-        }
-
-        // deallocate the whole buffer
-        unsafe {
-            RocList::drop_pointer_to_first_argument(self.buf.as_mut());
-        }
-    }
-}
-
-impl<T> Default for RocList<T> {
-    fn default() -> Self {
-        Self {
-            length: 0,
-            elements: core::ptr::null_mut(),
-        }
-    }
-}
-
-impl<T: fmt::Debug> fmt::Debug for RocList<T> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // RocList { storage: Refcounted(3), elements: [ 1,2,3,4] }
-        f.debug_struct("RocList")
-            .field("storage", &self.storage())
-            .field("elements", &self.as_slice())
-            .finish()
-    }
-}
-
-impl<T: PartialEq> PartialEq for RocList<T> {
-    fn eq(&self, other: &Self) -> bool {
-        if self.length != other.length {
-            return false;
-        }
-
-        for i in 0..self.length {
-            unsafe {
-                if *self.elements.add(i) != *other.elements.add(i) {
-                    return false;
-                }
-            }
-        }
-
-        true
-    }
-}
-
-impl<T: Eq> Eq for RocList<T> {}
-
-impl<T> Drop for RocList<T> {
-    fn drop(&mut self) {
-        if !self.is_empty() {
-            let storage_ptr = self.get_storage_ptr_mut();
-
-            unsafe {
-                let storage_val = *storage_ptr;
-
-                if storage_val == REFCOUNT_1 || storage_val > 0 {
-                    // If we have no more references, or if this was unique,
-                    // deallocate it.
-                    roc_dealloc(storage_ptr as *mut c_void, Self::align_of_storage_ptr());
-                } else if storage_val < 0 {
-                    // If this still has more references, decrement one.
-                    *storage_ptr = storage_val - 1;
-                }
-
-                // The only remaining option is that this is in readonly memory,
-                // in which case we shouldn't attempt to do anything to it.
-            }
-        }
-    }
-}
+impl<T> Default for RocList<T>
+where
+    T: ReferenceCount,
+{
+    fn default() -> Self {
+        Self::empty()
+    }
+}
+
+impl<T, U> PartialEq<RocList<U>> for RocList<T>
+where
+    T: PartialEq<U> + ReferenceCount,
+    U: ReferenceCount,
+{
+    fn eq(&self, other: &RocList<U>) -> bool {
+        self.deref() == other.deref()
+    }
+}
+
+impl<T> Eq for RocList<T> where T: Eq + ReferenceCount {}
+
+impl<T> Debug for RocList<T>
+where
+    T: Debug + ReferenceCount,
+{
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        self.deref().fmt(f)
+    }
+}
+
+unsafe impl<T> ReferenceCount for RocList<T>
+where
+    T: ReferenceCount,
+{
+    fn increment(&self) {
+        // Increment the list's reference count.
+        if let Some((_, storage)) = self.elements_and_storage() {
+            let mut copy = storage.get();
+            if !copy.is_readonly() {
+                copy.increment_reference_count();
+                storage.set(copy);
+            }
+
+            // Increment the children's reference counts.
+            self.iter().for_each(T::increment);
+        }
+    }
+
+    unsafe fn decrement(ptr: *const Self) {
+        let this = unsafe { &*ptr };
+        let (elements, storage) = if let Some((elements, storage)) = this.elements_and_storage() {
+            (elements, storage)
+        } else {
+            return;
+        };
+
+        // Decrement the reference counts of the contained values.
+        for i in 0..this.len() {
+            unsafe {
+                T::decrement(elements.as_ptr().add(i));
+            }
+        }
+
+        // Decrease the list's reference count.
+        let mut copy = storage.get();
+        let can_be_released = copy.decrease();
+
+        if !can_be_released {
+            if !copy.is_readonly() {
+                // Write the storage back.
+                storage.set(copy);
+            }
+            return;
+        }
+
+        // Release the memory.
+        let alignment = cmp::max(core::mem::align_of::<T>(), core::mem::align_of::<Storage>());
+        unsafe {
+            roc_dealloc(
+                elements.as_ptr().cast::<u8>().sub(alignment).cast(),
+                alignment as u32,
+            );
+        }
+    }
+}
+
+impl<T> Clone for RocList<T>
+where
+    T: ReferenceCount,
+{
+    fn clone(&self) -> Self {
+        // Increment the reference counts.
+        self.increment();
+
+        // Create a copy.
+        Self {
+            elements: self.elements,
+            length: self.length,
+        }
+    }
+}
+
+impl<T> Drop for RocList<T>
+where
+    T: ReferenceCount,
+{
+    fn drop(&mut self) {
+        unsafe {
+            Self::decrement(self);
+        }
+    }
+}
+
+impl<T> IntoIterator for RocList<T>
+where
+    T: ReferenceCount,
+{
+    type Item = T;
+
+    type IntoIter = IntoIter<T>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        IntoIter { list: self, idx: 0 }
+    }
+}
+
+pub struct IntoIter<T>
+where
+    T: ReferenceCount,
+{
+    list: RocList<T>,
+    idx: usize,
+}
+
+impl<T> Iterator for IntoIter<T>
+where
+    T: ReferenceCount,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.list.len() <= self.idx {
+            return None;
+        }
+
+        let elements = self.list.elements?;
+        let element_ptr = unsafe { elements.as_ptr().add(self.idx) };
+        self.idx += 1;
+
+        // Return the element.
+        let element = unsafe { element_ptr.read() };
+        Some(element)
+    }
+}
+
+impl<T> Drop for IntoIter<T>
+where
+    T: ReferenceCount,
+{
+    fn drop(&mut self) {
+        // Check if there are any elements left of which we need to decrement
+        // the reference counts.
+        let elements = if let Some(elements) = self.list.elements {
+            elements
+        } else {
+            return;
+        };
+
+        // Set the list's length to zero to prevent double-frees.
+        // Note that this leaks if decrementing any of the elements' reference
+        // counts panics.
+        let len = core::mem::take(&mut self.list.length);
+
+        // Decrement the reference counts of the elements that haven't been
+        // returned from the iterator.
+        for i in self.idx..len {
+            unsafe {
+                T::decrement(elements.as_ptr().add(i));
+            }
+        }
+    }
+}
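A usage sketch of the copy-on-write behavior this rewrite introduces (illustrative only; it assumes a host allocator is linked): `clone` just bumps the shared reference count, while `extend_from_slice` reuses the allocation only when the list is unique.

    let mut list: RocList<i64> = RocList::from_slice(&[1, 2, 3]);
    let alias = list.clone(); // O(1): increments the shared Storage refcount

    // The buffer is now shared, so extending copies into a fresh allocation
    // and decrements the old one; `alias` is unaffected.
    list.extend_from_slice(&[4, 5]);
    assert_eq!(list.as_slice(), &[1, 2, 3, 4, 5]);
    assert_eq!(alias.as_slice(), &[1, 2, 3]);

    // Dropping the last reference to each buffer releases it via roc_dealloc.
    drop(alias);
    drop(list);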

roc_std/src/roc_str.rs

@@ -1,28 +1,59 @@
-use core::ffi::c_void;
-use core::fmt::{self, Display, Formatter};
-use core::ops::{Deref, DerefMut, Drop};
-use core::{mem, ptr, slice};
-
-use crate::{roc_alloc, roc_dealloc, Storage, REFCOUNT_1};
+#![deny(unsafe_op_in_unsafe_fn)]
+
+use core::{
+    convert::TryFrom,
+    fmt::Debug,
+    mem::{size_of, ManuallyDrop},
+    ops::{Deref, DerefMut},
+};
+
+use crate::{rc::ReferenceCount, RocList};
 
-#[repr(C)]
-pub struct RocStr {
-    elements: *mut u8,
-    length: usize,
-}
+#[repr(transparent)]
+pub struct RocStr(RocStrInner);
 
 impl RocStr {
     pub const SIZE: usize = core::mem::size_of::<Self>();
     pub const MASK: u8 = 0b1000_0000;
 
-    pub fn len(&self) -> usize {
-        if self.is_small_str() {
-            let bytes = self.length.to_ne_bytes();
-            let last_byte = bytes[mem::size_of::<usize>() - 1];
-
-            (last_byte ^ Self::MASK) as usize
-        } else {
-            self.length
-        }
-    }
+    pub const fn empty() -> Self {
+        Self(RocStrInner {
+            small_string: SmallString::empty(),
+        })
+    }
+
+    /// Create a string from bytes.
+    ///
+    /// # Safety
+    ///
+    /// `slice` must be valid UTF-8.
+    pub unsafe fn from_slice(slice: &[u8]) -> Self {
+        if let Some(small_string) = unsafe { SmallString::try_from(slice) } {
+            Self(RocStrInner { small_string })
+        } else {
+            let heap_allocated = RocList::from_slice(slice);
+            Self(RocStrInner {
+                heap_allocated: ManuallyDrop::new(heap_allocated),
+            })
+        }
+    }
+
+    fn is_small_str(&self) -> bool {
+        unsafe { self.0.small_string.is_small_str() }
+    }
+
+    fn as_enum_ref(&self) -> RocStrInnerRef {
+        if self.is_small_str() {
+            unsafe { RocStrInnerRef::SmallString(&self.0.small_string) }
+        } else {
+            unsafe { RocStrInnerRef::HeapAllocated(&self.0.heap_allocated) }
+        }
+    }
+
+    pub fn len(&self) -> usize {
+        match self.as_enum_ref() {
+            RocStrInnerRef::HeapAllocated(h) => h.len(),
+            RocStrInnerRef::SmallString(s) => s.len(),
+        }
+    }
@@ -30,226 +61,19 @@ impl RocStr {
         self.len() == 0
     }
 
-    pub fn is_small_str(&self) -> bool {
-        (self.length as isize) < 0
-    }
-
-    pub const fn empty() -> Self {
-        Self {
-            length: isize::MIN as usize,
-            elements: core::ptr::null_mut(),
-        }
-    }
-
-    pub fn get(&self, index: usize) -> Option<&u8> {
-        if index < self.len() {
-            Some(unsafe {
-                let raw = if self.is_small_str() {
-                    self.get_small_str_ptr().add(index)
-                } else {
-                    self.elements.add(index)
-                };
-
-                &*raw
-            })
-        } else {
-            None
-        }
-    }
-
-    pub fn get_bytes(&self) -> *const u8 {
-        if self.is_small_str() {
-            self.get_small_str_ptr()
-        } else {
-            self.elements
-        }
-    }
-
-    pub fn storage(&self) -> Option<Storage> {
-        use core::cmp::Ordering::*;
-
-        if self.is_small_str() {
-            return None;
-        }
-
-        unsafe {
-            let value = *self.get_storage_ptr();
-
-            // NOTE doesn't work with elements of 16 or more bytes
-            match isize::cmp(&(value as isize), &0) {
-                Equal => Some(Storage::ReadOnly),
-                Less => Some(Storage::Refcounted(value)),
-                Greater => Some(Storage::Capacity(value as usize)),
-            }
-        }
-    }
-
-    fn get_storage_ptr(&self) -> *const isize {
-        let ptr = self.elements as *const isize;
-
-        unsafe { ptr.offset(-1) }
-    }
-
-    fn get_storage_ptr_mut(&mut self) -> *mut isize {
-        self.get_storage_ptr() as *mut isize
-    }
-
-    fn get_element_ptr(elements: *const u8) -> *const usize {
-        let elem_alignment = core::mem::align_of::<u8>();
-        let ptr = elements as *const usize;
-
-        unsafe {
-            if elem_alignment <= core::mem::align_of::<usize>() {
-                ptr.add(1)
-            } else {
-                // If elements have an alignment bigger than usize (e.g. an i128),
-                // we will have necessarily allocated two usize slots worth of
-                // space for the storage value (with the first usize slot being
-                // padding for alignment's sake), and we need to skip past both.
-                ptr.add(2)
-            }
-        }
-    }
-
-    fn get_small_str_ptr(&self) -> *const u8 {
-        (self as *const Self).cast()
-    }
-
-    fn get_small_str_ptr_mut(&mut self) -> *mut u8 {
-        (self as *mut Self).cast()
-    }
-
-    const fn from_slice_small_str(slice: &[u8]) -> Self {
-        assert!(slice.len() < Self::SIZE);
-
-        let mut array = [0u8; Self::SIZE];
-
-        // while loop because for uses Iterator and is not available in const contexts
-        let mut i = 0;
-        while i < slice.len() {
-            array[i] = slice[i];
-            i += 1;
-        }
-
-        let highest_index = Self::SIZE - 1;
-        array[highest_index] = slice.len() as u8 | Self::MASK;
-
-        unsafe { core::mem::transmute(array) }
-    }
-
-    fn from_slice_with_capacity_str(slice: &[u8], capacity: usize) -> Self {
-        assert!(
-            slice.len() <= capacity,
-            "RocStr::from_slice_with_capacity_str length bigger than capacity {} {}",
-            slice.len(),
-            capacity
-        );
-        if capacity < core::mem::size_of::<Self>() {
-            Self::from_slice_small_str(slice)
-        } else {
-            let ptr = slice.as_ptr();
-            let element_bytes = capacity;
-
-            let num_bytes = core::mem::size_of::<usize>() + element_bytes;
-
-            let elements = unsafe {
-                let raw_ptr = roc_alloc(num_bytes, core::mem::size_of::<usize>() as u32) as *mut u8;
-                // write the capacity
-                let capacity_ptr = raw_ptr as *mut usize;
-                *capacity_ptr = capacity;
-
-                let raw_ptr = Self::get_element_ptr(raw_ptr as *mut u8);
-
-                // write the refcount
-                let refcount_ptr = raw_ptr as *mut isize;
-                *(refcount_ptr.offset(-1)) = isize::MIN;
-
-                {
-                    // NOTE: using a memcpy here causes weird issues
-                    let target_ptr = raw_ptr as *mut u8;
-                    let source_ptr = ptr as *const u8;
-                    let length = slice.len();
-
-                    for index in 0..length {
-                        *target_ptr.add(index) = *source_ptr.add(index);
-                    }
-                }
-
-                raw_ptr as *mut u8
-            };
-
-            Self {
-                length: slice.len(),
-                elements,
-            }
-        }
-    }
-
-    pub fn from_slice(slice: &[u8]) -> Self {
-        Self::from_slice_with_capacity_str(slice, slice.len())
-    }
-
-    pub fn as_slice(&self) -> &[u8] {
-        if self.is_empty() {
-            &[]
-        } else if self.is_small_str() {
-            unsafe { core::slice::from_raw_parts(self.get_small_str_ptr(), self.len()) }
-        } else {
-            unsafe { core::slice::from_raw_parts(self.elements, self.length) }
-        }
-    }
-
-    pub fn as_mut_slice(&mut self) -> &mut [u8] {
-        if self.is_empty() {
-            &mut []
-        } else if self.is_small_str() {
-            unsafe { core::slice::from_raw_parts_mut(self.get_small_str_ptr_mut(), self.len()) }
-        } else {
-            unsafe { core::slice::from_raw_parts_mut(self.elements, self.length) }
-        }
-    }
-
     pub fn as_str(&self) -> &str {
-        let slice = self.as_slice();
-
-        unsafe { core::str::from_utf8_unchecked(slice) }
+        &*self
     }
-
-    pub fn as_mut_str(&mut self) -> &mut str {
-        let slice = self.as_mut_slice();
-
-        unsafe { core::str::from_utf8_unchecked_mut(slice) }
-    }
-
-    /// Write a CStr (null-terminated) representation of this RocStr into
-    /// the given buffer.
-    ///
-    /// # Safety
-    /// This assumes the given buffer has enough space, so make sure you only
-    /// pass in a pointer to an allocation that's at least as long as this Str!
-    pub unsafe fn write_c_str(&self, buf: *mut char) {
-        if self.is_small_str() {
-            ptr::copy_nonoverlapping(self.get_small_str_ptr(), buf as *mut u8, self.len());
-        } else {
-            ptr::copy_nonoverlapping(self.elements, buf as *mut u8, self.len());
-        }
-
-        // null-terminate
-        *(buf.add(self.len())) = '\0';
-    }
 }
 
 impl Deref for RocStr {
     type Target = str;
 
-    fn deref(&self) -> &str {
-        self.as_str()
-    }
-}
-
-impl DerefMut for RocStr {
-    fn deref_mut(&mut self) -> &mut str {
-        self.as_mut_str()
+    fn deref(&self) -> &Self::Target {
+        match self.as_enum_ref() {
+            RocStrInnerRef::HeapAllocated(h) => unsafe { core::str::from_utf8_unchecked(&*h) },
+            RocStrInnerRef::SmallString(s) => &*s,
+        }
     }
 }
@@ -260,103 +84,138 @@ impl Default for RocStr {
 }
 
 impl From<&str> for RocStr {
-    fn from(str: &str) -> Self {
-        Self::from_slice(str.as_bytes())
-    }
-}
-
-impl Display for RocStr {
-    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        self.as_str().fmt(f)
-    }
-}
-
-impl fmt::Debug for RocStr {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // RocStr { is_small_str: false, storage: Refcounted(3), elements: [ 1,2,3,4] }
-
-        match core::str::from_utf8(self.as_slice()) {
-            Ok(string) => f
-                .debug_struct("RocStr")
-                .field("is_small_str", &self.is_small_str())
-                .field("storage", &self.storage())
-                .field("string_contents", &string)
-                .finish(),
-            Err(_) => f
-                .debug_struct("RocStr")
-                .field("is_small_str", &self.is_small_str())
-                .field("storage", &self.storage())
-                .field("byte_contents", &self.as_slice())
-                .finish(),
-        }
+    fn from(s: &str) -> Self {
+        unsafe { Self::from_slice(s.as_bytes()) }
     }
 }
 
 impl PartialEq for RocStr {
     fn eq(&self, other: &Self) -> bool {
-        self.as_slice() == other.as_slice()
+        self.deref() == other.deref()
     }
 }
 
 impl Eq for RocStr {}
 
+impl Debug for RocStr {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        self.deref().fmt(f)
+    }
+}
+
+unsafe impl ReferenceCount for RocStr {
+    fn increment(&self) {
+        match self.as_enum_ref() {
+            RocStrInnerRef::HeapAllocated(h) => h.increment(),
+            RocStrInnerRef::SmallString(_) => {
+                // Do nothing.
+            }
+        }
+    }
+
+    unsafe fn decrement(ptr: *const Self) {
+        let this = unsafe { &*ptr };
+        if this.is_small_str() {
+            // Do nothing.
+        } else {
+            unsafe {
+                RocList::<u8>::decrement(ptr.cast());
+            }
+        }
+    }
+}
+
 impl Clone for RocStr {
     fn clone(&self) -> Self {
-        if self.is_small_str() {
-            Self {
-                elements: self.elements,
-                length: self.length,
-            }
-        } else {
-            let capacity_size = core::mem::size_of::<usize>();
-            let copy_length = self.length + capacity_size;
-            let elements = unsafe {
-                // We use *mut u8 here even though technically these are
-                // usize-aligned (due to the refcount slot).
-                // This avoids any potential edge cases around there somehow
-                // being unreadable memory after the last byte, which would
-                // potentially get read when reading <usize> bytes at a time.
-                let raw_ptr =
-                    roc_alloc(copy_length, core::mem::size_of::<usize>() as u32) as *mut u8;
-                let dest_slice = slice::from_raw_parts_mut(raw_ptr, copy_length);
-                let src_ptr = self.elements.offset(-(capacity_size as isize)) as *mut u8;
-                let src_slice = slice::from_raw_parts(src_ptr, copy_length);
-
-                dest_slice.copy_from_slice(src_slice);
-
-                *(raw_ptr as *mut usize) = self.length;
-
-                (raw_ptr as *mut u8).add(capacity_size)
-            };
-
-            Self {
-                elements,
-                length: self.length,
-            }
+        match self.as_enum_ref() {
+            RocStrInnerRef::HeapAllocated(h) => Self(RocStrInner {
+                heap_allocated: ManuallyDrop::new(h.clone()),
+            }),
+            RocStrInnerRef::SmallString(s) => Self(RocStrInner { small_string: *s }),
         }
     }
 }
 
 impl Drop for RocStr {
     fn drop(&mut self) {
-        if !self.is_small_str() {
-            let storage_ptr = self.get_storage_ptr_mut();
-
-            unsafe {
-                let storage_val = *storage_ptr;
-
-                if storage_val == REFCOUNT_1 || storage_val > 0 {
-                    // If we have no more references, or if this was unique,
-                    // deallocate it.
-                    roc_dealloc(storage_ptr as *mut c_void, mem::align_of::<isize>() as u32);
-                } else if storage_val < 0 {
-                    // If this still has more references, decrement one.
-                    *storage_ptr = storage_val - 1;
-                }
-
-                // The only remaining option is that this is in readonly memory,
-                // in which case we shouldn't attempt to do anything to it.
+        if self.is_small_str() {
+            // Do nothing.
+        } else {
+            unsafe {
+                ManuallyDrop::drop(&mut self.0.heap_allocated);
             }
         }
     }
 }
+
+#[repr(C)]
+union RocStrInner {
+    heap_allocated: ManuallyDrop<RocList<u8>>,
+    small_string: SmallString,
+}
+
+enum RocStrInnerRef<'a> {
+    HeapAllocated(&'a RocList<u8>),
+    SmallString(&'a SmallString),
+}
+
+#[derive(Debug, Clone, Copy)]
+#[repr(C)]
+struct SmallString {
+    bytes: [u8; Self::CAPACITY],
+    len: u8,
+}
+
+impl SmallString {
+    const CAPACITY: usize = size_of::<RocList<u8>>() - 1;
+
+    const fn empty() -> Self {
+        Self {
+            bytes: [0; Self::CAPACITY],
+            len: RocStr::MASK,
+        }
+    }
+
+    /// # Safety
+    ///
+    /// `slice` must be valid UTF-8.
+    unsafe fn try_from(slice: &[u8]) -> Option<Self> {
+        // Check the size of the slice.
+        let len_as_u8 = u8::try_from(slice.len()).ok()?;
+        if (len_as_u8 as usize) > Self::CAPACITY {
+            return None;
+        }
+
+        // Construct the small string.
+        let mut bytes = [0; Self::CAPACITY];
+        bytes[..slice.len()].copy_from_slice(slice);
+        Some(Self {
+            bytes,
+            len: len_as_u8 | RocStr::MASK,
+        })
+    }
+
+    fn is_small_str(&self) -> bool {
+        self.len & RocStr::MASK != 0
+    }
+
+    fn len(&self) -> usize {
+        usize::from(self.len & !RocStr::MASK)
+    }
+}
+
+impl Deref for SmallString {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        let len = self.len();
+        unsafe { core::str::from_utf8_unchecked(self.bytes.get_unchecked(..len)) }
+    }
+}
+
+impl DerefMut for SmallString {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        let len = self.len();
+        unsafe { core::str::from_utf8_unchecked_mut(self.bytes.get_unchecked_mut(..len)) }
+    }
+}
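The small-string optimization above hinges on one byte: a small string stores `len | MASK` (with `MASK = 0b1000_0000`) in the union's final byte, while for a heap-allocated string that same byte is the most significant byte of a non-negative `usize` length (on little-endian targets), so its high bit is always clear. A worked example, illustrative only:

    // "hello" fits in SmallString::CAPACITY (size_of::<RocList<u8>>() - 1,
    // i.e. 15 bytes on 64-bit targets), so it is stored inline; the final
    // byte holds 0b1000_0101, i.e. RocStr::MASK | 5.
    let small = RocStr::from("hello");
    assert_eq!(small.len(), 5);
    assert_eq!(&*small, "hello");

    // 31 bytes exceed the inline capacity, so this string spills into a
    // heap-allocated RocList<u8>, and the length's high bit stays clear.
    let big = RocStr::from("this one is definitely too long");
    assert_eq!(big.len(), 31);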
roc_std/src/storage.rs (new file, 54 lines)

@@ -0,0 +1,54 @@
+use core::num::NonZeroIsize;
+
+const REFCOUNT_1: isize = isize::MIN;
+
+#[derive(Clone, Copy, Debug)]
+pub enum Storage {
+    Readonly,
+    ReferenceCounted(NonZeroIsize),
+}
+
+impl Storage {
+    pub fn new_reference_counted() -> Self {
+        Self::ReferenceCounted(NonZeroIsize::new(REFCOUNT_1).unwrap())
+    }
+
+    /// Increment the reference count.
+    pub fn increment_reference_count(&mut self) {
+        match self {
+            Storage::Readonly => {
+                // Do nothing.
+            }
+            Storage::ReferenceCounted(rc) => {
+                let new_rc = rc.get() + 1;
+                if let Some(new_rc) = NonZeroIsize::new(new_rc) {
+                    *self = Storage::ReferenceCounted(new_rc);
+                } else {
+                    *self = Storage::Readonly;
+                }
+            }
+        }
+    }
+
+    /// Decrease the reference count.
+    ///
+    /// Returns `true` once there are no more references left.
+    pub fn decrease(&mut self) -> bool {
+        match self {
+            Storage::Readonly => false,
+            Storage::ReferenceCounted(rc) => {
+                let rc_as_isize = rc.get();
+                if rc_as_isize == REFCOUNT_1 {
+                    true
+                } else {
+                    *rc = NonZeroIsize::new(rc_as_isize - 1).unwrap();
+                    false
+                }
+            }
+        }
+    }
+
+    pub fn is_readonly(&self) -> bool {
+        matches!(self, Self::Readonly)
+    }
+}
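The counter starts at `isize::MIN` (`REFCOUNT_1`) and counts upward toward zero, so `decrease` detects the last reference by comparing against `REFCOUNT_1`, and an increment that would carry the count past `-1` to zero saturates into the permanent `Readonly` state (since `NonZeroIsize::new(0)` is `None`). A short illustration of that arithmetic, driving `Storage` directly:

    let mut storage = Storage::new_reference_counted(); // ReferenceCounted(isize::MIN)

    storage.increment_reference_count(); // isize::MIN + 1: two references
    assert!(!storage.decrease()); // back to isize::MIN: one reference remains
    assert!(storage.decrease()); // true: that was the last reference
    assert!(!storage.is_readonly());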