// roc/roc_std/src/lib.rs
#![crate_type = "lib"]
#![no_std]
use core::ffi::c_void;
use core::{fmt, mem, ptr};
pub mod alloca;
// C functions that this crate imports; the host application (or libc) is
// expected to provide them at link time.
extern "C" {
pub fn printf(format: *const u8, ...) -> i32;
pub fn roc_alloc(size: usize, alignment: u32) -> *mut c_void;
pub fn roc_realloc(
ptr: *mut c_void,
new_size: usize,
old_size: usize,
alignment: u32,
) -> *mut c_void;
pub fn roc_dealloc(ptr: *mut c_void, alignment: u32);
}
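// A minimal sketch of what a host linking against this crate might supply
// for the allocator hooks above (illustrative only; a real host can route
// these to any allocator it likes, and the use of `libc` here is an
// assumption, not part of this crate):
//
//     use core::ffi::c_void;
//
//     #[no_mangle]
//     pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
//         libc::malloc(size)
//     }
//
//     #[no_mangle]
//     pub unsafe extern "C" fn roc_realloc(
//         ptr: *mut c_void,
//         new_size: usize,
//         _old_size: usize,
//         _alignment: u32,
//     ) -> *mut c_void {
//         libc::realloc(ptr, new_size)
//     }
//
//     #[no_mangle]
//     pub unsafe extern "C" fn roc_dealloc(ptr: *mut c_void, _alignment: u32) {
//         libc::free(ptr)
//     }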
const REFCOUNT_1: isize = isize::MIN;
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RocOrder {
Eq = 0,
Gt = 1,
Lt = 2,
}
//#[macro_export]
//macro_rules! roclist {
//    () => (
//        $crate::RocList::empty()
//    );
//    ($($x:expr),+ $(,)?) => (
//        $crate::RocList::from_slice(&[$($x),+])
//    );
//}
#[repr(C)]
pub struct RocList<T> {
elements: *mut T,
length: usize,
}
#[derive(Clone, Copy, Debug)]
pub enum Storage {
ReadOnly,
Refcounted(isize),
Capacity(usize),
}
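// The storage slot is the machine word stored directly before a list's or
// string's elements. Its value encodes all three variants above:
//
//     value == 0 => ReadOnly: static memory that must never be freed
//     value <  0 => Refcounted: a refcount counting up from isize::MIN
//                   (REFCOUNT_1), so isize::MIN means exactly one reference
//     value >  0 => Capacity: a unique value whose slot holds its
//                   allocated capacity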
impl<T> RocList<T> {
pub fn len(&self) -> usize {
self.length
}
pub fn is_empty(&self) -> bool {
self.length == 0
}
pub fn empty() -> Self {
RocList {
length: 0,
elements: core::ptr::null_mut(),
}
}
pub fn get(&self, index: usize) -> Option<&T> {
if index < self.len() {
Some(unsafe {
let raw = self.elements.add(index);
&*raw
})
} else {
None
}
}
pub fn storage(&self) -> Option<Storage> {
use core::cmp::Ordering::*;
if self.length == 0 {
return None;
}
unsafe {
let value = *self.get_storage_ptr();
// NOTE doesn't work with elements of 16 or more bytes
match isize::cmp(&value, &0) {
Equal => Some(Storage::ReadOnly),
Less => Some(Storage::Refcounted(value)),
Greater => Some(Storage::Capacity(value as usize)),
}
}
}
fn get_storage_ptr(&self) -> *const isize {
let ptr = self.elements as *const isize;
unsafe { ptr.offset(-1) }
}
fn get_storage_ptr_mut(&mut self) -> *mut isize {
self.get_storage_ptr() as *mut isize
}
fn set_storage_ptr(&mut self, ptr: *const isize) {
self.elements = unsafe { ptr.offset(1) as *mut T };
}
fn get_element_ptr(elements: *const T) -> *const T {
let elem_alignment = core::mem::align_of::<T>();
let ptr = elements as *const usize;
unsafe {
if elem_alignment <= core::mem::align_of::<usize>() {
ptr.add(1) as *const T
} else {
// If elements have an alignment bigger than usize (e.g. an i128),
// we will have necessarily allocated two usize slots worth of
// space for the storage value (with the first usize slot being
// padding for alignment's sake), and we need to skip past both.
ptr.add(2) as *const T
}
}
}
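    // The resulting allocation layouts, in usize-sized slots:
    //
    //     align_of::<T>() <= align_of::<usize>():
    //         [ storage ][ elem 0 ][ elem 1 ] ...
    //
    //     align_of::<T>() > align_of::<usize>() (e.g. a 16-byte-aligned T):
    //         [ padding ][ storage ][ elem 0 ] ...
    //
    // In both cases the storage slot sits directly before the first element,
    // which is the invariant get_storage_ptr relies on.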
pub fn from_slice_with_capacity(slice: &[T], capacity: usize) -> RocList<T>
where
T: Clone,
{
assert!(slice.len() <= capacity);
let ptr = slice.as_ptr();
let element_bytes = capacity * core::mem::size_of::<T>();
let padding = {
if core::mem::align_of::<T>() <= core::mem::align_of::<usize>() {
// aligned on usize (8 bytes on 64-bit systems)
0
} else {
// aligned on 2*usize (16 bytes on 64-bit systems)
core::mem::size_of::<usize>()
}
};
let num_bytes = core::mem::size_of::<usize>() + padding + element_bytes;
        let elements = unsafe {
            // Pass the storage alignment so over-aligned element types
            // (e.g. 16-byte-aligned ones) get a suitable allocation.
            let raw_ptr = roc_alloc(num_bytes, Self::align_of_storage_ptr()) as *mut u8;
            // pointer to the first element
            let raw_ptr = Self::get_element_ptr(raw_ptr as *mut T) as *mut T;
            // write the refcount; a freshly created list is unique
            let refcount_ptr = raw_ptr as *mut isize;
            *(refcount_ptr.offset(-1)) = REFCOUNT_1;
            {
                // NOTE: we must Clone each element rather than memcpy its
                // bytes: a plain byte copy would alias any heap pointers
                // inside the elements (e.g. RocStr contents) and later lead
                // to double frees.
                //
                // We also must not write the clone with a plain assignment,
                // because assigning through the target pointer would first
                // run Drop on whatever garbage happens to be at that
                // uninitialized address (interpreted as a RocStr, it could
                // try to free a bogus pointer). ptr::write moves the clone
                // into place without dropping the old bytes.
                let target_ptr = raw_ptr as *mut T;
                let source_ptr = ptr as *const T;
                for index in 0..slice.len() {
                    let source = &*source_ptr.add(index);
                    target_ptr.add(index).write(source.clone());
                }
            }
            raw_ptr
        };
RocList {
length: slice.len(),
elements,
}
}
pub fn from_slice(slice: &[T]) -> RocList<T>
where
T: Clone,
{
Self::from_slice_with_capacity(slice, slice.len())
}
pub fn as_slice(&self) -> &[T] {
unsafe { core::slice::from_raw_parts(self.elements, self.length) }
}
/// Copy the contents of the given slice into the end of this list,
/// reallocating and resizing as necessary.
pub fn append_slice(&mut self, slice: &[T]) {
let new_len = self.len() + slice.len();
let storage_ptr = self.get_storage_ptr_mut();
// First, ensure that there's enough storage space.
unsafe {
            let storage_val = *storage_ptr;
            // Check if this is refcounted, readonly, or has a capacity.
// (Capacity will be positive if it has a capacity.)
if storage_val > 0 {
let capacity = storage_val as usize;
// We don't have enough capacity, so we need to get some more.
if capacity < new_len {
                    // Double our capacity using realloc
                    let new_cap = 2 * capacity;
                    let new_ptr = roc_realloc(
                        storage_ptr as *mut c_void,
                        new_cap * mem::size_of::<T>() + mem::size_of::<usize>(),
                        capacity * mem::size_of::<T>() + mem::size_of::<usize>(),
                        Self::align_of_storage_ptr(),
                    ) as *mut isize;
                    // Write the new capacity into the new memory
                    *new_ptr = new_cap as isize;
                    // realloc has already moved the storage slot and the
                    // existing elements into the new allocation, so all
                    // that's left is updating our storage pointer.
                    self.set_storage_ptr(new_ptr);
}
            } else {
                // This list is refcounted or readonly; either way, we can't
                // grow it in place, so clone the elements into a fresh
                // allocation. Double the capacity we need, in case there
                // are future additions.
                let new_cap = new_len * 2;
                let new_ptr = roc_alloc(
                    new_cap * mem::size_of::<T>() + mem::size_of::<usize>(),
                    Self::align_of_storage_ptr(),
                ) as *mut isize;
                // Write the new capacity into the new memory; this list is
                // now unique, and gets its own capacity!
                *new_ptr = new_cap as isize;
                // Copy all the existing elements into the new allocation,
                // directly after the storage slot.
                ptr::copy_nonoverlapping(self.elements, new_ptr.add(1) as *mut T, self.len());
                // Now that we're done reading the old elements, release our
                // reference to them: deallocate if this was the last
                // reference, decrement the refcount otherwise, and leave
                // readonly memory untouched.
                if storage_val < 0 {
                    let refcount = storage_val;
                    if refcount == REFCOUNT_1 {
                        roc_dealloc(storage_ptr as *mut c_void, Self::align_of_storage_ptr());
                    } else {
                        *storage_ptr = refcount - 1;
                    }
                }
                // Update our storage pointer to be the new one
                self.set_storage_ptr(new_ptr);
            }
// Since this is an append, we want to start writing new elements
// into the memory immediately after the current last element.
let dest = self.elements.add(self.len());
// There's now enough storage to append the contents of the slice
// in-place, so do that!
            ptr::copy_nonoverlapping(slice.as_ptr(), dest, slice.len());
}
self.length = new_len;
}
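    // A usage sketch for append_slice (hedged: this only runs when linked
    // against a host that provides roc_alloc, roc_realloc, and roc_dealloc):
    //
    //     let mut list = RocList::from_slice(&[1i64, 2, 3]);
    //     list.append_slice(&[4, 5]);
    //     assert_eq!(list.as_slice(), &[1, 2, 3, 4, 5]);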
/// The alignment we need is either the alignment of T, or else
/// the alignment of usize, whichever is higher. That's because we need
/// to store both T values as well as the refcount/capacity storage slot.
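    /// (For example, for T = u8 this returns align_of::<usize>(), i.e. 8 on
    /// a 64-bit target, while for a 16-byte-aligned T it would return 16.)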
fn align_of_storage_ptr() -> u32 {
mem::align_of::<T>().max(mem::align_of::<usize>()) as u32
}
}
impl<T: fmt::Debug> fmt::Debug for RocList<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// RocList { storage: Refcounted(3), elements: [ 1,2,3,4] }
f.debug_struct("RocList")
.field("storage", &self.storage())
.field("elements", &self.as_slice())
.finish()
}
}
impl<T: PartialEq> PartialEq for RocList<T> {
fn eq(&self, other: &Self) -> bool {
if self.length != other.length {
return false;
}
for i in 0..self.length {
unsafe {
if *self.elements.add(i) != *other.elements.add(i) {
return false;
}
}
}
true
}
}
impl<T: Eq> Eq for RocList<T> {}
impl<T> Drop for RocList<T> {
fn drop(&mut self) {
if !self.is_empty() {
let storage_ptr = self.get_storage_ptr_mut();
unsafe {
let storage_val = *storage_ptr;
                if storage_val == REFCOUNT_1 || storage_val > 0 {
                    // If we have no more references, or if this was unique,
                    // drop the elements and then deallocate the backing
                    // memory. (Without dropping each element first, owned
                    // contents such as inner RocStrs would leak.)
                    for index in 0..self.length {
                        ptr::drop_in_place(self.elements.add(index));
                    }
                    roc_dealloc(storage_ptr as *mut c_void, Self::align_of_storage_ptr());
} else if storage_val < 0 {
// If this still has more references, decrement one.
*storage_ptr = storage_val - 1;
}
// The only remaining option is that this is in readonly memory,
// in which case we shouldn't attempt to do anything to it.
}
}
}
}
#[repr(C)]
pub struct RocStr {
elements: *mut u8,
length: usize,
}
impl RocStr {
pub fn len(&self) -> usize {
if self.is_small_str() {
let bytes = self.length.to_ne_bytes();
let last_byte = bytes[bytes.len() - 1];
(last_byte ^ 0b1000_0000) as usize
} else {
self.length
}
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
pub fn is_small_str(&self) -> bool {
(self.length as isize) < 0
}
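    // Small-string encoding example (assuming a little-endian 64-bit
    // target): the 3-byte string "abc" lives entirely inside the struct.
    // Bytes 0..3 hold b"abc" and the final byte of `length` holds
    // 0b1000_0011, i.e. the small-str flag plus the length 3; that flag bit
    // is what makes `length as isize` negative above.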
    pub fn empty() -> Self {
        RocStr {
            // The highest bit of length is set to specify a small str;
            // the remaining length bits are zero. (Leaving the flag unset
            // would make Drop treat the null pointer as heap storage.)
            length: isize::MIN as usize,
            elements: core::ptr::null_mut(),
        }
    }
pub fn get(&self, index: usize) -> Option<&u8> {
if index < self.len() {
Some(unsafe {
let raw = if self.is_small_str() {
self.get_small_str_ptr().add(index)
} else {
self.elements.add(index)
};
&*raw
})
} else {
None
}
}
pub fn storage(&self) -> Option<Storage> {
use core::cmp::Ordering::*;
if self.is_small_str() || self.length == 0 {
return None;
}
unsafe {
let value = *self.get_storage_ptr();
// NOTE doesn't work with elements of 16 or more bytes
            match isize::cmp(&value, &0) {
Equal => Some(Storage::ReadOnly),
Less => Some(Storage::Refcounted(value)),
Greater => Some(Storage::Capacity(value as usize)),
}
}
}
fn get_storage_ptr(&self) -> *const isize {
let ptr = self.elements as *const isize;
unsafe { ptr.offset(-1) }
}
fn get_storage_ptr_mut(&mut self) -> *mut isize {
self.get_storage_ptr() as *mut isize
}
fn get_element_ptr(elements: *const u8) -> *const usize {
let elem_alignment = core::mem::align_of::<u8>();
let ptr = elements as *const usize;
unsafe {
if elem_alignment <= core::mem::align_of::<usize>() {
ptr.add(1)
} else {
// If elements have an alignment bigger than usize (e.g. an i128),
// we will have necessarily allocated two usize slots worth of
// space for the storage value (with the first usize slot being
// padding for alignment's sake), and we need to skip past both.
ptr.add(2)
}
}
}
fn get_small_str_ptr(&self) -> *const u8 {
(self as *const RocStr).cast()
}
fn get_small_str_ptr_mut(&mut self) -> *mut u8 {
(self as *mut RocStr).cast()
}
fn from_slice_with_capacity_str(slice: &[u8], capacity: usize) -> RocStr {
        assert!(
            slice.len() <= capacity,
            "RocStr::from_slice_with_capacity_str: length {} exceeds capacity {}",
            slice.len(),
            capacity
        );
if capacity < core::mem::size_of::<RocStr>() {
let mut rocstr = RocStr::empty();
let target_ptr = rocstr.get_small_str_ptr_mut();
let source_ptr = slice.as_ptr() as *const u8;
for index in 0..slice.len() {
unsafe {
*target_ptr.add(index) = *source_ptr.add(index);
}
}
            // Write the length and the small-string flag into the last byte.
            let mut bytes = rocstr.length.to_ne_bytes();
            bytes[bytes.len() - 1] = slice.len() as u8 | 0b1000_0000;
            rocstr.length = usize::from_ne_bytes(bytes);
rocstr
} else {
let ptr = slice.as_ptr();
let element_bytes = capacity;
let num_bytes = core::mem::size_of::<usize>() + element_bytes;
let elements = unsafe {
let raw_ptr = roc_alloc(num_bytes, core::mem::size_of::<usize>() as u32) as *mut u8;
// write the capacity
let capacity_ptr = raw_ptr as *mut usize;
*capacity_ptr = capacity;
let raw_ptr = Self::get_element_ptr(raw_ptr as *mut u8);
{
// NOTE: using a memcpy here causes weird issues
let target_ptr = raw_ptr as *mut u8;
let source_ptr = ptr as *const u8;
let length = slice.len();
for index in 0..length {
*target_ptr.add(index) = *source_ptr.add(index);
}
}
raw_ptr as *mut u8
};
RocStr {
length: slice.len(),
elements,
}
}
}
pub fn from_slice(slice: &[u8]) -> RocStr {
Self::from_slice_with_capacity_str(slice, slice.len())
}
pub fn as_slice(&self) -> &[u8] {
if self.is_small_str() {
unsafe { core::slice::from_raw_parts(self.get_small_str_ptr(), self.len()) }
} else {
unsafe { core::slice::from_raw_parts(self.elements, self.length) }
}
}
#[allow(clippy::missing_safety_doc)]
pub unsafe fn as_str(&self) -> &str {
let slice = self.as_slice();
core::str::from_utf8_unchecked(slice)
}
    /// Write a C string (null-terminated) representation of this RocStr
    /// into the given buffer, and return that buffer as a C `char` pointer.
    /// Assumes the given buffer has space for `self.len() + 1` bytes!
    pub unsafe fn write_c_str(&self, buf: *mut u8) -> *mut c_char {
        if self.is_small_str() {
            ptr::copy_nonoverlapping(self.get_small_str_ptr(), buf, self.len());
        } else {
            ptr::copy_nonoverlapping(self.elements, buf, self.len());
        }
        // null-terminate
        *buf.add(self.len()) = 0;
        buf as *mut c_char
    }
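    // Usage sketch (hedged: the buffer size is the caller's responsibility;
    // "hi" is short enough to be a small string, so no host allocator is
    // needed here):
    //
    //     let s = RocStr::from("hi");
    //     let mut buf = [0u8; 16];
    //     let c_str = unsafe { s.write_c_str(buf.as_mut_ptr()) };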
}
impl From<&str> for RocStr {
fn from(str: &str) -> Self {
Self::from_slice(str.as_bytes())
}
}
impl fmt::Debug for RocStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// RocStr { is_small_str: false, storage: Refcounted(3), elements: [ 1,2,3,4] }
f.debug_struct("RocStr")
.field("is_small_str", &self.is_small_str())
.field("storage", &self.storage())
.field("elements", &self.as_slice())
.finish()
}
}
impl PartialEq for RocStr {
fn eq(&self, other: &Self) -> bool {
self.as_slice() == other.as_slice()
}
}
impl Eq for RocStr {}
impl Clone for RocStr {
fn clone(&self) -> Self {
if self.is_small_str() || self.is_empty() {
Self {
elements: self.elements,
length: self.length,
}
        } else {
            let capacity_size = core::mem::size_of::<usize>();
            // Copy the storage slot along with the string bytes.
            let copy_length = self.length + capacity_size;
            let elements = unsafe {
                // This crate is no_std and never imports libc, so use
                // ptr::copy_nonoverlapping rather than libc::memcpy.
                let raw =
                    roc_alloc(copy_length, core::mem::align_of::<usize>() as u32) as *mut u8;
                ptr::copy_nonoverlapping(
                    self.elements.offset(-(capacity_size as isize)),
                    raw,
                    copy_length,
                );
                // The clone is unique, so its storage slot holds a capacity
                // (equal to the length) rather than the original's refcount.
                *(raw as *mut usize) = self.length;
                raw.add(capacity_size)
            };
Self {
elements,
length: self.length,
}
}
}
}
impl Drop for RocStr {
fn drop(&mut self) {
if !self.is_small_str() {
let storage_ptr = self.get_storage_ptr_mut();
unsafe {
let storage_val = *storage_ptr;
if storage_val == REFCOUNT_1 || storage_val > 0 {
// If we have no more references, or if this was unique,
// deallocate it.
roc_dealloc(storage_ptr as *mut c_void, mem::align_of::<isize>() as u32);
} else if storage_val < 0 {
// If this still has more references, decrement one.
*storage_ptr = storage_val - 1;
}
// The only remaining option is that this is in readonly memory,
// in which case we shouldn't attempt to do anything to it.
}
}
}
}
/// Like a Rust Result, but with Roc's fixed discriminant size of u64, and
/// with Roc's Err = 0, Ok = 1 discriminant numbers.
///
/// Using Rust's Result instead of this will not work properly with Roc code!
#[repr(u64)]
pub enum RocResult<Ok, Err> {
Err(Err),
Ok(Ok),
}
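// Per Rust's rules for #[repr(u64)] enums with fields, this is laid out as
// a u64 tag followed by a union of the two payloads, so e.g.
// RocResult::<i64, ()>::Ok(5) is { tag: 1, payload: 5 } in memory
// (modulo padding).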
#[allow(non_camel_case_types)]
type c_char = u8;
#[repr(u64)]
pub enum RocCallResult<T> {
Success(T),
Failure(*mut c_char),
}
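// A host-side sketch of receiving a result from a Roc entry point and
// converting it via the From impl below. The symbol name here is
// hypothetical; the Roc compiler generates the actual name:
//
//     extern "C" {
//         fn roc_main_exposed(output: *mut RocCallResult<i64>);
//     }
//
//     let mut output = core::mem::MaybeUninit::uninit();
//     unsafe { roc_main_exposed(output.as_mut_ptr()) };
//     match Result::from(unsafe { output.assume_init() }) {
//         Ok(value) => { /* use value */ }
//         Err(msg) => { /* msg is the null-terminated failure string */ }
//     }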
impl<T: Sized> From<RocCallResult<T>> for Result<T, &'static str> {
fn from(call_result: RocCallResult<T>) -> Self {
use RocCallResult::*;
match call_result {
Success(value) => Ok(value),
Failure(failure) => Err({
let msg = unsafe {
let mut null_byte_index = 0;
loop {
if *failure.offset(null_byte_index) == 0 {
break;
}
null_byte_index += 1;
}
let bytes = core::slice::from_raw_parts(failure, null_byte_index as usize);
core::str::from_utf8_unchecked(bytes)
};
msg
}),
}
}
}
impl<'a, T: Sized + Copy> From<&'a RocCallResult<T>> for Result<T, &'a str> {
fn from(call_result: &'a RocCallResult<T>) -> Self {
use RocCallResult::*;
match call_result {
Success(value) => Ok(*value),
Failure(failure) => Err({
let msg = unsafe {
let mut null_byte_index = 0;
loop {
if *failure.offset(null_byte_index) == 0 {
break;
}
null_byte_index += 1;
}
let bytes = core::slice::from_raw_parts(*failure, null_byte_index as usize);
core::str::from_utf8_unchecked(bytes)
};
msg
}),
}
}
}