moved all crates into separate folder + related path fixes

This commit is contained in:
Anton-4 2022-07-01 17:37:43 +02:00
parent 12ef03bb86
commit eee85fa45d
No known key found for this signature in database
GPG key ID: C954D6E0F9C0ABFD
1063 changed files with 92 additions and 93 deletions

View file

@ -0,0 +1,139 @@
use crate::util::id_type::{Count, Id};
use crate::util::id_vec::IdVec;
use crate::util::replace_none::replace_none;
// Identifier for a fragment in the shared `frags` buffer (newtype around `u32`,
// generated by the `id_type!` macro from `util::id_type`).
id_type! {
    BlockFragId(u32);
}
/// A "fragment" of a block, representing a run of consecutive `ValId`s and a pointer to the next
/// fragment.
///
/// Fragments form a doubly-linked list per block, threaded through the shared `frags` buffer.
#[derive(Clone, Debug)]
struct BlockFrag<ValId> {
    /// Inclusive bound
    min_val: ValId,
    /// Exclusive bound
    max_val: ValId,
    /// Previous fragment in this block's list, or `None` for the head fragment.
    prev: Option<BlockFragId>,
    /// Next fragment in this block's list, or `None` for the tail fragment.
    next: Option<BlockFragId>,
}
/// Per-block bookkeeping: the endpoints of the block's fragment list plus caller-supplied
/// metadata.
#[derive(Clone, Debug)]
struct BlockData<BlockInfo> {
    /// First fragment of this block's fragment list.
    head: BlockFragId,
    /// Last fragment of this block's fragment list (values are appended here).
    tail: BlockFragId,
    /// Caller-supplied metadata for this block.
    info: BlockInfo,
}
/// Conceptually represents a collection of the form `IdVec<BlockId, (Vec<ValId>, BlockInfo)>`.
///
/// Each tuple `(Vec<ValId>, BlockInfo)` is called a "block".
///
/// The blocks are actually stored in a single contiguous buffer to reduce the number of heap
/// allocations, and blocks with long runs of consecutive `ValId`s are stored in a compressed
/// representation.
#[derive(Clone, Debug)]
pub struct Blocks<BlockId: Id, ValId: Id, BlockInfo> {
    /// Shared storage for the fragments of all blocks, linked into per-block lists.
    frags: IdVec<BlockFragId, BlockFrag<ValId>>,
    /// One entry per block: head/tail fragment pointers plus the block's metadata.
    blocks: IdVec<BlockId, BlockData<BlockInfo>>,
}
impl<BlockId: Id, ValId: Id, BlockInfo> Blocks<BlockId, ValId, BlockInfo> {
    /// Creates an empty collection with no blocks and no fragments.
    pub fn new() -> Self {
        Self {
            frags: IdVec::new(),
            blocks: IdVec::new(),
        }
    }

    /// Returns the number of blocks (not the number of values).
    pub fn len(&self) -> usize {
        self.blocks.len()
    }

    /// Returns `true` if the collection contains no blocks.
    pub fn is_empty(&self) -> bool {
        self.blocks.is_empty()
    }

    /// Returns the id range of all blocks as a `Count`.
    pub fn block_count(&self) -> Count<BlockId> {
        self.blocks.count()
    }

    /// Adds a new, empty block with the given metadata and returns its id.
    ///
    /// `start_hint` seeds the block's first (empty) fragment: if the first value later added is
    /// `start_hint` itself, it extends that fragment in place instead of allocating a new one.
    pub fn add_block(&mut self, start_hint: ValId, info: BlockInfo) -> BlockId {
        // An empty run: min_val == max_val (max is exclusive).
        let frag = BlockFrag {
            min_val: start_hint.clone(),
            max_val: start_hint,
            prev: None,
            next: None,
        };
        let frag_id = self.frags.push(frag);
        self.blocks.push(BlockData {
            head: frag_id,
            tail: frag_id,
            info,
        })
    }

    /// Appends `val_id` to the end of block `block_id`.
    pub fn add_value(&mut self, block_id: BlockId, val_id: ValId) {
        let block = &mut self.blocks[block_id];
        let tail_frag = &mut self.frags[block.tail];
        if tail_frag.max_val.to_index() == val_id.to_index() {
            // `val_id` directly follows the tail fragment's run, so extend the run in place.
            //
            // Fix: the new exclusive bound is computed only on this branch. Previously it was
            // computed eagerly before the test, which could panic spuriously (via
            // `from_index_or_panic`) when appending a *non*-consecutive value while `max_val`
            // was already at `ValId`'s maximum index.
            tail_frag.max_val = ValId::from_index_or_panic(val_id.to_index() + 1);
        } else {
            // Non-consecutive value: start a fresh single-value fragment and link it in as the
            // new tail.
            let new_tail = BlockFrag {
                min_val: val_id.clone(),
                max_val: ValId::from_index_or_panic(val_id.to_index() + 1),
                prev: Some(block.tail),
                next: None,
            };
            let new_tail_id = self.frags.push(new_tail);
            // The old tail must not already have a successor; `replace_none` enforces this.
            replace_none(&mut self.frags[block.tail].next, new_tail_id).unwrap();
            block.tail = new_tail_id;
        }
    }

    /// Borrows the metadata of block `block_id`.
    pub fn block_info(&self, block_id: BlockId) -> &BlockInfo {
        &self.blocks[block_id].info
    }

    /// Mutably borrows the metadata of block `block_id`.
    pub fn block_info_mut(&mut self, block_id: BlockId) -> &mut BlockInfo {
        &mut self.blocks[block_id].info
    }

    /// Iterates over the values of block `block_id` in insertion order.
    pub fn block_values(&self, block_id: BlockId) -> impl Iterator<Item = ValId> + '_ {
        let mut frag = &self.frags[self.blocks[block_id].head];
        let mut val = frag.min_val.clone();
        std::iter::from_fn(move || {
            // Skip past exhausted (or empty) fragments; `while` handles consecutive empty
            // fragments, although `add_block` only ever creates one empty fragment per block.
            while val.to_index() >= frag.max_val.to_index() {
                match frag.next {
                    Some(next) => {
                        frag = &self.frags[next];
                        val = frag.min_val.clone();
                    }
                    None => {
                        return None;
                    }
                }
            }
            let this_val = val.clone();
            // In-bounds by the loop guard above, so the unchecked conversion is safe here.
            val = ValId::from_index_unchecked(val.to_index() + 1);
            Some(this_val)
        })
    }

    /// Iterates over the values of block `block_id` in reverse insertion order.
    pub fn block_values_rev(&self, block_id: BlockId) -> impl Iterator<Item = ValId> + '_ {
        let mut frag = &self.frags[self.blocks[block_id].tail];
        // `val` is an exclusive upper cursor; decremented before each yield.
        let mut val = frag.max_val.clone();
        std::iter::from_fn(move || {
            while val.to_index() <= frag.min_val.to_index() {
                match frag.prev {
                    Some(prev) => {
                        frag = &self.frags[prev];
                        val = frag.max_val.clone();
                    }
                    None => {
                        return None;
                    }
                }
            }
            val = ValId::from_index_unchecked(val.to_index() - 1);
            Some(val.clone())
        })
    }
}

// Consistent with the other `util` collection types (`FlatSlices`, `IdBiMap`, `OpGraph`).
impl<BlockId: Id, ValId: Id, BlockInfo> Default for Blocks<BlockId, ValId, BlockInfo> {
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,51 @@
/// Defines a pair of byte-string identifier types: `$borrowed`, a wrapper around `&[u8]`, and
/// `$owned`, a wrapper around a `SmallVec` that can hold short strings inline. Generates
/// conversions between the two and escape-based `Debug` formatting.
macro_rules! bytes_id {
    (
        // Capturing attributes allows us to capture doc comments
        $(#[$annot_borrowed:meta])* $borrowed_vis:vis $borrowed:ident;
        $(#[$annot_owned:meta])* $owned_vis:vis $owned:ident;
    ) => {
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
        $(#[$annot_borrowed])*
        $borrowed_vis struct $borrowed<'a>($borrowed_vis &'a [u8]);

        // On 64-bit platforms we can store up to `23` bytes inline in a `SmallVec` with no space
        // penalty.
        #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
        $(#[$annot_owned])*
        $owned_vis struct $owned($owned_vis ::smallvec::SmallVec<[u8; 23]>);

        impl $owned {
            // Cheap view of the owned bytes as the borrowed wrapper type.
            fn borrowed<'a>(&'a self) -> $borrowed<'a> {
                $borrowed(&self.0)
            }
        }

        impl<'a> ::std::convert::From<$borrowed<'a>> for $owned {
            // Copies the borrowed bytes into owned (possibly inline) storage.
            fn from(borrowed: $borrowed<'a>) -> Self {
                $owned(::smallvec::SmallVec::from_slice(&borrowed.0))
            }
        }

        impl<'a> ::std::fmt::Debug for $borrowed<'a> {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                // TODO: Consolidate this with render_api_ir.rs
                // Renders as `TypeName("...")` with each byte ASCII-escaped.
                write!(f, "{}(\"", stringify!($borrowed))?;
                for &byte in self.0 {
                    write!(f, "{}", ::std::ascii::escape_default(byte))?;
                }
                write!(f, "\")")?;
                Ok(())
            }
        }

        impl ::std::fmt::Debug for $owned {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                // We intentionally use the name of $borrowed to render values of type $owned,
                // because only the borrowed version of each bytestring-based identifier type are
                // exposed/documented in the public API, and we use this debug formatting logic to
                // render public-facing error messages.
                write!(f, "{:?}", self.borrowed())
            }
        }
    }
}

View file

@ -0,0 +1,114 @@
use std::borrow::Borrow;
use std::fmt;
use crate::util::id_type::{self, Count, Id};
use crate::util::id_vec::IdVec;
/// Per-slice bookkeeping stored alongside the shared `flat_data` buffer.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct SliceData<SliceInfo> {
    /// Caller-supplied metadata for this slice.
    info: SliceInfo,
    /// Determines which slice of the `flat_data` buffer contains the items of this slice.
    ///
    /// If the *previous* slice has `slice_end_idx == a`, and this slice has `slice_end_idx == b`,
    /// then the items of this slice are given by `flat_data[a..b]`.
    slice_end_idx: usize,
}
/// Conceptually represents a collection of the form `IdVec<SliceId, (SliceInfo, Vec<T>)>`.
///
/// The notional `Vec<T>` values are actually stored in a single contiguous buffer to reduce the
/// number of heap allocations. Because these values are all different slices of the same
/// underlying buffer, we refer to each tuple `(SliceInfo, Vec<T>)` as a "slice" for the purposes of
/// this data structure's API.
///
/// The `SliceInfo` parameter should be regarded as optional, and for some purposes it is perfectly
/// fine (stylistically speaking) to set `SliceInfo = ()`.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FlatSlices<SliceId: Id, SliceInfo, T> {
    /// The concatenated items of all slices, in slice-id order.
    flat_data: Vec<T>,
    /// Per-slice metadata and end offsets into `flat_data`.
    slices: IdVec<SliceId, SliceData<SliceInfo>>,
}
/// Immutable view of one slice: its metadata and its items.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Slice<'a, SliceInfo, T> {
    pub info: &'a SliceInfo,
    pub items: &'a [T],
}
/// Mutable view of one slice: its metadata and its items.
///
/// Note: items may be mutated in place, but the slice's length cannot change.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SliceMut<'a, SliceInfo, T> {
    pub info: &'a mut SliceInfo,
    pub items: &'a mut [T],
}
// Defaults to an empty collection; equivalent to `FlatSlices::new()`.
impl<SliceId: Id, SliceInfo, T> Default for FlatSlices<SliceId, SliceInfo, T> {
    fn default() -> Self {
        Self::new()
    }
}
impl<SliceId: Id, SliceInfo, T> FlatSlices<SliceId, SliceInfo, T> {
    /// Creates an empty collection containing no slices.
    pub fn new() -> Self {
        Self {
            flat_data: Vec::new(),
            slices: IdVec::new(),
        }
    }

    /// Returns the number of slices (not the number of underlying items).
    pub fn len(&self) -> usize {
        self.slices.len()
    }

    /// Returns `true` if the collection contains no slices.
    pub fn is_empty(&self) -> bool {
        self.slices.is_empty()
    }

    /// Returns the id range of all slices as a `Count`.
    pub fn count(&self) -> Count<SliceId> {
        self.slices.count()
    }

    /// Appends a new slice with the given metadata, cloning `slice`'s items onto the end of the
    /// shared buffer, and returns the new slice's id.
    pub fn push_slice(&mut self, info: SliceInfo, slice: &[T]) -> SliceId
    where
        T: Clone,
    {
        self.flat_data.extend_from_slice(slice);
        self.slices.push(SliceData {
            info,
            slice_end_idx: self.flat_data.len(),
        })
    }

    /// Returns the `flat_data` range `[start, end)` holding the items of slice `idx`.
    fn data_range(&self, idx: SliceId) -> (usize, usize) {
        // The first slice starts at 0; every later slice starts where its predecessor ends.
        let start = match id_type::decrement(idx.clone()) {
            None => 0,
            Some(prev_idx) => self.slices[prev_idx].slice_end_idx,
        };
        let end = self.slices[idx].slice_end_idx;
        (start, end)
    }

    /// Borrows the slice with the given id.
    pub fn get<I: Borrow<SliceId>>(&self, idx: I) -> Slice<SliceInfo, T> {
        let (start, end) = self.data_range(idx.borrow().clone());
        Slice {
            info: &self.slices[idx].info,
            items: &self.flat_data[start..end],
        }
    }

    /// Mutably borrows the slice with the given id.
    pub fn get_mut<I: Borrow<SliceId>>(&mut self, idx: I) -> SliceMut<SliceInfo, T> {
        let (start, end) = self.data_range(idx.borrow().clone());
        SliceMut {
            info: &mut self.slices[idx].info,
            items: &mut self.flat_data[start..end],
        }
    }

    /// Iterates over `(id, slice)` pairs in id order.
    pub fn iter(&self) -> impl Iterator<Item = (SliceId, Slice<SliceInfo, T>)> {
        self.count().iter().map(move |i| (i.clone(), self.get(i)))
    }
}
impl<SliceId: Id + fmt::Debug, SliceInfo: fmt::Debug, T: fmt::Debug> fmt::Debug
    for FlatSlices<SliceId, SliceInfo, T>
{
    // Renders the collection as a map from slice id to slice contents.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut map = formatter.debug_map();
        for (id, slice) in self.iter() {
            map.entry(&id, &slice);
        }
        map.finish()
    }
}

View file

@ -0,0 +1,91 @@
/// Declares a trait and, for each `impl $wrapper => .$field;` clause, generates an impl of the
/// trait for `$wrapper` in which every method forwards to `self.$field`.
macro_rules! forward_trait {
    (
        $(#[$annot:meta])*
        $t_vis:vis trait $t_name:ident {
            $($methods:tt)*
        }
        $($impls:tt)*
    ) => {
        // Emit the trait declaration itself, then hand the method list plus the impl clauses
        // off to `forward_trait_impls!` for recursive expansion.
        $(#[$annot])*
        $t_vis trait $t_name { $($methods)* }
        forward_trait_impls!(trait $t_name { $($methods)* } $($impls)*);
    };
}
/// Helper for `forward_trait!`: consumes one `impl $wrapper => .$field;` clause at a time,
/// generating a forwarding impl for each, until no clauses remain.
macro_rules! forward_trait_impls {
    (
        trait $t_name:ident { $($methods:tt)* }
    ) => {
        // Base case: no impls left
    };
    (
        trait $t_name:ident { $($methods:tt)* }
        impl $wrapper:ident => .$field:ident;
        $($impls:tt)*
    ) => {
        impl $t_name for $wrapper {
            forward_trait_impl_body!( { $($methods)* } .$field );
        }
        // Recurse on the remaining impl clauses.
        forward_trait_impls!(trait $t_name { $($methods)* } $($impls)*);
    }
}
/// Helper for `forward_trait!`: consumes one trait-method signature at a time and emits a method
/// that forwards to `self.$field`. There is one arm per receiver form (`self`, `&self`,
/// `&mut self`), since `macro_rules!` cannot match them with a single pattern.
macro_rules! forward_trait_impl_body {
    (
        {}
        .$field:ident
    ) => {
        // Base case: no methods left
    };
    (
        // By-value receiver: `fn name(self, ...) -> Ret;`
        {
            $(#[$annot:meta])*
            fn $fn_name:ident(self $(, $arg_name:ident : $arg_ty:ty)* $(,)? ) -> $ret_ty:ty ;
            $($methods:tt)*
        }
        .$field:ident
    ) => {
        fn $fn_name(self, $($arg_name: $arg_ty),*) -> $ret_ty {
            self.$field.$fn_name($($arg_name),*)
        }
        forward_trait_impl_body!({ $($methods)* } .$field);
    };
    (
        // Shared-reference receiver: `fn name(&self, ...) -> Ret;`
        {
            $(#[$annot:meta])*
            fn $fn_name:ident(&self $(, $arg_name:ident : $arg_ty:ty)* $(,)? ) -> $ret_ty:ty ;
            $($methods:tt)*
        }
        .$field:ident
    ) => {
        fn $fn_name(&self, $($arg_name: $arg_ty),*) -> $ret_ty {
            self.$field.$fn_name($($arg_name),*)
        }
        forward_trait_impl_body!({ $($methods)* } .$field);
    };
    (
        // Mutable-reference receiver: `fn name(&mut self, ...) -> Ret;`
        {
            $(#[$annot:meta])*
            fn $fn_name:ident(&mut self $(, $arg_name:ident : $arg_ty:ty)* $(,)? ) -> $ret_ty:ty ;
            $($methods:tt)*
        }
        .$field:ident
    ) => {
        fn $fn_name(&mut self, $($arg_name: $arg_ty),*) -> $ret_ty {
            self.$field.$fn_name($($arg_name),*)
        }
        forward_trait_impl_body!({ $($methods)* } .$field);
    };
}

View file

@ -0,0 +1,16 @@
use std::cmp::Ordering;
// inspired by https://docs.rs/generational-arena/0.2.8/generational_arena/struct.Arena.html#method.get2_mut
/// Returns mutable references to the elements at positions `i` and `j` of `slice`, or `None`
/// when `i == j` (two `&mut` into the same element would alias).
///
/// Panics if either index is out of bounds, matching ordinary slice indexing.
pub fn get2_mut<T>(slice: &mut [T], i: usize, j: usize) -> Option<(&mut T, &mut T)> {
    if i == j {
        return None;
    }
    // Split at the larger index so the two elements land in disjoint halves.
    let (lo, hi) = if i < j { (i, j) } else { (j, i) };
    let (left, right) = slice.split_at_mut(hi);
    let (lo_ref, hi_ref) = (&mut left[lo], &mut right[0]);
    // Return the references in the caller's (i, j) order.
    Some(if i < j { (lo_ref, hi_ref) } else { (hi_ref, lo_ref) })
}

View file

@ -0,0 +1,74 @@
use std::collections::hash_map::{Entry, HashMap};
use std::hash::Hash;
use std::ops::Deref;
use crate::util::id_type::Id;
use crate::util::id_vec::IdVec;
/// Conceptually represents a collection of the form `IdVec<K, V>` where the `V` values are unique.
///
/// The collection is implemented such that lookups from `V` values to `K` keys are efficient.
#[derive(Clone, Debug)]
pub struct IdBiMap<K: Id, V> {
    /// Forward direction: key -> value (values stored in key order).
    key_to_val: IdVec<K, V>,
    /// Reverse direction: value -> key; values are cloned into both maps.
    val_to_key: HashMap<V, K>,
}
// Defaults to an empty bi-map; equivalent to `IdBiMap::new()`.
impl<K: Id, V: Hash + Eq + Clone> Default for IdBiMap<K, V> {
    fn default() -> Self {
        Self::new()
    }
}
// Deref to the forward map so read-only `IdVec` operations (indexing, `len`, iteration, ...)
// can be called directly on an `IdBiMap`. Mutation is deliberately not exposed this way, since
// it could break the key<->value correspondence.
impl<K: Id, V> Deref for IdBiMap<K, V> {
    type Target = IdVec<K, V>;
    fn deref(&self) -> &Self::Target {
        &self.key_to_val
    }
}
impl<K: Id, V: Hash + Eq + Clone> IdBiMap<K, V> {
    /// Creates an empty bi-map.
    pub fn new() -> Self {
        IdBiMap {
            key_to_val: IdVec::new(),
            val_to_key: HashMap::new(),
        }
    }

    /// Insert a new unique value into the bi-map.
    ///
    /// If the value is not already present, returns an `Ok` value with the new index of the value.
    ///
    /// If the value is already present, returns an `Err` value with the existing index.
    pub fn insert(&mut self, val: V) -> Result<K, K> {
        match self.val_to_key.entry(val) {
            Entry::Occupied(occupied) => Err(occupied.get().clone()),
            Entry::Vacant(vacant) => {
                // The value is owned by the HashMap key; clone it into the IdVec so both
                // directions of the mapping hold a copy.
                let new_index = self.key_to_val.push(vacant.key().clone());
                vacant.insert(new_index.clone());
                Ok(new_index)
            }
        }
    }

    /// Insert a value into the bi-map, or get its key if it is already present.
    pub fn get_or_insert(&mut self, val: V) -> K {
        // Defer to `insert` instead of duplicating its entry logic: both the "newly added"
        // (`Ok`) and "already present" (`Err`) outcomes carry the key we want.
        match self.insert(val) {
            Ok(key) | Err(key) => key,
        }
    }

    /// Looks up the key previously assigned to `val`, if any.
    pub fn get_by_val(&self, val: &V) -> Option<K> {
        self.val_to_key.get(val).cloned()
    }

    /// Iterates over `(key, &value)` pairs in key order.
    pub fn iter(&self) -> impl Iterator<Item = (K, &V)> {
        self.key_to_val.iter()
    }
}

View file

@ -0,0 +1,111 @@
use std::cmp::Ordering;
/// Abstracts over types which wrap numerical indices.
pub trait Id: Clone {
/// The maximum `usize` representable by this type.
const MAX_USIZE: usize;
/// Convert from `usize` to `Self`, potentially silently wrapping if the `usize` is out of
/// range.
///
/// In debug builds, we may still perform a check.
fn from_index_unchecked(idx: usize) -> Self;
/// Check that a `usize` is in range for `Self`, panicking with an informative error message if
/// not.
fn assert_in_range(idx: usize);
/// Convert from `usize` to `Self`, panicking if the `usize` is out of range.
fn from_index_or_panic(idx: usize) -> Self {
Self::assert_in_range(idx);
Self::from_index_unchecked(idx)
}
/// Convert from `Self` to `usize`. This should never fail.
fn to_index(&self) -> usize;
}
/// Defines a newtype wrapper `$name` around an unsigned integer type `$wrapped` and implements
/// `crate::util::id_type::Id` for it, plus a compact `Debug` impl and comparisons against
/// `Count<$name>`.
macro_rules! id_type {
    ($(#[$annot:meta])* $id_vis:vis $name:ident($wrapped:ty); ) => {
        $(#[$annot])*
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
        $id_vis struct $name($id_vis $wrapped);
        impl $crate::util::id_type::Id for $name {
            const MAX_USIZE: usize = <$wrapped>::MAX as usize;
            fn from_index_unchecked(idx: usize) -> Self {
                // "Unchecked" refers to release builds; debug builds still validate the range.
                if cfg!(debug_assertions) {
                    <Self as $crate::util::id_type::Id>::assert_in_range(idx);
                }
                $name(idx as $wrapped)
            }
            fn assert_in_range(idx: usize) {
                if idx > Self::MAX_USIZE {
                    panic!(
                        "index {} overflows range of type '{}' (backed by {})",
                        idx,
                        // Fix: this previously read `stringify!(name)`, which rendered the
                        // literal string "name" in the panic message instead of the id type's
                        // actual name.
                        stringify!($name),
                        stringify!($wrapped),
                    );
                }
            }
            fn to_index(&self) -> usize {
                self.0 as usize
            }
        }
        // Custom Debug impl avoids multi-line formatting when formatted with {:#?}
        impl ::std::fmt::Debug for $name {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                write!(f, "{}({})", stringify!($name), self.0)
            }
        }
        // Allow comparing an id directly against a `Count` of the same id type.
        impl ::std::cmp::PartialEq<$crate::util::id_type::Count<$name>> for $name {
            fn eq(&self, other: &$crate::util::id_type::Count<$name>) -> bool {
                self.eq(&other.0)
            }
        }
        impl ::std::cmp::PartialOrd<$crate::util::id_type::Count<$name>> for $name {
            fn partial_cmp(
                &self,
                other: &$crate::util::id_type::Count<$name>,
            ) -> Option<::std::cmp::Ordering> {
                self.partial_cmp(&other.0)
            }
        }
    }
}
/// `Count(x)` represents the range of ids `0..x`.
///
/// Typically used as the "length" of an id-indexed collection, in the id's own type.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Count<T>(pub T);
// Allow comparing a `Count<T>` directly against a bare `T` (the mirror-image impl for ids is
// generated by the `id_type!` macro).
impl<T: PartialEq> PartialEq<T> for Count<T> {
    fn eq(&self, other: &T) -> bool {
        self.0.eq(other)
    }
}
// Allow ordering comparisons between a `Count<T>` and a bare `T`.
impl<T: PartialOrd> PartialOrd<T> for Count<T> {
    fn partial_cmp(&self, other: &T) -> Option<Ordering> {
        self.0.partial_cmp(other)
    }
}
impl<T: Id> Count<T> {
    /// Iterates over every id in the range `0..self.0`, in increasing order.
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = T> {
        // Indices below an existing id are always in range, so the unchecked conversion is safe.
        (0..self.0.to_index()).map(T::from_index_unchecked)
    }
}
/// Returns the id one position before `id`, or `None` when `id` is the first id (index 0).
pub fn decrement<T: Id>(id: T) -> Option<T> {
    let index = id.to_index();
    if index == 0 {
        None
    } else {
        // `index - 1` is below an existing id, so the unchecked conversion is in range.
        Some(T::from_index_unchecked(index - 1))
    }
}

View file

@ -0,0 +1,263 @@
use std::borrow::Borrow;
use std::fmt::{self, Debug};
use std::iter;
use std::marker::PhantomData;
use std::ops::{Index, IndexMut};
use std::slice;
use std::vec;
use crate::util::get2_mut::get2_mut;
use crate::util::id_type::{Count, Id};
/// A `Vec` indexed by a strongly-typed id `K` instead of a bare `usize`.
#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct IdVec<K, V> {
    /// Zero-sized marker tying this container to its id type.
    key: PhantomData<K>,
    // Invariant: items.len() <= K::MAX_USIZE
    items: Vec<V>,
}
// Renders the collection as a map from typed id to value (rather than as a bare list), so the
// ids are visible in debug output.
impl<K: Id + Debug, V: Debug> Debug for IdVec<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map()
            .entries(
                self.items
                    .iter()
                    .enumerate()
                    .map(|(idx, val)| (K::from_index_unchecked(idx), val)),
            )
            .finish()
    }
}
/// Iterator adapter that converts the `usize` component of an `(index, value)` iterator into a
/// strongly-typed id `K`.
#[derive(Clone, Debug)]
pub struct IndexMapped<K, I> {
    /// Zero-sized marker for the id type produced by this adapter.
    key: PhantomData<K>,
    /// The wrapped `(usize, value)` iterator.
    inner: I,
}
impl<K: Id, V, I: Iterator<Item = (usize, V)>> Iterator for IndexMapped<K, I> {
    type Item = (K, V);
    #[inline]
    fn next(&mut self) -> Option<(K, V)> {
        // Convert the raw enumeration index into a typed id on the way out.
        self.inner
            .next()
            .map(|(idx, val)| (K::from_index_unchecked(idx), val))
    }
    // `size_hint` and `fold` are forwarded explicitly so the inner iterator's own
    // implementations are not lost behind this adapter.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
    #[inline]
    fn fold<Acc, G>(self, init: Acc, mut g: G) -> Acc
    where
        G: FnMut(Acc, Self::Item) -> Acc,
    {
        self.inner.fold(init, |acc, (idx, val)| {
            g(acc, (K::from_index_unchecked(idx), val))
        })
    }
}
// Consuming iteration: yields owned `(id, value)` pairs in id order.
impl<K: Id, V> IntoIterator for IdVec<K, V> {
    type Item = (K, V);
    type IntoIter = IndexMapped<K, iter::Enumerate<vec::IntoIter<V>>>;
    fn into_iter(self) -> Self::IntoIter {
        IndexMapped {
            key: PhantomData,
            inner: self.items.into_iter().enumerate(),
        }
    }
}
// Borrowing iteration: yields `(id, &value)` pairs in id order.
impl<'a, K: Id, V> IntoIterator for &'a IdVec<K, V> {
    type Item = (K, &'a V);
    type IntoIter = IndexMapped<K, iter::Enumerate<slice::Iter<'a, V>>>;
    fn into_iter(self) -> Self::IntoIter {
        IndexMapped {
            key: PhantomData,
            inner: self.items.iter().enumerate(),
        }
    }
}
// Mutably borrowing iteration: yields `(id, &mut value)` pairs in id order.
impl<'a, K: Id, V> IntoIterator for &'a mut IdVec<K, V> {
    type Item = (K, &'a mut V);
    type IntoIter = IndexMapped<K, iter::Enumerate<slice::IterMut<'a, V>>>;
    fn into_iter(self) -> Self::IntoIter {
        IndexMapped {
            key: PhantomData,
            inner: self.items.iter_mut().enumerate(),
        }
    }
}
impl<K: Id, V> IdVec<K, V> {
    /// Creates an empty `IdVec`.
    pub fn new() -> Self {
        IdVec {
            key: PhantomData,
            items: Vec::new(),
        }
    }

    /// Creates an empty `IdVec` with at least `capacity` preallocated slots.
    pub fn with_capacity(capacity: usize) -> Self {
        IdVec {
            key: PhantomData,
            items: Vec::with_capacity(capacity),
        }
    }

    /// Wraps an existing `Vec`, assigning id `i` to `items[i]`.
    ///
    /// Panics if `items.len()` is not representable by `K` (maintaining the `items.len() <=
    /// K::MAX_USIZE` invariant).
    pub fn from_items(items: Vec<V>) -> Self {
        K::assert_in_range(items.len());
        IdVec {
            key: PhantomData,
            items,
        }
    }

    /// Creates an `IdVec` with `count` entries, each produced by calling `f`.
    pub fn filled_with(count: Count<K>, mut f: impl FnMut() -> V) -> Self {
        IdVec {
            key: PhantomData,
            items: count.iter().map(|_| f()).collect(),
        }
    }

    /// Like `filled_with`, but `f` also receives the id of the entry being created.
    pub fn filled_with_indexed(count: Count<K>, f: impl FnMut(K) -> V) -> Self {
        IdVec {
            key: PhantomData,
            items: count.iter().map(f).collect(),
        }
    }

    /// Borrows the underlying storage as a plain slice.
    pub fn items(&self) -> &[V] {
        &self.items
    }

    /// Mutably borrows the underlying storage as a plain slice.
    pub fn items_mut(&mut self) -> &mut [V] {
        &mut self.items
    }

    /// Consumes the `IdVec`, returning the underlying `Vec`.
    pub fn into_items(self) -> Vec<V> {
        self.items
    }

    /// Returns the number of entries.
    pub fn len(&self) -> usize {
        self.items.len()
    }

    /// Returns the id range `0..len()` as a `Count`.
    pub fn count(&self) -> Count<K> {
        // Safe by the `items.len() <= K::MAX_USIZE` invariant.
        Count(K::from_index_unchecked(self.len()))
    }

    /// Returns `true` if the collection contains no entries.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }

    /// Appends `item` and returns its newly assigned id.
    ///
    /// The id is derived from the pre-push length; the range check runs on the *post*-push
    /// length, which re-establishes the `items.len() <= K::MAX_USIZE` invariant (and is what
    /// makes `count()`'s unchecked conversion sound).
    #[must_use]
    pub fn push(&mut self, item: V) -> K {
        let id = K::from_index_unchecked(self.len());
        self.items.push(item);
        K::assert_in_range(self.len());
        id
    }

    /// Shortens the collection to at most `len` entries, dropping ids `len..` and their values.
    pub fn truncate(&mut self, len: usize) {
        self.items.truncate(len)
    }

    /// Iterates over `(id, &value)` pairs in id order.
    pub fn iter(&self) -> IndexMapped<K, iter::Enumerate<slice::Iter<V>>> {
        self.into_iter()
    }

    /// Iterates over `(id, &mut value)` pairs in id order.
    pub fn iter_mut(&mut self) -> IndexMapped<K, iter::Enumerate<slice::IterMut<V>>> {
        self.into_iter()
    }

    /// Iterates over both collections in lockstep, yielding `(id, &v1, &v2)`.
    ///
    /// Returns `None` if the two collections have different lengths.
    pub fn try_zip_exact<'a, U>(
        &'a self,
        other: &'a IdVec<K, U>,
    ) -> Option<impl Iterator<Item = (K, &'a V, &'a U)>> {
        if self.items.len() == other.items.len() {
            Some(
                self.items
                    .iter()
                    .zip(other.items.iter())
                    .enumerate()
                    .map(|(idx, (v1, v2))| (K::from_index_unchecked(idx), v1, v2)),
            )
        } else {
            None
        }
    }

    /// Consumes the collection, mapping each `(id, value)` to a new value with the same id.
    pub fn into_mapped<W, F: FnMut(K, V) -> W>(self, mut f: F) -> IdVec<K, W> {
        let mapped_items = self.into_iter().map(|(idx, val)| f(idx, val)).collect();
        IdVec::from_items(mapped_items)
    }

    /// Fallible version of `into_mapped`; short-circuits at the first `Err`.
    pub fn try_into_mapped<W, E, F: FnMut(K, V) -> Result<W, E>>(
        self,
        mut f: F,
    ) -> Result<IdVec<K, W>, E> {
        let mapped_items = self
            .into_iter()
            .map(|(idx, val)| f(idx, val))
            .collect::<Result<_, _>>()?;
        Ok(IdVec::from_items(mapped_items))
    }

    /// Maps each `(id, &value)` to a new value, producing a new `IdVec` with the same ids.
    pub fn map<W, F: FnMut(K, &V) -> W>(&self, mut f: F) -> IdVec<K, W> {
        let mapped_items = self.iter().map(|(idx, val)| f(idx, val)).collect();
        IdVec::from_items(mapped_items)
    }

    /// Fallible version of `map`; short-circuits at the first `Err`.
    pub fn try_map<W, E, F: FnMut(K, &V) -> Result<W, E>>(
        &self,
        mut f: F,
    ) -> Result<IdVec<K, W>, E> {
        let mapped_items = self
            .iter()
            .map(|(idx, val)| f(idx, val))
            .collect::<Result<_, _>>()?;
        Ok(IdVec::from_items(mapped_items))
    }

    /// Builds an `IdVec` from `(id, value)` pairs whose ids must arrive as exactly
    /// `0, 1, 2, ...` in order.
    ///
    /// Returns `None` on any gap, duplicate, or out-of-order id.
    pub fn try_from_contiguous(entries: impl Iterator<Item = (K, V)>) -> Option<Self> {
        let mut items = Vec::with_capacity(entries.size_hint().0);
        for (idx, (key, val)) in entries.enumerate() {
            if idx != key.to_index() {
                return None;
            }
            items.push(val);
        }
        Some(Self::from_items(items))
    }

    /// Returns mutable references to two distinct entries, or `None` if `i == j`.
    pub fn get2_mut(&mut self, i: K, j: K) -> Option<(&mut V, &mut V)> {
        get2_mut(&mut self.items, i.to_index(), j.to_index())
    }
}
// Index by the id type (or anything that borrows it); panics on out-of-range ids, like `Vec`.
impl<K: Id, V, I: Borrow<K>> Index<I> for IdVec<K, V> {
    type Output = V;
    fn index(&self, key: I) -> &V {
        &self.items[key.borrow().to_index()]
    }
}
// Mutable counterpart of the `Index` impl above.
impl<K: Id, V, I: Borrow<K>> IndexMut<I> for IdVec<K, V> {
    fn index_mut(&mut self, key: I) -> &mut V {
        &mut self.items[key.borrow().to_index()]
    }
}

View file

@ -0,0 +1,18 @@
// Macro-exporting modules are declared first (with `#[macro_use]`) so their macros are in scope
// for the modules declared below.
#[macro_use]
pub mod id_type;
#[macro_use]
pub mod bytes_id;
#[macro_use]
pub mod forward_trait;
pub mod blocks;
pub mod flat_slices;
pub mod get2_mut;
pub mod id_bi_map;
pub mod id_vec;
pub mod norm_pair;
pub mod op_graph;
pub mod replace_none;
pub mod strongly_connected;

View file

@ -0,0 +1,33 @@
/// A normalized unordered pair, where the first component is always <= the second
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct NormPair<T>(T, T);

impl<T: Ord> NormPair<T> {
    /// Builds a pair, swapping the arguments if necessary so the smaller comes first.
    pub fn new(fst: T, snd: T) -> Self {
        if snd < fst {
            NormPair(snd, fst)
        } else {
            NormPair(fst, snd)
        }
    }

    /// Borrows the smaller component.
    pub fn fst(&self) -> &T {
        &self.0
    }

    /// Borrows the larger component.
    pub fn snd(&self) -> &T {
        &self.1
    }

    /// Consumes the pair, returning the smaller component.
    pub fn into_fst(self) -> T {
        self.0
    }

    /// Consumes the pair, returning the larger component.
    pub fn into_snd(self) -> T {
        self.1
    }

    /// Consumes the pair, returning `(smaller, larger)`.
    pub fn into_tuple(self) -> (T, T) {
        (self.0, self.1)
    }
}

View file

@ -0,0 +1,60 @@
use std::borrow::Borrow;
use crate::util::flat_slices::{FlatSlices, Slice};
use crate::util::id_type::{Count, Id};
/// Borrowed view of one node in an `OpGraph`: its op and the ids of its input nodes.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Node<'a, K, Op> {
    pub op: &'a Op,
    pub inputs: &'a [K],
}
/// Conceptually represents a collection of the form `IdVec<K, (Op, Vec<K>)>`.
///
/// Each tuple `(Op, Vec<K>)` is called a "node", and can be thought of as an "op" together with a
/// list of zero or more "inputs" to that op, which are indices pointing to other nodes.
///
/// The input lists are actually stored in a single contiguous buffer to reduce the number of heap
/// allocations.
///
/// This is essentially just a newtype wrapper around `FlatSlices`. We use this wrapper to
/// represent op graphs (instead of using `FlatSlices` directly) just so that the names of types,
/// functions, and fields in the API are more meaningful for this use case.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OpGraph<K: Id, Op> {
    /// One slice per node: the `SliceInfo` is the op, the items are the input ids.
    inner: FlatSlices<K, Op, K>,
}
// Defaults to an empty graph; equivalent to `OpGraph::new()`.
impl<K: Id, Op> Default for OpGraph<K, Op> {
    fn default() -> Self {
        Self::new()
    }
}
impl<K: Id, Op> OpGraph<K, Op> {
    /// Creates an empty graph with no nodes.
    pub fn new() -> Self {
        Self {
            inner: FlatSlices::new(),
        }
    }

    /// Returns the number of nodes.
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    /// Returns `true` if the graph contains no nodes (clippy `len_without_is_empty`; matches
    /// `IdVec::is_empty` elsewhere in `util`).
    pub fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    /// Returns the id range of all nodes as a `Count`.
    pub fn count(&self) -> Count<K> {
        self.inner.count()
    }

    /// Appends a node with the given op and input ids, returning the new node's id.
    ///
    /// Note: inputs may reference any ids, including not-yet-added nodes; no validation is
    /// performed here.
    pub fn add_node(&mut self, op: Op, inputs: &[K]) -> K {
        self.inner.push_slice(op, inputs)
    }

    /// Borrows the node with the given id.
    pub fn node<I: Borrow<K>>(&self, idx: I) -> Node<K, Op> {
        let Slice { info, items } = self.inner.get(idx);
        Node {
            op: info,
            inputs: items,
        }
    }
}

View file

@ -0,0 +1,13 @@
/// Error returned by `replace_none` when the option already held a value; carries that
/// pre-existing value.
#[derive(Clone, Debug, thiserror::Error)]
#[error("replace_none: expected 'None' option, found value {0:?}")]
pub struct ReplaceNoneError<T: std::fmt::Debug>(pub T);
/// Sets `*opt = Some(val)`, asserting that the option was previously `None`.
///
/// Returns `Err(ReplaceNoneError(prev))` carrying the displaced value if the option already held
/// one. In either case, `val` ends up stored in `opt`.
pub fn replace_none<T: std::fmt::Debug>(
    opt: &mut Option<T>,
    val: T,
) -> Result<(), ReplaceNoneError<T>> {
    // `Option::replace` stores `val` and hands back whatever was there before.
    if let Some(prev) = opt.replace(val) {
        Err(ReplaceNoneError(prev))
    } else {
        Ok(())
    }
}

View file

@ -0,0 +1,153 @@
use crate::util::flat_slices::FlatSlices;
use crate::util::id_type::{Count, Id};
use crate::util::id_vec::IdVec;
use crate::util::replace_none::replace_none;
/// Classifies a strongly connected component by whether it contains a cycle.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SccKind {
    /// A single node with no self-loop.
    Acyclic,
    /// Contains at least one cycle: either multiple nodes, or one node with a self-loop.
    Cyclic,
}
/// Partitions the nodes `0..node_count` of a directed graph into strongly connected components.
///
/// `node_successors` is called once per node (on its first visit) to enumerate that node's
/// out-edges. Each slice in the returned `FlatSlices` is one SCC, tagged with whether it contains
/// a cycle; a size-1 SCC is `Cyclic` only if its node has a self-loop. SCCs are emitted as their
/// roots complete, which for Tarjan's algorithm should place each SCC after the SCCs it can reach
/// — NOTE(review): confirm callers rely on this ordering before depending on it.
pub fn strongly_connected<SccId, NodeId, NodeSuccessors>(
    node_count: Count<NodeId>,
    mut node_successors: impl FnMut(NodeId) -> NodeSuccessors,
) -> FlatSlices<SccId, SccKind, NodeId>
where
    SccId: Id,
    NodeId: Id + Eq + Copy,
    NodeSuccessors: Iterator<Item = NodeId>,
{
    // We use Tarjan's algorithm, performing the depth-first search using an explicit Vec-based
    // stack instead of recursion to avoid stack overflows on large graphs.
    #[derive(Clone, Copy, Debug)]
    enum NodeState {
        /// Not yet reached by the search.
        Unvisited,
        /// Somewhere on the current DFS path; `low_link` is still being updated.
        OnSearchStack { index: u32, low_link: u32 },
        /// DFS visit finished, but the node's SCC has not been emitted yet.
        OnSccStack { index: u32 },
        /// The node's SCC has been emitted.
        Complete,
    }
    #[derive(Clone, Copy)]
    enum Action<NodeId> {
        /// Pre-order step: visit `node` if unvisited; otherwise fold its index into `parent`'s
        /// low-link.
        TryVisit {
            parent: Option<NodeId>,
            node: NodeId,
        },
        /// Post-order step: runs after all of `node`'s successors have been fully processed.
        FinishVisit {
            parent: Option<NodeId>,
            node: NodeId,
        },
    }
    let mut sccs = FlatSlices::new();
    let mut node_states = IdVec::filled_with(node_count, || NodeState::Unvisited);
    // Records, once a node is first visited, whether it has an edge to itself.
    let mut node_self_loops = IdVec::filled_with(node_count, || None);
    let mut scc_stack = Vec::new();
    let mut search_stack = Vec::new();
    // Next DFS discovery index to hand out.
    let mut next_index = 0;
    // Start a search from every node so disconnected graphs are fully covered; already-visited
    // roots fall through the `NodeState::Complete` arm immediately.
    for search_root in node_count.iter() {
        search_stack.push(Action::TryVisit {
            parent: None,
            node: search_root,
        });
        while let Some(action) = search_stack.pop() {
            match action {
                Action::TryVisit { parent, node } => match node_states[node] {
                    NodeState::Unvisited => {
                        node_states[node] = NodeState::OnSearchStack {
                            index: next_index,
                            low_link: next_index,
                        };
                        next_index += 1;
                        scc_stack.push(node);
                        // Pushed *before* the successor visits so it pops *after* them all.
                        search_stack.push(Action::FinishVisit { parent, node });
                        // We need to explicitly track self-loops so that when we obtain a size-1
                        // SCC we can determine if it's cyclic or acyclic.
                        let mut has_self_loop = false;
                        for successor in node_successors(node) {
                            if successor == node {
                                has_self_loop = true;
                            }
                            search_stack.push(Action::TryVisit {
                                parent: Some(node),
                                node: successor,
                            });
                        }
                        replace_none(&mut node_self_loops[node], has_self_loop).unwrap();
                    }
                    NodeState::OnSearchStack { index, low_link: _ }
                    | NodeState::OnSccStack { index } => {
                        // Edge to a node that is still a candidate for the current SCC: fold its
                        // discovery index into the parent's low-link.
                        if let Some(parent) = parent {
                            if let NodeState::OnSearchStack {
                                index: _,
                                low_link: parent_low_link,
                            } = &mut node_states[parent]
                            {
                                *parent_low_link = (*parent_low_link).min(index);
                            } else {
                                unreachable!("parent should be on search stack");
                            }
                        }
                    }
                    // Edge into an already-emitted SCC: nothing to update.
                    NodeState::Complete => {}
                },
                Action::FinishVisit { parent, node } => {
                    let (index, low_link) =
                        if let NodeState::OnSearchStack { index, low_link } = node_states[node] {
                            (index, low_link)
                        } else {
                            unreachable!("node should be on search stack");
                        };
                    node_states[node] = NodeState::OnSccStack { index };
                    // Propagate this node's final low-link up to its DFS parent.
                    if let Some(parent) = parent {
                        if let NodeState::OnSearchStack {
                            index: _,
                            low_link: parent_low_link,
                        } = &mut node_states[parent]
                        {
                            *parent_low_link = (*parent_low_link).min(low_link);
                        } else {
                            unreachable!("parent should be on search stack")
                        }
                    }
                    // `node` is an SCC root exactly when its low-link equals its own index; the
                    // SCC is everything on `scc_stack` from `node` upward.
                    if low_link == index {
                        let mut scc_start = scc_stack.len();
                        loop {
                            scc_start -= 1;
                            let scc_node = scc_stack[scc_start];
                            debug_assert!(matches!(
                                node_states[scc_node],
                                NodeState::OnSccStack { .. }
                            ));
                            node_states[scc_node] = NodeState::Complete;
                            if scc_node == node {
                                break;
                            }
                        }
                        let scc_slice = &scc_stack[scc_start..];
                        // A single node forms a cyclic SCC only if it has a self-loop.
                        let scc_kind = if scc_slice.len() == 1 && !node_self_loops[node].unwrap() {
                            SccKind::Acyclic
                        } else {
                            SccKind::Cyclic
                        };
                        sccs.push_slice(scc_kind, scc_slice);
                        scc_stack.truncate(scc_start);
                    }
                }
            }
        }
    }
    sccs
}