switch to new unification table

This commit is contained in:
Folkert 2022-05-20 21:54:22 +02:00
parent 4550f3fa88
commit 2b94eeed60
No known key found for this signature in database
GPG key ID: 1F17F6FFD112B97C
3 changed files with 233 additions and 224 deletions

View file

@ -413,7 +413,7 @@ fn deep_copy_type_vars<'a>(
// Always deal with the root, so that unified variables are treated the same.
let var = subs.get_root_key_without_compacting(var);
let desc = subs.get_ref_mut(var);
let desc = subs.get(var);
// Unlike `deep_copy_var` in solve, here we are cloning *all* flex and rigid vars.
// So we only want to short-circuit if we've already done the cloning work for a particular
@ -432,7 +432,7 @@ fn deep_copy_type_vars<'a>(
};
let copy = subs.fresh(copy_descriptor);
subs.get_ref_mut(var).copy = copy.into();
subs.set_copy(var, copy.into());
visited.push(var);
@ -456,7 +456,7 @@ fn deep_copy_type_vars<'a>(
let new_arguments = VariableSubsSlice::reserve_into_subs(subs, $slice.len());
for (target_index, var_index) in (new_arguments.indices()).zip($slice) {
let var = subs[var_index];
let copy_var = subs.get_ref(var).copy.into_variable().unwrap_or(var);
let copy_var = subs.get_copy(var).into_variable().unwrap_or(var);
subs.variables[target_index] = copy_var;
}
new_arguments

View file

@ -8,7 +8,9 @@ use roc_module::ident::{Lowercase, TagName, Uppercase};
use roc_module::symbol::Symbol;
use std::fmt;
use std::iter::{once, Iterator, Map};
use ven_ena::unify::{InPlace, Snapshot, UnificationTable, UnifyKey};
use ven_ena::unify::{InPlace, /* Snapshot, UnificationTable, */ UnifyKey};
use crate::unification_table::{Snapshot, UnificationTable};
// if your changes cause this number to go down, great!
// please change it to the lower number.
@ -143,34 +145,11 @@ impl Subs {
}
fn serialize_unification_table(
utable: &UnificationTable<InPlace<Variable>>,
utable: &UnificationTable,
writer: &mut impl std::io::Write,
mut written: usize,
) -> std::io::Result<usize> {
for i in 0..utable.len() {
let var = unsafe { Variable::from_index(i as u32) };
let desc = if utable.is_redirect(var) {
let root = utable.get_root_key_without_compacting(var);
// our strategy for a redirect; rank is max, mark is max, copy stores the var
Descriptor {
content: Content::Error,
rank: Rank(u32::MAX),
mark: Mark(i32::MAX),
copy: root.into(),
}
} else {
utable.probe_value_without_compacting(var)
};
let bytes: [u8; std::mem::size_of::<Descriptor>()] =
unsafe { std::mem::transmute(desc) };
written += bytes.len();
writer.write_all(&bytes)?;
}
Ok(written)
todo!()
}
/// Lowercase can be heap-allocated
@ -280,38 +259,8 @@ impl Subs {
bytes: &[u8],
length: usize,
offset: usize,
) -> (UnificationTable<InPlace<Variable>>, usize) {
let alignment = std::mem::align_of::<Descriptor>();
let size = std::mem::size_of::<Descriptor>();
debug_assert_eq!(offset, round_to_multiple_of(offset, alignment));
let mut utable = UnificationTable::default();
utable.reserve(length);
let byte_length = length * size;
let byte_slice = &bytes[offset..][..byte_length];
let slice =
unsafe { std::slice::from_raw_parts(byte_slice.as_ptr() as *const Descriptor, length) };
let mut roots = Vec::new();
for desc in slice {
let var = utable.new_key(*desc);
if desc.rank == Rank(u32::MAX) && desc.mark == Mark(i32::MAX) {
let root = desc.copy.into_variable().unwrap();
roots.push((var, root));
}
}
for (var, root) in roots {
let desc = utable.probe_value_without_compacting(root);
utable.unify_roots(var, root, desc)
}
(utable, offset + byte_length)
) -> (UnificationTable, usize) {
todo!()
}
fn deserialize_field_names(
@ -377,7 +326,7 @@ impl Subs {
#[derive(Clone)]
pub struct Subs {
utable: UnificationTable<InPlace<Variable>>,
utable: UnificationTable,
pub variables: Vec<Variable>,
pub tag_names: Vec<TagName>,
pub field_names: Vec<Lowercase>,
@ -1597,16 +1546,8 @@ impl Subs {
problems: Vec::new(),
};
// NOTE the utable does not (currently) have a with_capacity; using this as the next-best thing
subs.utable.reserve(capacity);
// TODO There are at least these opportunities for performance optimization here:
// * Making the default flex_var_descriptor be all 0s, so no init step is needed.
for _ in 0..capacity {
subs.utable.new_key(flex_var_descriptor());
}
define_integer_types(&mut subs);
define_float_types(&mut subs);
@ -1654,14 +1595,14 @@ impl Subs {
pub fn extend_by(&mut self, entries: usize) {
self.utable.reserve(entries);
for _ in 0..entries {
self.utable.new_key(flex_var_descriptor());
}
}
#[inline(always)]
pub fn fresh(&mut self, value: Descriptor) -> Variable {
self.utable.new_key(value)
// self.utable.new_key(value)
self.utable
.push(value.content, value.rank, value.mark, value.copy)
}
#[inline(always)]
@ -1697,39 +1638,43 @@ impl Subs {
}
pub fn get(&mut self, key: Variable) -> Descriptor {
self.utable.probe_value(key)
self.utable.get_descriptor(key)
}
pub fn get_ref(&self, key: Variable) -> &Descriptor {
&self.utable.probe_value_ref(key).value
}
#[inline(always)]
pub fn get_ref_mut(&mut self, key: Variable) -> &mut Descriptor {
&mut self.utable.probe_value_ref_mut(key).value
}
// pub fn get_ref(&self, key: Variable) -> &Descriptor {
// &self.utable.probe_value_ref(key).value
// }
//
// #[inline(always)]
// pub fn get_ref_mut(&mut self, key: Variable) -> &mut Descriptor {
// &mut self.utable.probe_value_ref_mut(key).value
// }
pub fn get_rank(&self, key: Variable) -> Rank {
self.utable.probe_value_ref(key).value.rank
self.utable.get_rank(key)
}
/// Look up the `copy` field of `key`'s descriptor, delegating to the
/// underlying unification table (which resolves `key` to its root first).
pub fn get_copy(&self, key: Variable) -> OptVariable {
self.utable.get_copy(key)
}
pub fn get_mark(&self, key: Variable) -> Mark {
self.utable.probe_value_ref(key).value.mark
self.utable.get_mark(key)
}
pub fn get_rank_mark(&self, key: Variable) -> (Rank, Mark) {
let desc = &self.utable.probe_value_ref(key).value;
(desc.rank, desc.mark)
(self.utable.get_rank(key), self.utable.get_mark(key))
}
#[inline(always)]
pub fn get_without_compacting(&self, key: Variable) -> Descriptor {
self.utable.probe_value_without_compacting(key)
// self.utable.probe_value_without_compacting(key)
self.utable.get_descriptor(key)
}
pub fn get_content_without_compacting(&self, key: Variable) -> &Content {
&self.utable.probe_value_ref(key).value.content
// &self.utable.probe_value_ref(key).value.content
self.utable.get_content(key)
}
#[inline(always)]
@ -1739,80 +1684,64 @@ impl Subs {
#[inline(always)]
pub fn get_root_key_without_compacting(&self, key: Variable) -> Variable {
self.utable.get_root_key_without_compacting(key)
self.utable.root_key_without_compacting(key)
}
#[inline(always)]
pub fn set(&mut self, key: Variable, r_value: Descriptor) {
let l_key = self.utable.inlined_get_root_key(key);
self.utable.update_value(l_key, |node| node.value = r_value);
// self.utable.update_value(l_key, |node| node.value = r_value);
self.utable.set_descriptor(l_key, r_value)
}
pub fn set_rank(&mut self, key: Variable, rank: Rank) {
let l_key = self.utable.inlined_get_root_key(key);
// let l_key = self.utable.inlined_get_root_key(key);
self.utable.update_value(l_key, |node| {
node.value.rank = rank;
});
self.utable.set_rank(key, rank)
}
pub fn set_mark(&mut self, key: Variable, mark: Mark) {
let l_key = self.utable.inlined_get_root_key(key);
// let l_key = self.utable.inlined_get_root_key(key);
self.utable.update_value(l_key, |node| {
node.value.mark = mark;
});
self.utable.set_mark(key, mark)
}
/// Overwrite the `copy` field of `key`'s descriptor.
/// The explicit root lookup previously done here (commented line below) is
/// now performed inside `UnificationTable::set_copy` itself.
pub fn set_copy(&mut self, key: Variable, copy: OptVariable) {
// let l_key = self.utable.inlined_get_root_key(key);
self.utable.set_copy(key, copy)
}
pub fn set_rank_mark(&mut self, key: Variable, rank: Rank, mark: Mark) {
let l_key = self.utable.inlined_get_root_key(key);
// let l_key = self.utable.inlined_get_root_key(key);
self.utable.update_value(l_key, |node| {
node.value.rank = rank;
node.value.mark = mark;
});
self.utable.set_rank(key, rank);
self.utable.set_mark(key, mark);
}
pub fn set_content(&mut self, key: Variable, content: Content) {
let l_key = self.utable.inlined_get_root_key(key);
// let l_key = self.utable.inlined_get_root_key(key);
self.utable.update_value(l_key, |node| {
node.value.content = content;
});
}
pub fn get_copy(&self, key: Variable) -> OptVariable {
self.utable.probe_value_ref(key).value.copy
}
pub fn set_copy(&mut self, key: Variable, v: OptVariable) {
let l_key = self.utable.inlined_get_root_key(key);
self.utable.update_value(l_key, |node| {
node.value.copy = v;
});
self.utable.set_content(key, content);
}
pub fn modify<F>(&mut self, key: Variable, mapper: F)
where
F: FnOnce(&mut Descriptor),
{
mapper(self.get_ref_mut(key));
let mut desc = self.utable.get_descriptor(key);
mapper(&mut desc);
self.utable.set_descriptor(key, desc);
}
#[inline(always)]
pub fn get_rank_set_mark(&mut self, key: Variable, mark: Mark) -> Rank {
let l_key = self.utable.inlined_get_root_key(key);
// let l_key = self.utable.inlined_get_root_key(key);
let mut rank = Rank::NONE;
self.utable.set_mark(key, mark);
self.utable.update_value(l_key, |node| {
node.value.mark = mark;
rank = node.value.rank;
});
rank
self.utable.get_rank(key)
}
pub fn equivalent(&mut self, left: Variable, right: Variable) -> bool {
@ -1932,22 +1861,19 @@ impl Subs {
(var.index() as usize) < self.len()
}
pub fn snapshot(&mut self) -> Snapshot<InPlace<Variable>> {
/// Capture the current state of the unification table so it can later be
/// restored with `rollback_to` (or discarded with `commit_snapshot`).
pub fn snapshot(&mut self) -> Snapshot {
self.utable.snapshot()
}
pub fn rollback_to(&mut self, snapshot: Snapshot<InPlace<Variable>>) {
/// Restore the unification table to the state captured in `snapshot`,
/// discarding all changes made since.
pub fn rollback_to(&mut self, snapshot: Snapshot) {
self.utable.rollback_to(snapshot)
}
pub fn commit_snapshot(&mut self, snapshot: Snapshot<InPlace<Variable>>) {
self.utable.commit(snapshot)
/// Keep all changes made since `snapshot` was taken. With the new
/// unification table this is a no-op: the snapshot is simply dropped
/// (the old explicit `commit` call is kept below for reference).
pub fn commit_snapshot(&mut self, _snapshot: Snapshot) {
// self.utable.commit(snapshot)
}
pub fn vars_since_snapshot(
&mut self,
snapshot: &Snapshot<InPlace<Variable>>,
) -> core::ops::Range<Variable> {
/// The variables created since `snapshot` was taken, as a contiguous range.
pub fn vars_since_snapshot(&mut self, snapshot: &Snapshot) -> core::ops::Range<Variable> {
self.utable.vars_since_snapshot(snapshot)
}
}
@ -3657,17 +3583,22 @@ fn restore_help(subs: &mut Subs, initial: Variable) {
|variable_subs_slice: VariableSubsSlice| &variables[variable_subs_slice.indices()];
while let Some(var) = stack.pop() {
let desc = &mut subs.utable.probe_value_ref_mut(var).value;
// let desc = &mut subs.utable.probe_value_ref_mut(var).value;
if desc.copy.is_some() {
desc.rank = Rank::NONE;
desc.mark = Mark::NONE;
desc.copy = OptVariable::NONE;
let copy = subs.utable.get_copy(var);
if copy.is_none() {
continue;
}
subs.utable.set_rank(var, Rank::NONE);
subs.utable.set_mark(var, Mark::NONE);
subs.utable.set_copy(var, OptVariable::NONE);
use Content::*;
use FlatType::*;
match &desc.content {
match subs.utable.get_content(var) {
FlexVar(_) | RigidVar(_) | FlexAbleVar(_, _) | RigidAbleVar(_, _) | Error => (),
RecursionVar { structure, .. } => {
@ -3730,7 +3661,6 @@ fn restore_help(subs: &mut Subs, initial: Variable) {
}
}
}
}
}
#[derive(Clone, Debug)]
@ -3804,7 +3734,7 @@ impl StorageSubs {
for i in range {
let variable = Variable(i as u32);
let descriptor = self.subs.get_ref(variable);
let descriptor = self.subs.utable.get_descriptor(variable);
debug_assert!(descriptor.copy.is_none());
let new_content = Self::offset_content(&offsets, &descriptor.content);
@ -4025,13 +3955,13 @@ pub fn deep_copy_var_to(
// we have tracked all visited variables, and can now traverse them
// in one go (without looking at the UnificationTable) and clear the copy field
for var in env.visited {
let descriptor = env.source.get_ref_mut(var);
env.source.modify(var, |descriptor| {
if descriptor.copy.is_some() {
descriptor.rank = Rank::NONE;
descriptor.mark = Mark::NONE;
descriptor.copy = OptVariable::NONE;
}
});
}
copy
@ -4436,13 +4366,13 @@ pub fn copy_import_to(
// in one go (without looking at the UnificationTable) and clear the copy field
for var in visited {
let descriptor = source.get_ref_mut(var);
source.modify(var, |descriptor| {
if descriptor.copy.is_some() {
descriptor.rank = Rank::NONE;
descriptor.mark = Mark::NONE;
descriptor.copy = OptVariable::NONE;
}
});
}
CopiedImport {

View file

@ -1,6 +1,6 @@
use crate::subs::{Content, Descriptor, Mark, OptVariable, Rank, Variable, VariableSubsSlice};
#[derive(Clone)]
#[derive(Clone, Default)]
pub struct UnificationTable {
contents: Vec<Content>,
ranks: Vec<Rank>,
@ -9,7 +9,7 @@ pub struct UnificationTable {
redirects: Vec<OptVariable>,
}
struct Snapshot(UnificationTable);
pub struct Snapshot(UnificationTable);
impl UnificationTable {
pub fn with_capacity(cap: usize) -> Self {
@ -36,7 +36,7 @@ impl UnificationTable {
let start = self.contents.len();
self.contents
.extend(repeat(Content::Error).take(extra_length));
.extend(repeat(Content::FlexVar(None)).take(extra_length));
self.ranks.extend(repeat(Rank::NONE).take(extra_length));
self.marks.extend(repeat(Mark::NONE).take(extra_length));
self.copies
@ -47,12 +47,22 @@ impl UnificationTable {
VariableSubsSlice::new(start as _, extra_length as _)
}
pub fn push(&mut self, content: Content, rank: Rank, mark: Mark, copy: OptVariable) {
pub fn push(
&mut self,
content: Content,
rank: Rank,
mark: Mark,
copy: OptVariable,
) -> Variable {
let variable = unsafe { Variable::from_index(self.len() as _) };
self.contents.push(content);
self.ranks.push(rank);
self.marks.push(mark);
self.copies.push(copy);
self.redirects.push(OptVariable::NONE);
variable
}
pub fn set(
@ -71,11 +81,6 @@ impl UnificationTable {
self.copies[index] = copy;
}
#[inline(always)]
pub fn unify_right_to_left(&mut self, to: Variable, from: Variable) {
self.redirects[to.index() as usize] = OptVariable::from(from);
}
#[inline(always)]
pub fn root_key(&mut self, mut key: Variable) -> Variable {
let index = key.index() as usize;
@ -84,7 +89,9 @@ impl UnificationTable {
key = redirect;
}
if index != key.index() as usize {
self.redirects[index] = OptVariable::from(key);
}
key
}
@ -108,6 +115,12 @@ impl UnificationTable {
self.marks[self.root_key_without_compacting(key).index() as usize]
}
/// Read the `copy` slot of `key`'s root, without mutating any redirect chains.
#[inline(always)]
pub fn get_copy(&self, key: Variable) -> OptVariable {
let index = self.root_key_without_compacting(key).index() as usize;
self.copies[index]
}
#[inline(always)]
pub fn get_content(&self, key: Variable) -> &Content {
&self.contents[self.root_key_without_compacting(key).index() as usize]
@ -125,6 +138,12 @@ impl UnificationTable {
self.marks[index] = value;
}
/// Write `value` into the `copy` slot of `key`'s root.
/// Uses `root_key`, which may also rewrite `key`'s redirect entry to point
/// directly at the root.
#[inline(always)]
pub fn set_copy(&mut self, key: Variable, value: OptVariable) {
let index = self.root_key(key).index() as usize;
self.copies[index] = value;
}
#[inline(always)]
pub fn set_content(&mut self, key: Variable, value: Content) {
let index = self.root_key(key).index() as usize;
@ -138,4 +157,64 @@ impl UnificationTable {
pub fn rollback_to(&mut self, snapshot: Snapshot) {
*self = snapshot.0;
}
/// The range of variables created after `snapshot` was taken.
/// A `Snapshot` holds a full copy of the table, so every index at or past
/// its length must have been pushed afterwards.
pub fn vars_since_snapshot(&self, snapshot: &Snapshot) -> std::ops::Range<Variable> {
unsafe {
// SAFETY(review): assumes any index up to the table length is acceptable
// to `Variable::from_index` — confirm against its contract.
let start = Variable::from_index(snapshot.0.len() as u32);
let end = Variable::from_index(self.len() as u32);
start..end
}
}
/// Whether `key`'s own slot holds a redirect (i.e. `key` is not a root).
/// Note: this inspects `key` directly and does not follow the chain.
pub fn is_redirect(&self, key: Variable) -> bool {
self.redirects[key.index() as usize].is_some()
}
/// Whether `a` and `b` currently belong to the same equivalence class.
/// Takes `&mut self` because `root_key` can rewrite redirect entries as it
/// resolves each variable to its root.
pub fn unioned(&mut self, a: Variable, b: Variable) -> bool {
self.root_key(a) == self.root_key(b)
}
// TODO remove
/// Compatibility shim kept for callers of the old ena-based API; it simply
/// resolves `key` to its root.
#[inline(always)]
pub fn inlined_get_root_key(&mut self, key: Variable) -> Variable {
self.root_key(key)
}
/// NOTE: assumes variables are root
/// Merge two root variables: `from` is redirected to `to` (unless they are
/// the same slot), and `to`'s descriptor fields are overwritten with `desc`.
pub fn unify_roots(&mut self, to: Variable, from: Variable, desc: Descriptor) {
let from_index = from.index() as usize;
let to_index = to.index() as usize;
// redirect from -> to
if from_index != to_index {
self.redirects[from_index] = OptVariable::from(to);
}
// update to's Descriptor
self.contents[to_index] = desc.content;
self.ranks[to_index] = desc.rank;
self.marks[to_index] = desc.mark;
self.copies[to_index] = desc.copy;
}
/// Assemble a full `Descriptor` for `key`'s root from the four parallel
/// arrays, without mutating any redirect chains.
pub fn get_descriptor(&self, key: Variable) -> Descriptor {
let index = self.root_key_without_compacting(key).index() as usize;
Descriptor {
content: self.contents[index],
rank: self.ranks[index],
mark: self.marks[index],
copy: self.copies[index],
}
}
/// Store all four fields of `desc` into `key`'s root slot.
/// Uses `root_key`, which may also rewrite redirect entries along the way.
pub fn set_descriptor(&mut self, key: Variable, desc: Descriptor) {
let index = self.root_key(key).index() as usize;
self.contents[index] = desc.content;
self.ranks[index] = desc.rank;
self.marks[index] = desc.mark;
self.copies[index] = desc.copy;
}
}