Mirror of https://github.com/salsa-rs/salsa.git
Synced 2025-09-27 20:49:27 +00:00

Use inventory for static ingredient registration (#934)
Some checks failed
Book / Book (push) Has been cancelled
Release-plz / Release-plz release (push) Has been cancelled
Release-plz / Release-plz PR (push) Has been cancelled
Test / Test (push) Has been cancelled
Test / Miri (push) Has been cancelled
Test / Shuttle (push) Has been cancelled
Test / Benchmarks (push) Has been cancelled
Book / Deploy (push) Has been cancelled
* use `inventory` for static ingredient registration
* remove unnecessary synchronization from memo tables
* use global ingredient caches for database-independent ingredients
* add manual ingredient registration API
* remove static ingredient index optimization when manual registration is in use
* fix atomic imports
* simplify ingredient caches
This commit is contained in:
parent d28d66bf13, commit dba66f1a37

130 changed files with 1023 additions and 613 deletions
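For context on the mechanism behind the `register_jar!` invocations that appear throughout this diff: the `inventory` crate collects values submitted anywhere in the dependency graph into a global registry that can be iterated at runtime, which is what makes static (link-time) ingredient registration possible. The following is a minimal, self-contained sketch of that crate's API only; `IngredientEntry` and its field are hypothetical stand-ins, not salsa's actual types.

// Minimal sketch of the `inventory` mechanism this commit builds on.
// `IngredientEntry` is a made-up stand-in for salsa's erased jar type.
pub struct IngredientEntry {
    pub type_name: &'static str,
}

// Declare a global collection that entries can be submitted into.
inventory::collect!(IngredientEntry);

// Any crate in the dependency graph can submit entries at compile/link time.
inventory::submit! {
    IngredientEntry { type_name: "MyInput" }
}

fn main() {
    // At startup (e.g. database construction), iterate everything submitted.
    for entry in inventory::iter::<IngredientEntry> {
        println!("registered ingredient: {}", entry.type_name);
    }
}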
.github/workflows/test.yml (vendored, 2 changes)

@@ -55,6 +55,8 @@ jobs:
         run: cargo clippy --workspace --all-targets -- -D warnings
       - name: Test
         run: cargo nextest run --workspace --all-targets --no-fail-fast
+      - name: Test Manual Registration
+        run: cargo nextest run --workspace --tests --no-fail-fast --no-default-features --features macros
       - name: Test docs
         run: cargo test --workspace --doc
       - name: Check (without default features)
Cargo.toml

@@ -19,13 +19,15 @@ hashbrown = "0.15"
 hashlink = "0.10"
 indexmap = "2"
 intrusive-collections = "0.9.7"
-papaya = "0.2.3"
 parking_lot = "0.12"
 portable-atomic = "1"
 rustc-hash = "2"
 smallvec = "1"
 tracing = { version = "0.1", default-features = false, features = ["std"] }

+# Automatic ingredient registration.
+inventory = { version = "0.3.20", optional = true }
+
 # parallel map
 rayon = { version = "1.10.0", optional = true }

@@ -36,7 +38,8 @@ thin-vec = "0.2.13"
 shuttle = { version = "0.8.0", optional = true }

 [features]
-default = ["salsa_unstable", "rayon", "macros"]
+default = ["salsa_unstable", "rayon", "macros", "inventory"]
+inventory = ["dep:inventory"]
 shuttle = ["dep:shuttle"]
 # FIXME: remove `salsa_unstable` before 1.0.
 salsa_unstable = []
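Downstream users who need fully manual registration can mirror the new CI job above: depend on salsa with `default-features = false` and re-enable only the features they need (the workflow invokes `--no-default-features --features macros`), since the `inventory` dependency is now pulled in through the default feature set.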
@@ -21,14 +21,21 @@ macro_rules! setup_accumulator_impl {
         use salsa::plumbing as $zalsa;
         use salsa::plumbing::accumulator as $zalsa_struct;

+        impl $zalsa::HasJar for $Struct {
+            type Jar = $zalsa_struct::JarImpl<$Struct>;
+            const KIND: $zalsa::JarKind = $zalsa::JarKind::Struct;
+        }
+
+        $zalsa::register_jar! {
+            $zalsa::ErasedJar::erase::<$Struct>()
+        }
+
         fn $ingredient(zalsa: &$zalsa::Zalsa) -> &$zalsa_struct::IngredientImpl<$Struct> {
             static $CACHE: $zalsa::IngredientCache<$zalsa_struct::IngredientImpl<$Struct>> =
                 $zalsa::IngredientCache::new();

             $CACHE.get_or_create(zalsa, || {
-                zalsa
-                    .lookup_jar_by_type::<$zalsa_struct::JarImpl<$Struct>>()
-                    .get_or_create()
+                zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Struct>>()
             })
         }
@@ -74,6 +74,15 @@ macro_rules! setup_input_struct {

         type $Configuration = $Struct;

+        impl $zalsa::HasJar for $Struct {
+            type Jar = $zalsa_struct::JarImpl<$Configuration>;
+            const KIND: $zalsa::JarKind = $zalsa::JarKind::Struct;
+        }
+
+        $zalsa::register_jar! {
+            $zalsa::ErasedJar::erase::<$Struct>()
+        }
+
         impl $zalsa_struct::Configuration for $Configuration {
             const LOCATION: $zalsa::Location = $zalsa::Location {
                 file: file!(),

@@ -101,14 +110,14 @@ macro_rules! setup_input_struct {
                     $zalsa::IngredientCache::new();

                 CACHE.get_or_create(zalsa, || {
-                    zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create()
+                    zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>()
                 })
             }

             pub fn ingredient_mut(db: &mut dyn $zalsa::Database) -> (&mut $zalsa_struct::IngredientImpl<Self>, &mut $zalsa::Runtime) {
                 let zalsa_mut = db.zalsa_mut();
                 zalsa_mut.new_revision();
-                let index = zalsa_mut.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create();
+                let index = zalsa_mut.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>();
                 let (ingredient, runtime) = zalsa_mut.lookup_ingredient_mut(index);
                 let ingredient = ingredient.assert_type_mut::<$zalsa_struct::IngredientImpl<Self>>();
                 (ingredient, runtime)

@@ -149,8 +158,8 @@ macro_rules! setup_input_struct {
         impl $zalsa::SalsaStructInDb for $Struct {
             type MemoIngredientMap = $zalsa::MemoIngredientSingletonIndex;

-            fn lookup_or_create_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
-                aux.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create().into()
+            fn lookup_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
+                aux.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().into()
             }

             #[inline]
@@ -92,6 +92,16 @@ macro_rules! setup_interned_struct {

         type $Configuration = $StructWithStatic;

+        impl<$($db_lt_arg)?> $zalsa::HasJar for $Struct<$($db_lt_arg)?> {
+            type Jar = $zalsa_struct::JarImpl<$Configuration>;
+            const KIND: $zalsa::JarKind = $zalsa::JarKind::Struct;
+        }
+
+        $zalsa::register_jar! {
+            $zalsa::ErasedJar::erase::<$StructWithStatic>()
+        }
+
         type $StructDataIdent<$db_lt> = ($($field_ty,)*);

         /// Key to use during hash lookups. Each field is some type that implements `Lookup<T>`

@@ -149,7 +158,7 @@ macro_rules! setup_interned_struct {

                 let zalsa = db.zalsa();
                 CACHE.get_or_create(zalsa, || {
-                    zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create()
+                    zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>()
                 })
             }
         }

@@ -181,8 +190,8 @@ macro_rules! setup_interned_struct {
         impl< $($db_lt_arg)? > $zalsa::SalsaStructInDb for $Struct< $($db_lt_arg)? > {
             type MemoIngredientMap = $zalsa::MemoIngredientSingletonIndex;

-            fn lookup_or_create_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
-                aux.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create().into()
+            fn lookup_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
+                aux.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().into()
             }

             #[inline]
@@ -91,6 +91,16 @@ macro_rules! setup_tracked_fn {

         struct $Configuration;

+        $zalsa::register_jar! {
+            $zalsa::ErasedJar::erase::<$fn_name>()
+        }
+
+        #[allow(non_local_definitions)]
+        impl $zalsa::HasJar for $fn_name {
+            type Jar = $fn_name;
+            const KIND: $zalsa::JarKind = $zalsa::JarKind::TrackedFn;
+        }
+
         static $FN_CACHE: $zalsa::IngredientCache<$zalsa::function::IngredientImpl<$Configuration>> =
             $zalsa::IngredientCache::new();

@@ -108,7 +118,7 @@ macro_rules! setup_tracked_fn {
         impl $zalsa::SalsaStructInDb for $InternedData<'_> {
             type MemoIngredientMap = $zalsa::MemoIngredientSingletonIndex;

-            fn lookup_or_create_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
+            fn lookup_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
                 $zalsa::IngredientIndices::empty()
             }

@@ -155,27 +165,19 @@ macro_rules! setup_tracked_fn {
         impl $Configuration {
             fn fn_ingredient(db: &dyn $Db) -> &$zalsa::function::IngredientImpl<$Configuration> {
                 let zalsa = db.zalsa();
-                $FN_CACHE.get_or_create(zalsa, || {
-                    let jar_entry = zalsa.lookup_jar_by_type::<$Configuration>();
-                    // If the ingredient has already been inserted, we know that the downcaster
-                    // has also been registered. This is a fast-path for multi-database use cases
-                    // that bypass the ingredient cache and will always execute this closure.
-                    if let Some(index) = jar_entry.get() {
-                        return index;
-                    }
-
-                    <dyn $Db as $Db>::zalsa_register_downcaster(db);
-                    jar_entry.get_or_create()
-                })
+                $FN_CACHE
+                    .get_or_create(zalsa, || zalsa.lookup_jar_by_type::<$fn_name>())
+                    .get_or_init(|| <dyn $Db as $Db>::zalsa_register_downcaster(db))
             }

             pub fn fn_ingredient_mut(db: &mut dyn $Db) -> &mut $zalsa::function::IngredientImpl<Self> {
-                <dyn $Db as $Db>::zalsa_register_downcaster(db);
+                let view = <dyn $Db as $Db>::zalsa_register_downcaster(db);
                 let zalsa_mut = db.zalsa_mut();
-                let index = zalsa_mut.lookup_jar_by_type::<$Configuration>().get_or_create();
+                let index = zalsa_mut.lookup_jar_by_type::<$fn_name>();
                 let (ingredient, _) = zalsa_mut.lookup_ingredient_mut(index);
-                ingredient.assert_type_mut::<$zalsa::function::IngredientImpl<Self>>()
+                let ingredient = ingredient.assert_type_mut::<$zalsa::function::IngredientImpl<Self>>();
+                ingredient.get_or_init(|| view);
+                ingredient
             }

             $zalsa::macro_if! { $needs_interner =>

@@ -184,8 +186,7 @@ macro_rules! setup_tracked_fn {
                 ) -> &$zalsa::interned::IngredientImpl<$Configuration> {
                     let zalsa = db.zalsa();
                     $INTERN_CACHE.get_or_create(zalsa, || {
-                        <dyn $Db as $Db>::zalsa_register_downcaster(db);
-                        zalsa.lookup_jar_by_type::<$Configuration>().get_or_create().successor(0)
+                        zalsa.lookup_jar_by_type::<$fn_name>().successor(0)
                     })
                 }
             }

@@ -248,42 +249,31 @@ macro_rules! setup_tracked_fn {
             }
         }

-        impl $zalsa::Jar for $Configuration {
-            fn create_dependencies(zalsa: &$zalsa::Zalsa) -> $zalsa::IngredientIndices
-            where
-                Self: Sized
-            {
-                $zalsa::macro_if! {
-                    if $needs_interner {
-                        $zalsa::IngredientIndices::empty()
-                    } else {
-                        <$InternedData as $zalsa::SalsaStructInDb>::lookup_or_create_ingredient_index(zalsa)
-                    }
-                }
-            }
-
+        #[allow(non_local_definitions)]
+        impl $zalsa::Jar for $fn_name {
             fn create_ingredients(
-                zalsa: &$zalsa::Zalsa,
+                zalsa: &mut $zalsa::Zalsa,
                 first_index: $zalsa::IngredientIndex,
-                struct_index: $zalsa::IngredientIndices,
             ) -> Vec<Box<dyn $zalsa::Ingredient>> {
                 let struct_index: $zalsa::IngredientIndices = $zalsa::macro_if! {
                     if $needs_interner {
                         first_index.successor(0).into()
                     } else {
-                        struct_index
+                        // Note that struct ingredients are created before tracked functions,
+                        // so this cannot panic.
+                        <$InternedData as $zalsa::SalsaStructInDb>::lookup_ingredient_index(zalsa)
                     }
                 };

                 $zalsa::macro_if! { $needs_interner =>
-                    let intern_ingredient = <$zalsa::interned::IngredientImpl<$Configuration>>::new(
+                    let mut intern_ingredient = <$zalsa::interned::IngredientImpl<$Configuration>>::new(
                         first_index.successor(0)
                     );
                 }

                 let intern_ingredient_memo_types = $zalsa::macro_if! {
                     if $needs_interner {
-                        Some($zalsa::Ingredient::memo_table_types(&intern_ingredient))
+                        Some($zalsa::Ingredient::memo_table_types_mut(&mut intern_ingredient))
                     } else {
                         None
                     }

@@ -303,7 +293,6 @@ macro_rules! setup_tracked_fn {
                     first_index,
                     memo_ingredient_indices,
                     $lru,
-                    zalsa.views().downcaster_for::<dyn $Db>(),
                 );
                 $zalsa::macro_if! {
                     if $needs_interner {

@@ -386,6 +375,7 @@ macro_rules! setup_tracked_fn {
                 $zalsa::return_mode_expression!(($return_mode, __, __), $output_ty, result,)
             })
         }

         // The struct needs be last in the macro expansion in order to make the tracked
         // function's ident be identified as a function, not a struct, during semantic highlighting.
         // for more details, see https://github.com/salsa-rs/salsa/pull/612.
@@ -107,8 +107,8 @@ macro_rules! setup_tracked_struct {
         std::marker::PhantomData<fn() -> &$db_lt ()>
     );

-    #[allow(clippy::all)]
     #[allow(dead_code)]
+    #[allow(clippy::all)]
     const _: () = {
         use salsa::plumbing as $zalsa;
         use $zalsa::tracked_struct as $zalsa_struct;

@@ -116,6 +116,15 @@ macro_rules! setup_tracked_struct {

         type $Configuration = $Struct<'static>;

+        impl<$db_lt> $zalsa::HasJar for $Struct<$db_lt> {
+            type Jar = $zalsa_struct::JarImpl<$Configuration>;
+            const KIND: $zalsa::JarKind = $zalsa::JarKind::Struct;
+        }
+
+        $zalsa::register_jar! {
+            $zalsa::ErasedJar::erase::<$Struct<'static>>()
+        }
+
         impl $zalsa_struct::Configuration for $Configuration {
             const LOCATION: $zalsa::Location = $zalsa::Location {
                 file: file!(),

@@ -188,7 +197,7 @@ macro_rules! setup_tracked_struct {
                     $zalsa::IngredientCache::new();

                 CACHE.get_or_create(zalsa, || {
-                    zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create()
+                    zalsa.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>()
                 })
             }
         }

@@ -210,8 +219,8 @@ macro_rules! setup_tracked_struct {
         impl $zalsa::SalsaStructInDb for $Struct<'_> {
             type MemoIngredientMap = $zalsa::MemoIngredientSingletonIndex;

-            fn lookup_or_create_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
-                aux.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().get_or_create().into()
+            fn lookup_ingredient_index(aux: &$zalsa::Zalsa) -> $zalsa::IngredientIndices {
+                aux.lookup_jar_by_type::<$zalsa_struct::JarImpl<$Configuration>>().into()
             }

             #[inline]
@@ -110,7 +110,7 @@ impl DbMacro {
         let trait_name = &input.ident;
         input.items.push(parse_quote! {
             #[doc(hidden)]
-            fn zalsa_register_downcaster(&self);
+            fn zalsa_register_downcaster(&self) -> salsa::plumbing::DatabaseDownCaster<dyn #trait_name>;
         });

         let comment = format!(" Downcast a [`dyn Database`] to a [`dyn {trait_name}`]");

@@ -135,10 +135,11 @@ impl DbMacro {
         };

         input.items.push(parse_quote! {
+            #[cold]
+            #[inline(never)]
             #[doc(hidden)]
-            #[inline(always)]
-            fn zalsa_register_downcaster(&self) {
-                salsa::plumbing::views(self).add(<Self as #TraitPath>::downcast);
+            fn zalsa_register_downcaster(&self) -> salsa::plumbing::DatabaseDownCaster<dyn #TraitPath> {
+                salsa::plumbing::views(self).add(<Self as #TraitPath>::downcast)
             }
         });
         input.items.push(parse_quote! {
@@ -15,7 +15,7 @@ pub fn input_ids(hygiene: &Hygiene, sig: &syn::Signature, skip: usize) -> Vec<sy
                 }
             }

-            hygiene.ident(&format!("input{index}"))
+            hygiene.ident(format!("input{index}"))
         })
         .collect()
 }

@@ -50,10 +50,10 @@ impl Hygiene {
     /// Generates an identifier similar to `text` but
     /// distinct from any identifiers that appear in the user's
     /// code.
-    pub(crate) fn ident(&self, text: &str) -> syn::Ident {
+    pub(crate) fn ident(&self, text: impl AsRef<str>) -> syn::Ident {
         // Make the default be `foo_` rather than `foo` -- this helps detect
         // cases where people wrote `foo` instead of `#foo` or `$foo` in the generated code.
-        let mut buffer = format!("{text}_");
+        let mut buffer = format!("{}_", text.as_ref());

         while self.user_tokens.contains(&buffer) {
             buffer.push('_');

@@ -61,4 +61,12 @@ impl Hygiene {

         syn::Ident::new(&buffer, proc_macro2::Span::call_site())
     }
+
+    /// Generates an identifier similar to `text` but distinct from any identifiers
+    /// that appear in the user's code.
+    ///
+    /// The identifier must be unique relative to the `scope` identifier.
+    pub(crate) fn scoped_ident(&self, scope: &syn::Ident, text: &str) -> syn::Ident {
+        self.ident(format!("{scope}_{text}"))
+    }
 }
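The new `scoped_ident` helper prefixes a synthesized identifier with the tracked function's own name before running it through the usual hygiene machinery. A minimal illustration of the `{scope}_{text}` naming scheme only; the function names below are made-up examples, and the real helper additionally appends underscores until the result is collision-free:

// Standalone illustration of the `{scope}_{text}` scheme used by `scoped_ident`.
// `parse_file` and `type_check` are hypothetical tracked-function names.
fn scoped(scope: &str, text: &str) -> String {
    format!("{scope}_{text}")
}

fn main() {
    assert_eq!(scoped("parse_file", "Configuration"), "parse_file_Configuration");
    assert_eq!(scoped("type_check", "Configuration"), "type_check_Configuration");
}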
@@ -72,8 +72,8 @@ fn enum_impl(enum_item: syn::ItemEnum) -> syn::Result<TokenStream> {
             type MemoIngredientMap = zalsa::MemoIngredientIndices;

             #[inline]
-            fn lookup_or_create_ingredient_index(__zalsa: &zalsa::Zalsa) -> zalsa::IngredientIndices {
-                zalsa::IngredientIndices::merge([ #( <#variant_types as zalsa::SalsaStructInDb>::lookup_or_create_ingredient_index(__zalsa) ),* ])
+            fn lookup_ingredient_index(__zalsa: &zalsa::Zalsa) -> zalsa::IngredientIndices {
+                zalsa::IngredientIndices::merge([ #( <#variant_types as zalsa::SalsaStructInDb>::lookup_ingredient_index(__zalsa) ),* ])
             }

             #[inline]

@@ -132,10 +132,10 @@ impl Macro {
         inner_fn.sig.ident = self.hygiene.ident("inner");

         let zalsa = self.hygiene.ident("zalsa");
-        let Configuration = self.hygiene.ident("Configuration");
-        let InternedData = self.hygiene.ident("InternedData");
-        let FN_CACHE = self.hygiene.ident("FN_CACHE");
-        let INTERN_CACHE = self.hygiene.ident("INTERN_CACHE");
+        let Configuration = self.hygiene.scoped_ident(fn_name, "Configuration");
+        let InternedData = self.hygiene.scoped_ident(fn_name, "InternedData");
+        let FN_CACHE = self.hygiene.scoped_ident(fn_name, "FN_CACHE");
+        let INTERN_CACHE = self.hygiene.scoped_ident(fn_name, "INTERN_CACHE");
         let inner = &inner_fn.sig.ident;

         let function_type = function_type(&item);

@@ -99,7 +99,7 @@ impl Macro {
         });

         let InnerTrait = self.hygiene.ident("InnerTrait");
-        let inner_fn_name = self.hygiene.ident(&fn_item.sig.ident.to_string());
+        let inner_fn_name = self.hygiene.ident(fn_item.sig.ident.to_string());

         let AssociatedFunctionArguments {
             self_token,
@@ -10,7 +10,7 @@ use accumulated::{Accumulated, AnyAccumulated};
 use crate::cycle::CycleHeads;
 use crate::function::VerifyResult;
 use crate::ingredient::{Ingredient, Jar};
-use crate::plumbing::{IngredientIndices, ZalsaLocal};
+use crate::plumbing::ZalsaLocal;
 use crate::sync::Arc;
 use crate::table::memo::MemoTableTypes;
 use crate::zalsa::{IngredientIndex, Zalsa};

@@ -44,9 +44,8 @@ impl<A: Accumulator> Default for JarImpl<A> {

 impl<A: Accumulator> Jar for JarImpl<A> {
     fn create_ingredients(
-        _zalsa: &Zalsa,
+        _zalsa: &mut Zalsa,
         first_index: IngredientIndex,
-        _dependencies: IngredientIndices,
     ) -> Vec<Box<dyn Ingredient>> {
         vec![Box::new(<IngredientImpl<A>>::new(first_index))]
     }

@@ -64,7 +63,7 @@ pub struct IngredientImpl<A: Accumulator> {
 impl<A: Accumulator> IngredientImpl<A> {
     /// Find the accumulator ingredient for `A` in the database, if any.
     pub fn from_zalsa(zalsa: &Zalsa) -> Option<&Self> {
-        let index = zalsa.lookup_jar_by_type::<JarImpl<A>>().get_or_create();
+        let index = zalsa.lookup_jar_by_type::<JarImpl<A>>();
         let ingredient = zalsa.lookup_ingredient(index).assert_type::<Self>();
         Some(ingredient)
     }

@@ -115,7 +114,11 @@ impl<A: Accumulator> Ingredient for IngredientImpl<A> {
         A::DEBUG_NAME
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        unreachable!("accumulator does not allocate pages")
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
         unreachable!("accumulator does not allocate pages")
     }
 }
@@ -1,6 +1,7 @@
 use std::any::Any;
 use std::borrow::Cow;

+use crate::views::DatabaseDownCaster;
 use crate::zalsa::{IngredientIndex, ZalsaDatabase};
 use crate::{Durability, Revision};

@@ -80,9 +81,11 @@ pub trait Database: Send + AsDynDatabase + Any + ZalsaDatabase {
         crate::attach::attach(self, || op(self))
     }

+    #[cold]
+    #[inline(never)]
     #[doc(hidden)]
-    #[inline(always)]
-    fn zalsa_register_downcaster(&self) {
+    fn zalsa_register_downcaster(&self) -> DatabaseDownCaster<dyn Database> {
+        self.zalsa().views().downcaster_for::<dyn Database>()
         // The no-op downcaster is special cased in view caster construction.
     }
@@ -3,6 +3,7 @@ use std::any::Any;
 use std::fmt;
 use std::ptr::NonNull;
 use std::sync::atomic::Ordering;
+use std::sync::OnceLock;
 pub(crate) use sync::SyncGuard;

 use crate::accumulator::accumulated_map::{AccumulatedMap, InputAccumulatedValues};

@@ -129,7 +130,7 @@ pub struct IngredientImpl<C: Configuration> {
     ///
     /// The supplied database must be be the same as the database used to construct the [`Views`]
     /// instances that this downcaster was derived from.
-    view_caster: DatabaseDownCaster<C::DbView>,
+    view_caster: OnceLock<DatabaseDownCaster<C::DbView>>,

     sync_table: SyncTable,

@@ -156,18 +157,30 @@ where
         index: IngredientIndex,
         memo_ingredient_indices: <C::SalsaStruct<'static> as SalsaStructInDb>::MemoIngredientMap,
         lru: usize,
-        view_caster: DatabaseDownCaster<C::DbView>,
     ) -> Self {
         Self {
             index,
             memo_ingredient_indices,
             lru: lru::Lru::new(lru),
             deleted_entries: Default::default(),
-            view_caster,
+            view_caster: OnceLock::new(),
             sync_table: SyncTable::new(index),
         }
     }

+    /// Set the view-caster for this tracked function ingredient, if it has
+    /// not already been initialized.
+    #[inline]
+    pub fn get_or_init(
+        &self,
+        view_caster: impl FnOnce() -> DatabaseDownCaster<C::DbView>,
+    ) -> &Self {
+        // Note that we must set this lazily as we don't have access to the database
+        // type when ingredients are registered into the `Zalsa`.
+        self.view_caster.get_or_init(view_caster);
+        self
+    }
+
     #[inline]
     pub fn database_key_index(&self, key: Id) -> DatabaseKeyIndex {
         DatabaseKeyIndex::new(self.index, key)

@@ -226,6 +239,12 @@ where
     fn memo_ingredient_index(&self, zalsa: &Zalsa, id: Id) -> MemoIngredientIndex {
         self.memo_ingredient_indices.get_zalsa_id(zalsa, id)
     }
+
+    fn view_caster(&self) -> &DatabaseDownCaster<C::DbView> {
+        self.view_caster
+            .get()
+            .expect("tracked function ingredients cannot be accessed before calling `init`")
+    }
 }

 impl<C> Ingredient for IngredientImpl<C>

@@ -248,7 +267,7 @@ where
         cycle_heads: &mut CycleHeads,
     ) -> VerifyResult {
         // SAFETY: The `db` belongs to the ingredient as per caller invariant
-        let db = unsafe { self.view_caster.downcast_unchecked(db) };
+        let db = unsafe { self.view_caster().downcast_unchecked(db) };
         self.maybe_changed_after(db, input, revision, cycle_heads)
     }

@@ -339,7 +358,11 @@ where
         C::DEBUG_NAME
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        unreachable!("function does not allocate pages")
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
         unreachable!("function does not allocate pages")
     }

@@ -352,7 +375,7 @@ where
         db: &'db dyn Database,
         key_index: Id,
     ) -> (Option<&'db AccumulatedMap>, InputAccumulatedValues) {
-        let db = self.view_caster.downcast(db);
+        let db = self.view_caster().downcast(db);
         self.accumulated_map(db, key_index)
     }
 }
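The `OnceLock` change above follows the standard library's write-once initialization pattern: the first caller installs the value, later callers reuse it, and reads panic if nothing was installed yet. A self-contained sketch of that pattern, with a plain function pointer standing in for the salsa-internal `DatabaseDownCaster` type (all names here are illustrative, not salsa's API):

use std::sync::OnceLock;

// `Caster` is a stand-in for the real downcaster type; here it is just a function pointer.
type Caster = fn(u32) -> u64;

struct LazyCaster {
    slot: OnceLock<Caster>,
}

impl LazyCaster {
    const fn new() -> Self {
        Self { slot: OnceLock::new() }
    }

    // Install the caster on first use; subsequent calls keep the first value.
    fn get_or_init(&self, make: impl FnOnce() -> Caster) -> &Self {
        self.slot.get_or_init(make);
        self
    }

    // Reading before initialization is a logic error, mirroring the `expect` in the diff.
    fn caster(&self) -> Caster {
        *self.slot.get().expect("caster accessed before initialization")
    }
}

fn double(x: u32) -> u64 {
    u64::from(x) * 2
}

fn main() {
    static CASTER: LazyCaster = LazyCaster::new();
    CASTER.get_or_init(|| double as Caster);
    assert_eq!(CASTER.caster()(21), 42);
}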
@@ -466,7 +466,7 @@ mod _memory_usage {
     impl SalsaStructInDb for DummyStruct {
         type MemoIngredientMap = MemoIngredientSingletonIndex;

-        fn lookup_or_create_ingredient_index(_: &Zalsa) -> IngredientIndices {
+        fn lookup_ingredient_index(_: &Zalsa) -> IngredientIndices {
             unimplemented!()
         }
@@ -6,7 +6,6 @@ use crate::cycle::{
     empty_cycle_heads, CycleHeads, CycleRecoveryStrategy, IterationCount, ProvisionalStatus,
 };
 use crate::function::VerifyResult;
-use crate::plumbing::IngredientIndices;
 use crate::runtime::Running;
 use crate::sync::Arc;
 use crate::table::memo::MemoTableTypes;

@@ -16,35 +15,20 @@ use crate::zalsa_local::QueryOriginRef;
 use crate::{Database, DatabaseKeyIndex, Id, Revision};

 /// A "jar" is a group of ingredients that are added atomically.
 ///
 /// Each type implementing jar can be added to the database at most once.
 pub trait Jar: Any {
-    /// This creates the ingredient dependencies of this jar. We need to split this from `create_ingredients()`
-    /// because while `create_ingredients()` is called, a lock on the ingredient map is held (to guarantee
-    /// atomicity), so other ingredients could not be created.
-    ///
-    /// Only tracked fns use this.
-    fn create_dependencies(_zalsa: &Zalsa) -> IngredientIndices
-    where
-        Self: Sized,
-    {
-        IngredientIndices::empty()
-    }
-
     /// Create the ingredients given the index of the first one.
     ///
     /// All subsequent ingredients will be assigned contiguous indices.
     fn create_ingredients(
-        zalsa: &Zalsa,
+        zalsa: &mut Zalsa,
         first_index: IngredientIndex,
-        dependencies: IngredientIndices,
-    ) -> Vec<Box<dyn Ingredient>>
-    where
-        Self: Sized;
+    ) -> Vec<Box<dyn Ingredient>>;

     /// This returns the [`TypeId`] of the ID struct, that is, the struct that wraps `salsa::Id`
     /// and carry the name of the jar.
-    fn id_struct_type_id() -> TypeId
-    where
-        Self: Sized;
+    fn id_struct_type_id() -> TypeId;
 }

 pub struct Location {

@@ -151,7 +135,9 @@ pub trait Ingredient: Any + std::fmt::Debug + Send + Sync {
         );
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes>;
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes>;
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes>;

     fn fmt_index(&self, index: crate::Id, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt_index(self.debug_name(), index, fmt)
src/ingredient_cache.rs (new file, 202 lines)

pub use imp::IngredientCache;

#[cfg(feature = "inventory")]
mod imp {
    use crate::plumbing::Ingredient;
    use crate::sync::atomic::{self, AtomicU32, Ordering};
    use crate::zalsa::Zalsa;
    use crate::IngredientIndex;

    use std::marker::PhantomData;

    /// Caches an ingredient index.
    ///
    /// Note that all ingredients are statically registered with `inventory`, so their
    /// indices should be stable across any databases.
    pub struct IngredientCache<I>
    where
        I: Ingredient,
    {
        ingredient_index: AtomicU32,
        phantom: PhantomData<fn() -> I>,
    }

    impl<I> Default for IngredientCache<I>
    where
        I: Ingredient,
    {
        fn default() -> Self {
            Self::new()
        }
    }

    impl<I> IngredientCache<I>
    where
        I: Ingredient,
    {
        const UNINITIALIZED: u32 = u32::MAX;

        /// Create a new cache
        pub const fn new() -> Self {
            Self {
                ingredient_index: atomic::AtomicU32::new(Self::UNINITIALIZED),
                phantom: PhantomData,
            }
        }

        /// Get a reference to the ingredient in the database.
        ///
        /// If the ingredient index is not already in the cache, it will be loaded and cached.
        pub fn get_or_create<'db>(
            &self,
            zalsa: &'db Zalsa,
            load_index: impl Fn() -> IngredientIndex,
        ) -> &'db I {
            let mut ingredient_index = self.ingredient_index.load(Ordering::Acquire);
            if ingredient_index == Self::UNINITIALIZED {
                ingredient_index = self.get_or_create_index_slow(load_index).as_u32();
            };

            zalsa
                .lookup_ingredient(IngredientIndex::from_unchecked(ingredient_index))
                .assert_type()
        }

        #[cold]
        #[inline(never)]
        fn get_or_create_index_slow(
            &self,
            load_index: impl Fn() -> IngredientIndex,
        ) -> IngredientIndex {
            let ingredient_index = load_index();

            // It doesn't matter if we overwrite any stores, as `create_index` should
            // always return the same index when the `inventory` feature is enabled.
            self.ingredient_index
                .store(ingredient_index.as_u32(), Ordering::Release);

            ingredient_index
        }
    }
}

#[cfg(not(feature = "inventory"))]
mod imp {
    use crate::nonce::Nonce;
    use crate::plumbing::Ingredient;
    use crate::sync::atomic::{AtomicU64, Ordering};
    use crate::zalsa::{StorageNonce, Zalsa};
    use crate::IngredientIndex;

    use std::marker::PhantomData;
    use std::mem;

    /// Caches an ingredient index.
    ///
    /// With manual registration, ingredient indices can vary across databases,
    /// but we can retain most of the benefit by optimizing for the the case of
    /// a single database.
    pub struct IngredientCache<I>
    where
        I: Ingredient,
    {
        // A packed representation of `Option<(Nonce<StorageNonce>, IngredientIndex)>`.
        //
        // This allows us to replace a lock in favor of an atomic load. This works thanks to `Nonce`
        // having a niche, which means the entire type can fit into an `AtomicU64`.
        cached_data: AtomicU64,
        phantom: PhantomData<fn() -> I>,
    }

    impl<I> Default for IngredientCache<I>
    where
        I: Ingredient,
    {
        fn default() -> Self {
            Self::new()
        }
    }

    impl<I> IngredientCache<I>
    where
        I: Ingredient,
    {
        const UNINITIALIZED: u64 = 0;

        /// Create a new cache
        pub const fn new() -> Self {
            Self {
                cached_data: AtomicU64::new(Self::UNINITIALIZED),
                phantom: PhantomData,
            }
        }

        /// Get a reference to the ingredient in the database.
        ///
        /// If the ingredient is not already in the cache, it will be created.
        #[inline(always)]
        pub fn get_or_create<'db>(
            &self,
            zalsa: &'db Zalsa,
            create_index: impl Fn() -> IngredientIndex,
        ) -> &'db I {
            let index = self.get_or_create_index(zalsa, create_index);
            zalsa.lookup_ingredient(index).assert_type::<I>()
        }

        pub fn get_or_create_index(
            &self,
            zalsa: &Zalsa,
            create_index: impl Fn() -> IngredientIndex,
        ) -> IngredientIndex {
            const _: () = assert!(
                mem::size_of::<(Nonce<StorageNonce>, IngredientIndex)>() == mem::size_of::<u64>()
            );

            let cached_data = self.cached_data.load(Ordering::Acquire);
            if cached_data == Self::UNINITIALIZED {
                return self.get_or_create_index_slow(zalsa, create_index);
            };

            // Unpack our `u64` into the nonce and index.
            let index = IngredientIndex::from_unchecked(cached_data as u32);

            // SAFETY: We've checked against `UNINITIALIZED` (0) above and so the upper bits must be non-zero.
            let nonce = crate::nonce::Nonce::<StorageNonce>::from_u32(unsafe {
                std::num::NonZeroU32::new_unchecked((cached_data >> u32::BITS) as u32)
            });

            // The data was cached for a different database, we have to ensure the ingredient was
            // created in ours.
            if zalsa.nonce() != nonce {
                return create_index();
            }

            index
        }

        #[cold]
        #[inline(never)]
        fn get_or_create_index_slow(
            &self,
            zalsa: &Zalsa,
            create_index: impl Fn() -> IngredientIndex,
        ) -> IngredientIndex {
            let index = create_index();
            let nonce = zalsa.nonce().into_u32().get() as u64;
            let packed = (nonce << u32::BITS) | (index.as_u32() as u64);
            debug_assert_ne!(packed, IngredientCache::<I>::UNINITIALIZED);

            // Discard the result, whether we won over the cache or not doesn't matter.
            _ = self.cached_data.compare_exchange(
                IngredientCache::<I>::UNINITIALIZED,
                packed,
                Ordering::Release,
                Ordering::Relaxed,
            );

            // Use our locally computed index regardless of which one was cached.
            index
        }
    }
}
src/input.rs (13 changes)

@@ -56,9 +56,8 @@ impl<C: Configuration> Default for JarImpl<C> {

 impl<C: Configuration> Jar for JarImpl<C> {
     fn create_ingredients(
-        _zalsa: &Zalsa,
+        _zalsa: &mut Zalsa,
         struct_index: crate::zalsa::IngredientIndex,
-        _dependencies: crate::memo_ingredient_indices::IngredientIndices,
     ) -> Vec<Box<dyn Ingredient>> {
         let struct_ingredient: IngredientImpl<C> = IngredientImpl::new(struct_index);

@@ -117,7 +116,7 @@ impl<C: Configuration> IngredientImpl<C> {
             fields,
             revisions,
             durabilities,
-            memos: Default::default(),
+            memos: MemoTable::new(self.memo_table_types()),
         })
     });

@@ -238,8 +237,12 @@ impl<C: Configuration> Ingredient for IngredientImpl<C> {
         C::DEBUG_NAME
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
-        self.memo_table_types.clone()
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        &self.memo_table_types
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
+        &mut self.memo_table_types
     }

     /// Returns memory usage information about any inputs.
@@ -76,7 +76,11 @@ where
         C::FIELD_DEBUG_NAMES[self.field_index]
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        unreachable!("input fields do not allocate pages")
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
         unreachable!("input fields do not allocate pages")
     }
 }
@@ -15,7 +15,7 @@ use crate::durability::Durability;
 use crate::function::VerifyResult;
 use crate::id::{AsId, FromId};
 use crate::ingredient::Ingredient;
-use crate::plumbing::{IngredientIndices, Jar, ZalsaLocal};
+use crate::plumbing::{Jar, ZalsaLocal};
 use crate::revision::AtomicRevision;
 use crate::sync::{Arc, Mutex, OnceLock};
 use crate::table::memo::{MemoTable, MemoTableTypes, MemoTableWithTypesMut};

@@ -224,9 +224,8 @@ impl<C: Configuration> Default for JarImpl<C> {

 impl<C: Configuration> Jar for JarImpl<C> {
     fn create_ingredients(
-        _zalsa: &Zalsa,
+        _zalsa: &mut Zalsa,
         first_index: IngredientIndex,
-        _dependencies: IngredientIndices,
     ) -> Vec<Box<dyn Ingredient>> {
         vec![Box::new(IngredientImpl::<C>::new(first_index)) as _]
     }

@@ -416,7 +415,6 @@ where
         // Fill up the table for the first few revisions without attempting garbage collection.
         if !self.revision_queue.is_primed() {
             return self.intern_id_cold(
-                db,
                 key,
                 zalsa,
                 zalsa_local,

@@ -530,16 +528,16 @@ where
             // Insert the new value into the ID map.
             shard.key_map.insert_unique(hash, new_id, hasher);

-            // Free the memos associated with the previous interned value.
-            //
             // SAFETY: We hold the lock for the shard containing the value, and the
             // value has not been interned in the current revision, so no references to
             // it can exist.
-            let mut memo_table = unsafe { std::mem::take(&mut *value.memos.get()) };
+            let memo_table = unsafe { &mut *value.memos.get() };

+            // Free the memos associated with the previous interned value.
+            //
             // SAFETY: The memo table belongs to a value that we allocated, so it has the
             // correct type.
-            unsafe { self.clear_memos(zalsa, &mut memo_table, new_id) };
+            unsafe { self.clear_memos(zalsa, memo_table, new_id) };

             if value_shared.is_reusable::<C>() {
                 // Move the value to the front of the LRU list.

@@ -553,16 +551,7 @@ where
         }

         // If we could not find any stale slots, we are forced to allocate a new one.
-        self.intern_id_cold(
-            db,
-            key,
-            zalsa,
-            zalsa_local,
-            assemble,
-            shard,
-            shard_index,
-            hash,
-        )
+        self.intern_id_cold(key, zalsa, zalsa_local, assemble, shard, shard_index, hash)
     }

     /// The cold path for interning a value, allocating a new slot.

@@ -571,7 +560,6 @@ where
     #[allow(clippy::too_many_arguments)]
     fn intern_id_cold<'db, Key>(
         &'db self,
-        _db: &'db dyn crate::Database,
         key: Key,
         zalsa: &Zalsa,
         zalsa_local: &ZalsaLocal,

@@ -598,7 +586,7 @@ where
         let id = zalsa_local.allocate(zalsa, self.ingredient_index, |id| Value::<C> {
             shard: shard_index as u16,
             link: LinkedListLink::new(),
-            memos: UnsafeCell::new(MemoTable::default()),
+            memos: UnsafeCell::new(MemoTable::new(self.memo_table_types())),
             // SAFETY: We call `from_internal_data` to restore the correct lifetime before access.
             fields: UnsafeCell::new(unsafe { self.to_internal_data(assemble(id, key)) }),
             shared: UnsafeCell::new(ValueShared {

@@ -696,6 +684,9 @@ where
         };

         std::mem::forget(table_guard);
+
+        // Reset the table after having dropped any memos.
+        memo_table.reset();
     }

     // Hashes the value by its fields.

@@ -849,8 +840,12 @@ where
         C::DEBUG_NAME
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
-        self.memo_table_types.clone()
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        &self.memo_table_types
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
+        &mut self.memo_table_types
     }

     /// Returns memory usage information about any interned values.
src/lib.rs (15 changes)

@@ -14,13 +14,11 @@ mod function;
 mod hash;
 mod id;
 mod ingredient;
+mod ingredient_cache;
 mod input;
 mod interned;
 mod key;
 mod memo_ingredient_indices;
-mod nonce;
-#[cfg(feature = "rayon")]
-mod parallel;
 mod return_mode;
 mod revision;
 mod runtime;

@@ -34,6 +32,12 @@ mod views;
 mod zalsa;
 mod zalsa_local;

+#[cfg(not(feature = "inventory"))]
+mod nonce;
+
+#[cfg(feature = "rayon")]
+mod parallel;
+
 #[cfg(feature = "rayon")]
 pub use parallel::{join, par_map};
 #[cfg(feature = "macros")]

@@ -90,6 +94,7 @@ pub mod plumbing {
     pub use crate::durability::Durability;
     pub use crate::id::{AsId, FromId, FromIdWithDb, Id};
     pub use crate::ingredient::{Ingredient, Jar, Location};
+    pub use crate::ingredient_cache::IngredientCache;
     pub use crate::key::DatabaseKeyIndex;
     pub use crate::memo_ingredient_indices::{
         IngredientIndices, MemoIngredientIndices, MemoIngredientMap, MemoIngredientSingletonIndex,

@@ -102,8 +107,10 @@ pub mod plumbing {
     pub use crate::tracked_struct::TrackedStructInDb;
     pub use crate::update::helper::{Dispatch as UpdateDispatch, Fallback as UpdateFallback};
     pub use crate::update::{always_update, Update};
+    pub use crate::views::DatabaseDownCaster;
     pub use crate::zalsa::{
-        transmute_data_ptr, views, IngredientCache, IngredientIndex, Zalsa, ZalsaDatabase,
+        register_jar, transmute_data_ptr, views, ErasedJar, HasJar, IngredientIndex, JarKind,
+        Zalsa, ZalsaDatabase,
     };
     pub use crate::zalsa_local::ZalsaLocal;
@@ -49,11 +49,11 @@ pub trait NewMemoIngredientIndices {
     ///
     /// The memo types must be correct.
     unsafe fn create(
-        zalsa: &Zalsa,
+        zalsa: &mut Zalsa,
         struct_indices: IngredientIndices,
         ingredient: IngredientIndex,
         memo_type: MemoEntryType,
-        intern_ingredient_memo_types: Option<Arc<MemoTableTypes>>,
+        intern_ingredient_memo_types: Option<&mut Arc<MemoTableTypes>>,
     ) -> Self;
 }
@@ -62,34 +62,39 @@ impl NewMemoIngredientIndices for MemoIngredientIndices {
     ///
     /// The memo types must be correct.
     unsafe fn create(
-        zalsa: &Zalsa,
+        zalsa: &mut Zalsa,
         struct_indices: IngredientIndices,
         ingredient: IngredientIndex,
         memo_type: MemoEntryType,
-        _intern_ingredient_memo_types: Option<Arc<MemoTableTypes>>,
+        _intern_ingredient_memo_types: Option<&mut Arc<MemoTableTypes>>,
     ) -> Self {
         debug_assert!(
             _intern_ingredient_memo_types.is_none(),
             "intern ingredient can only have a singleton memo ingredient"
         );

         let Some(&last) = struct_indices.indices.last() else {
             unreachable!("Attempting to construct struct memo mapping for non tracked function?")
         };

         let mut indices = Vec::new();
         indices.resize(
             (last.as_u32() as usize) + 1,
             MemoIngredientIndex::from_usize((u32::MAX - 1) as usize),
         );

         for &struct_ingredient in &struct_indices.indices {
-            let memo_types = zalsa
-                .lookup_ingredient(struct_ingredient)
-                .memo_table_types();
-
-            let mi = zalsa.next_memo_ingredient_index(struct_ingredient, ingredient);
-            memo_types.set(mi, &memo_type);
-
-            indices[struct_ingredient.as_u32() as usize] = mi;
+            let memo_ingredient_index =
+                zalsa.next_memo_ingredient_index(struct_ingredient, ingredient);
+            indices[struct_ingredient.as_u32() as usize] = memo_ingredient_index;
+
+            let (struct_ingredient, _) = zalsa.lookup_ingredient_mut(struct_ingredient);
+            let memo_types = Arc::get_mut(struct_ingredient.memo_table_types_mut())
+                .expect("memo tables are not shared until database initialization is complete");
+
+            memo_types.set(memo_ingredient_index, memo_type);
         }

         MemoIngredientIndices {
             indices: indices.into_boxed_slice(),
         }
@@ -146,25 +151,27 @@ impl MemoIngredientMap for MemoIngredientSingletonIndex {
 impl NewMemoIngredientIndices for MemoIngredientSingletonIndex {
     #[inline]
     unsafe fn create(
-        zalsa: &Zalsa,
+        zalsa: &mut Zalsa,
         indices: IngredientIndices,
         ingredient: IngredientIndex,
         memo_type: MemoEntryType,
-        intern_ingredient_memo_types: Option<Arc<MemoTableTypes>>,
+        intern_ingredient_memo_types: Option<&mut Arc<MemoTableTypes>>,
     ) -> Self {
         let &[struct_ingredient] = &*indices.indices else {
             unreachable!("Attempting to construct struct memo mapping from enum?")
         };

+        let memo_ingredient_index = zalsa.next_memo_ingredient_index(struct_ingredient, ingredient);
         let memo_types = intern_ingredient_memo_types.unwrap_or_else(|| {
-            zalsa
-                .lookup_ingredient(struct_ingredient)
-                .memo_table_types()
+            let (struct_ingredient, _) = zalsa.lookup_ingredient_mut(struct_ingredient);
+            struct_ingredient.memo_table_types_mut()
         });

-        let mi = zalsa.next_memo_ingredient_index(struct_ingredient, ingredient);
-        memo_types.set(mi, &memo_type);
-        Self(mi)
+        Arc::get_mut(memo_types)
+            .expect("memo tables are not shared until database initialization is complete")
+            .set(memo_ingredient_index, memo_type);
+
+        Self(memo_ingredient_index)
     }
 }
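Note: the `expect("memo tables are not shared until database initialization is complete")` calls above lean on `Arc::get_mut`, which only hands out a mutable reference while the `Arc` has a single owner. A minimal sketch of that pattern, independent of salsa's types (the `Types` struct here is a made-up stand-in, not a salsa type):

```rust
use std::sync::Arc;

// Hypothetical stand-in for `MemoTableTypes`.
#[derive(Default)]
struct Types {
    names: Vec<&'static str>,
}

fn main() {
    let mut types = Arc::new(Types::default());

    // During initialization the Arc has exactly one owner, so `get_mut`
    // succeeds and the table can be mutated in place without locks.
    Arc::get_mut(&mut types)
        .expect("not shared until initialization is complete")
        .names
        .push("memo-a");

    // Once the Arc has been cloned (i.e. shared), `get_mut` returns None.
    let shared = Arc::clone(&types);
    assert!(Arc::get_mut(&mut types).is_none());
    assert_eq!(shared.names, ["memo-a"]);
}
```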
@@ -16,7 +16,7 @@ pub trait SalsaStructInDb: Sized {
     /// While implementors of this trait may call [`crate::zalsa::JarEntry::get_or_create`]
     /// to create the ingredient, they aren't required to. For example, supertypes recursively
     /// call [`crate::zalsa::JarEntry::get_or_create`] for their variants and combine them.
-    fn lookup_or_create_ingredient_index(zalsa: &Zalsa) -> IngredientIndices;
+    fn lookup_ingredient_index(zalsa: &Zalsa) -> IngredientIndices;

     /// Plumbing to support nested salsa supertypes.
     ///
@@ -3,7 +3,7 @@ use std::marker::PhantomData;
 use std::panic::RefUnwindSafe;

 use crate::sync::{Arc, Condvar, Mutex};
-use crate::zalsa::{Zalsa, ZalsaDatabase};
+use crate::zalsa::{ErasedJar, HasJar, Zalsa, ZalsaDatabase};
 use crate::zalsa_local::{self, ZalsaLocal};
 use crate::{Database, Event, EventKind};

@@ -42,8 +42,15 @@ impl<Db: Database> Default for StorageHandle<Db> {

 impl<Db: Database> StorageHandle<Db> {
     pub fn new(event_callback: Option<Box<dyn Fn(crate::Event) + Send + Sync + 'static>>) -> Self {
+        Self::with_jars(event_callback, Vec::new())
+    }
+
+    fn with_jars(
+        event_callback: Option<Box<dyn Fn(crate::Event) + Send + Sync + 'static>>,
+        jars: Vec<ErasedJar>,
+    ) -> Self {
         Self {
-            zalsa_impl: Arc::new(Zalsa::new::<Db>(event_callback)),
+            zalsa_impl: Arc::new(Zalsa::new::<Db>(event_callback, jars)),
             coordinate: CoordinateDrop(Arc::new(Coordinate {
                 clones: Mutex::new(1),
                 cvar: Default::default(),
@@ -115,6 +122,11 @@ impl<Db: Database> Storage<Db> {
         }
     }

+    /// Returns a builder for database storage.
+    pub fn builder() -> StorageBuilder<Db> {
+        StorageBuilder::default()
+    }
+
     /// Convert this instance of [`Storage`] into a [`StorageHandle`].
     ///
     /// This will discard the local state of this [`Storage`], thereby returning a value that
@@ -168,6 +180,54 @@ impl<Db: Database> Storage<Db> {
     // ANCHOR_END: cancel_other_workers
 }

+/// A builder for a [`Storage`] instance.
+///
+/// This type can be created with the [`Storage::builder`] function.
+pub struct StorageBuilder<Db> {
+    jars: Vec<ErasedJar>,
+    event_callback: Option<Box<dyn Fn(crate::Event) + Send + Sync + 'static>>,
+    _db: PhantomData<Db>,
+}
+
+impl<Db> Default for StorageBuilder<Db> {
+    fn default() -> Self {
+        Self {
+            jars: Vec::new(),
+            event_callback: None,
+            _db: PhantomData,
+        }
+    }
+}
+
+impl<Db: Database> StorageBuilder<Db> {
+    /// Set a callback for salsa events.
+    ///
+    /// The `event_callback` function will be invoked by the salsa runtime at various points during execution.
+    pub fn event_callback(
+        mut self,
+        callback: Box<dyn Fn(crate::Event) + Send + Sync + 'static>,
+    ) -> Self {
+        self.event_callback = Some(callback);
+        self
+    }
+
+    /// Manually register an ingredient.
+    ///
+    /// Manual ingredient registration is necessary when the `inventory` feature is disabled.
+    pub fn ingredient<I: HasJar>(mut self) -> Self {
+        self.jars.push(ErasedJar::erase::<I>());
+        self
+    }
+
+    /// Construct the [`Storage`] using the provided builder options.
+    pub fn build(self) -> Storage<Db> {
+        Storage {
+            handle: StorageHandle::with_jars(self.event_callback, self.jars),
+            zalsa_local: ZalsaLocal::new(),
+        }
+    }
+}
+
 #[allow(clippy::undocumented_unsafe_blocks)] // TODO(#697) document safety
 unsafe impl<T: HasStorage> ZalsaDatabase for T {
     #[inline(always)]
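Note: the `StorageBuilder` added above is the entry point for manual registration when the `inventory` feature is disabled. A minimal usage sketch follows; the `MyInput` struct, `double` function, and `MyDatabase` type are assumptions for illustration, and the exact generated types accepted by `ingredient::<I>()` may differ from what is shown.

```rust
#[salsa::input]
struct MyInput {
    value: u32,
}

#[salsa::tracked]
fn double(db: &dyn salsa::Database, input: MyInput) -> u32 {
    input.value(db) * 2
}

#[salsa::db]
#[derive(Clone)]
struct MyDatabase {
    storage: salsa::Storage<Self>,
}

#[salsa::db]
impl salsa::Database for MyDatabase {}

fn main() {
    // Without `inventory`, ingredients are registered by hand before the
    // database is used; with `inventory` enabled this happens automatically.
    let storage = salsa::Storage::builder()
        .ingredient::<MyInput>()
        .ingredient::<double>()
        .build();
    let db = MyDatabase { storage };

    let input = MyInput::new(&db, 21);
    assert_eq!(double(&db, input), 42);
}
```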
96  src/sync.rs

@@ -5,40 +5,6 @@ pub mod shim {
     pub use shuttle::sync::*;
     pub use shuttle::{thread, thread_local};

-    pub mod papaya {
-        use std::hash::{BuildHasher, Hash};
-        use std::marker::PhantomData;
-
-        pub struct HashMap<K, V, S>(super::Mutex<std::collections::HashMap<K, V, S>>);
-
-        impl<K, V, S: Default> Default for HashMap<K, V, S> {
-            fn default() -> Self {
-                Self(super::Mutex::default())
-            }
-        }
-
-        pub struct LocalGuard<'a>(PhantomData<&'a ()>);
-
-        impl<K, V, S> HashMap<K, V, S>
-        where
-            K: Eq + Hash,
-            V: Clone,
-            S: BuildHasher,
-        {
-            pub fn guard(&self) -> LocalGuard<'_> {
-                LocalGuard(PhantomData)
-            }
-
-            pub fn get(&self, key: &K, _guard: &LocalGuard<'_>) -> Option<V> {
-                self.0.lock().get(key).cloned()
-            }
-
-            pub fn insert(&self, key: K, value: V, _guard: &LocalGuard<'_>) {
-                self.0.lock().insert(key, value);
-            }
-        }
-    }
-
     /// A wrapper around shuttle's `Mutex` to mirror parking-lot's API.
     #[derive(Default, Debug)]
     pub struct Mutex<T>(shuttle::sync::Mutex<T>);
@@ -57,24 +23,6 @@ pub mod shim {
         }
     }

-    /// A wrapper around shuttle's `RwLock` to mirror parking-lot's API.
-    #[derive(Default, Debug)]
-    pub struct RwLock<T>(shuttle::sync::RwLock<T>);
-
-    impl<T> RwLock<T> {
-        pub fn read(&self) -> RwLockReadGuard<'_, T> {
-            self.0.read().unwrap()
-        }
-
-        pub fn write(&self) -> RwLockWriteGuard<'_, T> {
-            self.0.write().unwrap()
-        }
-
-        pub fn get_mut(&mut self) -> &mut T {
-            self.0.get_mut().unwrap()
-        }
-    }
-
     /// A wrapper around shuttle's `Condvar` to mirror parking-lot's API.
     #[derive(Default, Debug)]
     pub struct Condvar(shuttle::sync::Condvar);
@@ -164,7 +112,7 @@ pub mod shim {

 #[cfg(not(feature = "shuttle"))]
 pub mod shim {
-    pub use parking_lot::{Mutex, MutexGuard, RwLock};
+    pub use parking_lot::{Mutex, MutexGuard};
     pub use std::sync::*;
     pub use std::{thread, thread_local};

@@ -173,48 +121,6 @@ pub mod shim {
         pub use std::sync::atomic::*;
     }

-    pub mod papaya {
-        use std::hash::{BuildHasher, Hash};
-
-        pub use papaya::LocalGuard;
-
-        pub struct HashMap<K, V, S>(papaya::HashMap<K, V, S>);
-
-        impl<K, V, S: Default> Default for HashMap<K, V, S> {
-            fn default() -> Self {
-                Self(
-                    papaya::HashMap::builder()
-                        .capacity(256) // A relatively large capacity to hopefully avoid resizing.
-                        .resize_mode(papaya::ResizeMode::Blocking)
-                        .hasher(S::default())
-                        .build(),
-                )
-            }
-        }
-
-        impl<K, V, S> HashMap<K, V, S>
-        where
-            K: Eq + Hash,
-            V: Clone,
-            S: BuildHasher,
-        {
-            #[inline]
-            pub fn guard(&self) -> LocalGuard<'_> {
-                self.0.guard()
-            }
-
-            #[inline]
-            pub fn get(&self, key: &K, guard: &LocalGuard<'_>) -> Option<V> {
-                self.0.get(key, guard).cloned()
-            }
-
-            #[inline]
-            pub fn insert(&self, key: K, value: V, guard: &LocalGuard<'_>) {
-                self.0.insert(key, value, guard);
-            }
-        }
-    }
-
     /// A wrapper around parking-lot's `Condvar` to mirror shuttle's API.
     pub struct Condvar(parking_lot::Condvar);
@@ -3,19 +3,32 @@ use std::fmt::Debug;
 use std::mem;
 use std::ptr::{self, NonNull};

-use portable_atomic::hint::spin_loop;
-use thin_vec::ThinVec;
-
 use crate::sync::atomic::{AtomicPtr, Ordering};
-use crate::sync::{OnceLock, RwLock};
 use crate::{zalsa::MemoIngredientIndex, zalsa_local::QueryOriginRef};

 /// The "memo table" stores the memoized results of tracked function calls.
 /// Every tracked function must take a salsa struct as its first argument
 /// and memo tables are attached to those salsa structs as auxiliary data.
-#[derive(Default)]
 pub(crate) struct MemoTable {
-    memos: RwLock<ThinVec<MemoEntry>>,
+    memos: Box<[MemoEntry]>,
+}
+
+impl MemoTable {
+    /// Create a `MemoTable` with slots for memos from the provided `MemoTableTypes`.
+    pub fn new(types: &MemoTableTypes) -> Self {
+        Self {
+            memos: (0..types.len()).map(|_| MemoEntry::default()).collect(),
+        }
+    }
+
+    /// Reset any memos in the table.
+    ///
+    /// Note that the memo entries should be freed manually before calling this function.
+    pub fn reset(&mut self) {
+        for memo in &mut self.memos {
+            *memo = MemoEntry::default();
+        }
+    }
 }

 pub trait Memo: Any + Send + Sync {
@@ -50,13 +63,8 @@ struct MemoEntry {
     atomic_memo: AtomicPtr<DummyMemo>,
 }

-#[derive(Default)]
-pub struct MemoEntryType {
-    data: OnceLock<MemoEntryTypeData>,
-}
-
 #[derive(Clone, Copy, Debug)]
-struct MemoEntryTypeData {
+pub struct MemoEntryType {
     /// The `type_id` of the erased memo type `M`
     type_id: TypeId,

@@ -89,17 +97,10 @@ impl MemoEntryType {
     #[inline]
     pub fn of<M: Memo>() -> Self {
         Self {
-            data: OnceLock::from(MemoEntryTypeData {
-                type_id: TypeId::of::<M>(),
-                to_dyn_fn: Self::to_dyn_fn::<M>(),
-            }),
+            type_id: TypeId::of::<M>(),
+            to_dyn_fn: Self::to_dyn_fn::<M>(),
         }
     }
-
-    #[inline]
-    fn load(&self) -> Option<&MemoEntryTypeData> {
-        self.data.get()
-    }
 }

 /// Dummy placeholder type that we use when erasing the memo type `M` in [`MemoEntryData`][].
@@ -127,43 +128,21 @@ impl Memo for DummyMemo {

 #[derive(Default)]
 pub struct MemoTableTypes {
-    types: boxcar::Vec<MemoEntryType>,
+    types: Vec<MemoEntryType>,
 }

 impl MemoTableTypes {
     pub(crate) fn set(
-        &self,
+        &mut self,
         memo_ingredient_index: MemoIngredientIndex,
-        memo_type: &MemoEntryType,
+        memo_type: MemoEntryType,
     ) {
-        let memo_ingredient_index = memo_ingredient_index.as_usize();
-
-        // Try to create our entry if it has not already been created.
-        if memo_ingredient_index >= self.types.count() {
-            while self.types.push(MemoEntryType::default()) < memo_ingredient_index {}
-        }
-
-        loop {
-            let Some(memo_entry_type) = self.types.get(memo_ingredient_index) else {
-                // It's possible that someone else began pushing to our index but has not
-                // completed the entry's initialization yet, as `boxcar` is lock-free. This
-                // is extremely unlikely given initialization is just a handful of instructions.
-                // Additionally, this function is generally only called on startup, so we can
-                // just spin here.
-                spin_loop();
-                continue;
-            };
-
-            memo_entry_type
-                .data
-                .set(
-                    *memo_type.data.get().expect(
-                        "cannot provide an empty `MemoEntryType` for `MemoEntryType::set()`",
-                    ),
-                )
-                .expect("memo type should only be set once");
-            break;
-        }
+        self.types
+            .insert(memo_ingredient_index.as_usize(), memo_type);
+    }
+
+    pub fn len(&self) -> usize {
+        self.types.len()
     }

     /// # Safety
@@ -204,59 +183,25 @@ impl MemoTableWithTypes<'_> {
         assert_eq!(
             self.types
                 .types
-                .get(memo_ingredient_index.as_usize())
-                .and_then(MemoEntryType::load)?
+                .get(memo_ingredient_index.as_usize())?
                 .type_id,
             TypeId::of::<M>(),
             "inconsistent type-id for `{memo_ingredient_index:?}`"
         );

-        // If the memo slot is already occupied, it must already have the
-        // right type info etc, and we only need the read-lock.
-        if let Some(MemoEntry { atomic_memo }) = self
-            .memos
-            .memos
-            .read()
-            .get(memo_ingredient_index.as_usize())
-        {
-            let old_memo =
-                atomic_memo.swap(MemoEntryType::to_dummy(memo).as_ptr(), Ordering::AcqRel);
-
-            let old_memo = NonNull::new(old_memo);
-
-            // SAFETY: `type_id` check asserted above
-            return old_memo.map(|old_memo| unsafe { MemoEntryType::from_dummy(old_memo) });
-        }
-
-        // Otherwise we need the write lock.
-        self.insert_cold(memo_ingredient_index, memo)
-    }
-
-    #[cold]
-    fn insert_cold<M: Memo>(
-        self,
-        memo_ingredient_index: MemoIngredientIndex,
-        memo: NonNull<M>,
-    ) -> Option<NonNull<M>> {
-        let memo_ingredient_index = memo_ingredient_index.as_usize();
-        let mut memos = self.memos.memos.write();
-
-        // Grow the table if needed.
-        if memos.len() <= memo_ingredient_index {
-            let additional_len = memo_ingredient_index - memos.len() + 1;
-            memos.reserve(additional_len);
-            while memos.len() <= memo_ingredient_index {
-                memos.push(MemoEntry::default());
-            }
-        }
-
-        let old_entry = mem::replace(
-            memos[memo_ingredient_index].atomic_memo.get_mut(),
-            MemoEntryType::to_dummy(memo).as_ptr(),
-        );
-
-        // SAFETY: The `TypeId` is asserted in `insert()`.
-        NonNull::new(old_entry).map(|memo| unsafe { MemoEntryType::from_dummy(memo) })
+        // The memo table is pre-sized on creation based on the corresponding `MemoTableTypes`.
+        let MemoEntry { atomic_memo } = self
+            .memos
+            .memos
+            .get(memo_ingredient_index.as_usize())
+            .expect("accessed memo table with invalid index");
+
+        let old_memo = atomic_memo.swap(MemoEntryType::to_dummy(memo).as_ptr(), Ordering::AcqRel);
+
+        let old_memo = NonNull::new(old_memo);
+
+        // SAFETY: `type_id` check asserted above
+        old_memo.map(|old_memo| unsafe { MemoEntryType::from_dummy(old_memo) })
     }

     #[inline]
@@ -264,13 +209,8 @@ impl MemoTableWithTypes<'_> {
         self,
         memo_ingredient_index: MemoIngredientIndex,
     ) -> Option<NonNull<M>> {
-        let read = self.memos.memos.read();
-        let memo = read.get(memo_ingredient_index.as_usize())?;
-        let type_ = self
-            .types
-            .types
-            .get(memo_ingredient_index.as_usize())
-            .and_then(MemoEntryType::load)?;
+        let memo = self.memos.memos.get(memo_ingredient_index.as_usize())?;
+        let type_ = self.types.types.get(memo_ingredient_index.as_usize())?;
         assert_eq!(
             type_.type_id,
             TypeId::of::<M>(),
@@ -284,13 +224,12 @@ impl MemoTableWithTypes<'_> {
     #[cfg(feature = "salsa_unstable")]
     pub(crate) fn memory_usage(&self) -> Vec<crate::database::MemoInfo> {
         let mut memory_usage = Vec::new();
-        let memos = self.memos.memos.read();
-        for (index, memo) in memos.iter().enumerate() {
+        for (index, memo) in self.memos.memos.iter().enumerate() {
             let Some(memo) = NonNull::new(memo.atomic_memo.load(Ordering::Acquire)) else {
                 continue;
             };

-            let Some(type_) = self.types.types.get(index).and_then(MemoEntryType::load) else {
+            let Some(type_) = self.types.types.get(index) else {
                 continue;
             };

@@ -317,12 +256,7 @@ impl MemoTableWithTypesMut<'_> {
         memo_ingredient_index: MemoIngredientIndex,
         f: impl FnOnce(&mut M),
     ) {
-        let Some(type_) = self
-            .types
-            .types
-            .get(memo_ingredient_index.as_usize())
-            .and_then(MemoEntryType::load)
-        else {
+        let Some(type_) = self.types.types.get(memo_ingredient_index.as_usize()) else {
             return;
         };
         assert_eq!(
@@ -331,13 +265,13 @@ impl MemoTableWithTypesMut<'_> {
             "inconsistent type-id for `{memo_ingredient_index:?}`"
         );

-        // If the memo slot is already occupied, it must already have the
-        // right type info etc, and we only need the read-lock.
-        let memos = self.memos.memos.get_mut();
-        let Some(MemoEntry { atomic_memo }) = memos.get_mut(memo_ingredient_index.as_usize())
+        // The memo table is pre-sized on creation based on the corresponding `MemoTableTypes`.
+        let Some(MemoEntry { atomic_memo }) =
+            self.memos.memos.get_mut(memo_ingredient_index.as_usize())
         else {
             return;
         };

         let Some(memo) = NonNull::new(*atomic_memo.get_mut()) else {
             return;
         };
@@ -357,7 +291,7 @@ impl MemoTableWithTypesMut<'_> {
     #[inline]
     pub unsafe fn drop(&mut self) {
         let types = self.types.types.iter();
-        for ((_, type_), memo) in std::iter::zip(types, self.memos.memos.get_mut()) {
+        for (type_, memo) in std::iter::zip(types, &mut self.memos.memos) {
             // SAFETY: The types match as per our constructor invariant.
             unsafe { memo.take(type_) };
         }
@@ -371,12 +305,12 @@ impl MemoTableWithTypesMut<'_> {
         &mut self,
         mut f: impl FnMut(MemoIngredientIndex, Box<dyn Memo>),
     ) {
-        let memos = self.memos.memos.get_mut();
-        memos
+        self.memos
+            .memos
             .iter_mut()
             .zip(self.types.types.iter())
             .enumerate()
-            .filter_map(|(index, (memo, (_, type_)))| {
+            .filter_map(|(index, (memo, type_))| {
                 // SAFETY: The types match as per our constructor invariant.
                 let memo = unsafe { memo.take(type_)? };
                 Some((MemoIngredientIndex::from_usize(index), memo))
@@ -393,7 +327,6 @@ impl MemoEntry {
     unsafe fn take(&mut self, type_: &MemoEntryType) -> Option<Box<dyn Memo>> {
         let memo = mem::replace(self.atomic_memo.get_mut(), ptr::null_mut());
         let memo = NonNull::new(memo)?;
-        let type_ = type_.load()?;
         // SAFETY: Our preconditions.
         Some(unsafe { Box::from_raw((type_.to_dyn_fn)(memo).as_ptr()) })
     }
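Note: the new layout trades the lock-protected, growable table for one that is fully sized up front, so every slot is just an atomic pointer and no lock is needed on either the read or the write path. A rough illustration of the same idea with plain std types (a simplified model, not the salsa implementation; cleanup on drop is elided):

```rust
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

// One slot per memo type, fixed at construction time.
struct Table {
    slots: Box<[AtomicPtr<u32>]>,
}

impl Table {
    fn new(len: usize) -> Self {
        Self {
            slots: (0..len).map(|_| AtomicPtr::new(ptr::null_mut())).collect(),
        }
    }

    // Swapping a pre-existing slot is a single atomic operation; because the
    // table never resizes, no lock is ever required after construction.
    fn insert(&self, index: usize, value: Box<u32>) -> Option<Box<u32>> {
        let old = self.slots[index].swap(Box::into_raw(value), Ordering::AcqRel);
        // SAFETY: every non-null pointer stored in a slot came from Box::into_raw.
        (!old.is_null()).then(|| unsafe { Box::from_raw(old) })
    }
}

fn main() {
    let table = Table::new(2);
    assert!(table.insert(0, Box::new(1)).is_none());
    assert_eq!(table.insert(0, Box::new(2)).as_deref(), Some(&1));
}
```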
@@ -110,9 +110,8 @@ impl<C: Configuration> Default for JarImpl<C> {

 impl<C: Configuration> Jar for JarImpl<C> {
     fn create_ingredients(
-        _zalsa: &Zalsa,
+        _zalsa: &mut Zalsa,
         struct_index: crate::zalsa::IngredientIndex,
-        _dependencies: crate::memo_ingredient_indices::IngredientIndices,
     ) -> Vec<Box<dyn Ingredient>> {
         let struct_ingredient = <IngredientImpl<C>>::new(struct_index);

@@ -444,7 +443,7 @@ where
             // lifetime erase for storage
             fields: unsafe { mem::transmute::<C::Fields<'db>, C::Fields<'static>>(fields) },
             revisions: C::new_revisions(current_deps.changed_at),
-            memos: Default::default(),
+            memos: MemoTable::new(self.memo_table_types()),
         };

         while let Some(id) = self.free_list.pop() {
@@ -601,11 +600,11 @@ where
                 // Note that we hold the lock and have exclusive access to the tracked struct data,
                 // so there should be no live instances of IDs from the previous generation. We clear
                 // the memos and return a new ID here as if we have allocated a new slot.
-                let mut table = data.take_memo_table();
+                let memo_table = data.memo_table_mut();

                 // SAFETY: The memo table belongs to a value that we allocated, so it has the
                 // correct type.
-                unsafe { self.clear_memos(zalsa, &mut table, id) };
+                unsafe { self.clear_memos(zalsa, memo_table, id) };

                 id = id
                     .next_generation()
@@ -674,11 +673,11 @@ where

         // SAFETY: We have acquired the write lock
         let data = unsafe { &mut *data_raw };
-        let mut memo_table = data.take_memo_table();
+        let memo_table = data.memo_table_mut();

         // SAFETY: The memo table belongs to a value that we allocated, so it
         // has the correct type.
-        unsafe { self.clear_memos(zalsa, &mut memo_table, id) };
+        unsafe { self.clear_memos(zalsa, memo_table, id) };

         // now that all cleanup has occurred, make available for re-use
         self.free_list.push(id);
@@ -724,6 +723,9 @@ where
         };

         mem::forget(table_guard);
+
+        // Reset the table after having dropped any memos.
+        memo_table.reset();
     }

     /// Return reference to the field data ignoring dependency tracking.
@@ -849,8 +851,12 @@ where
         C::DEBUG_NAME
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
-        self.memo_table_types.clone()
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        &self.memo_table_types
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
+        &mut self.memo_table_types
     }

     /// Returns memory usage information about any tracked structs.
@@ -891,13 +897,12 @@ where
         unsafe { mem::transmute::<&C::Fields<'static>, &C::Fields<'_>>(&self.fields) }
     }

-    fn take_memo_table(&mut self) -> MemoTable {
+    fn memo_table_mut(&mut self) -> &mut MemoTable {
         // This fn is only called after `updated_at` has been set to `None`;
         // this ensures that there is no concurrent access
         // (and that the `&mut self` is accurate...).
         assert!(self.updated_at.load().is_none());
-
-        mem::take(&mut self.memos)
+        &mut self.memos
     }

     fn read_lock(&self, current_revision: Revision) {
@@ -82,7 +82,11 @@ where
         C::TRACKED_FIELD_NAMES[self.field_index]
     }

-    fn memo_table_types(&self) -> Arc<MemoTableTypes> {
+    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
+        unreachable!("tracked field does not allocate pages")
+    }
+
+    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
         unreachable!("tracked field does not allocate pages")
     }
 }
40  src/views.rs

@@ -80,16 +80,16 @@ impl Views {
     }

     /// Add a new downcaster from `dyn Database` to `dyn DbView`.
-    pub fn add<DbView: ?Sized + Any>(&self, func: DatabaseDownCasterSig<DbView>) {
-        let target_type_id = TypeId::of::<DbView>();
-        if self
-            .view_casters
-            .iter()
-            .any(|(_, u)| u.target_type_id == target_type_id)
-        {
-            return;
+    pub fn add<DbView: ?Sized + Any>(
+        &self,
+        func: DatabaseDownCasterSig<DbView>,
+    ) -> DatabaseDownCaster<DbView> {
+        if let Some(view) = self.try_downcaster_for() {
+            return view;
         }

         self.view_casters.push(ViewCaster::new::<DbView>(func));
+        DatabaseDownCaster(self.source_type_id, func)
     }

     /// Retrieve an downcaster function from `dyn Database` to `dyn DbView`.
@@ -98,23 +98,31 @@ impl Views {
     ///
     /// If the underlying type of `db` is not the same as the database type this upcasts was created for.
     pub fn downcaster_for<DbView: ?Sized + Any>(&self) -> DatabaseDownCaster<DbView> {
+        self.try_downcaster_for().unwrap_or_else(|| {
+            panic!(
+                "No downcaster registered for type `{}` in `Views`",
+                std::any::type_name::<DbView>(),
+            )
+        })
+    }
+
+    /// Retrieve an downcaster function from `dyn Database` to `dyn DbView`, if it exists.
+    #[inline]
+    pub fn try_downcaster_for<DbView: ?Sized + Any>(&self) -> Option<DatabaseDownCaster<DbView>> {
         let view_type_id = TypeId::of::<DbView>();
-        for (_idx, view) in self.view_casters.iter() {
+        for (_, view) in self.view_casters.iter() {
             if view.target_type_id == view_type_id {
                 // SAFETY: We are unerasing the type erased function pointer having made sure the
-                // TypeId matches.
-                return DatabaseDownCaster(self.source_type_id, unsafe {
+                // `TypeId` matches.
+                return Some(DatabaseDownCaster(self.source_type_id, unsafe {
                     std::mem::transmute::<ErasedDatabaseDownCasterSig, DatabaseDownCasterSig<DbView>>(
                         view.cast,
                     )
-                });
+                }));
             }
         }

-        panic!(
-            "No downcaster registered for type `{}` in `Views`",
-            std::any::type_name::<DbView>(),
-        );
+        None
     }
 }
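Note: the registry above keys type-erased casters by `TypeId` and un-erases them on lookup, splitting the API into a fallible `try_downcaster_for` and a panicking `downcaster_for` wrapper. A stripped-down sketch of the same lookup pattern using only safe std types (the `Registry` type and method names are made up for illustration):

```rust
use std::any::{Any, TypeId};
use std::collections::HashMap;

// A registry of boxed values, keyed by the TypeId of the stored type.
#[derive(Default)]
struct Registry {
    entries: HashMap<TypeId, Box<dyn Any>>,
}

impl Registry {
    fn add<T: Any>(&mut self, value: T) {
        self.entries.insert(TypeId::of::<T>(), Box::new(value));
    }

    // Mirrors `try_downcaster_for`: a lookup that may fail...
    fn try_get<T: Any>(&self) -> Option<&T> {
        self.entries.get(&TypeId::of::<T>())?.downcast_ref::<T>()
    }

    // ...and a panicking wrapper that mirrors `downcaster_for`.
    fn get<T: Any>(&self) -> &T {
        self.try_get().unwrap_or_else(|| {
            panic!("no entry registered for `{}`", std::any::type_name::<T>())
        })
    }
}

fn main() {
    let mut registry = Registry::default();
    registry.add(42u32);
    assert_eq!(*registry.get::<u32>(), 42);
    assert!(registry.try_get::<String>().is_none());
}
```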
315
src/zalsa.rs
315
src/zalsa.rs
|
@ -1,18 +1,13 @@
|
||||||
use std::any::{Any, TypeId};
|
use std::any::{Any, TypeId};
|
||||||
use std::hash::BuildHasherDefault;
|
use std::hash::BuildHasherDefault;
|
||||||
use std::marker::PhantomData;
|
|
||||||
use std::mem;
|
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use std::panic::RefUnwindSafe;
|
use std::panic::RefUnwindSafe;
|
||||||
|
|
||||||
|
use hashbrown::HashMap;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
use crate::hash::TypeIdHasher;
|
use crate::hash::TypeIdHasher;
|
||||||
use crate::ingredient::{Ingredient, Jar};
|
use crate::ingredient::{Ingredient, Jar};
|
||||||
use crate::nonce::{Nonce, NonceGenerator};
|
|
||||||
use crate::runtime::Runtime;
|
use crate::runtime::Runtime;
|
||||||
use crate::sync::atomic::{AtomicU64, Ordering};
|
|
||||||
use crate::sync::{papaya, Mutex, RwLock};
|
|
||||||
use crate::table::memo::MemoTableWithTypes;
|
use crate::table::memo::MemoTableWithTypes;
|
||||||
use crate::table::Table;
|
use crate::table::Table;
|
||||||
use crate::views::Views;
|
use crate::views::Views;
|
||||||
|
@ -62,13 +57,14 @@ pub unsafe trait ZalsaDatabase: Any {
|
||||||
pub fn views<Db: ?Sized + Database>(db: &Db) -> &Views {
|
pub fn views<Db: ?Sized + Database>(db: &Db) -> &Views {
|
||||||
db.zalsa().views()
|
db.zalsa().views()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Nonce type representing the underlying database storage.
|
/// Nonce type representing the underlying database storage.
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||||
|
#[cfg(not(feature = "inventory"))]
|
||||||
pub struct StorageNonce;
|
pub struct StorageNonce;
|
||||||
|
|
||||||
// Generator for storage nonces.
|
// Generator for storage nonces.
|
||||||
static NONCE: NonceGenerator<StorageNonce> = NonceGenerator::new();
|
#[cfg(not(feature = "inventory"))]
|
||||||
|
static NONCE: crate::nonce::NonceGenerator<StorageNonce> = crate::nonce::NonceGenerator::new();
|
||||||
|
|
||||||
/// An ingredient index identifies a particular [`Ingredient`] in the database.
|
/// An ingredient index identifies a particular [`Ingredient`] in the database.
|
||||||
///
|
///
|
||||||
|
@ -83,10 +79,16 @@ impl IngredientIndex {
|
||||||
/// This reserves one bit for an optional tag.
|
/// This reserves one bit for an optional tag.
|
||||||
const MAX_INDEX: u32 = 0x7FFF_FFFF;
|
const MAX_INDEX: u32 = 0x7FFF_FFFF;
|
||||||
|
|
||||||
/// Create an ingredient index from a `usize`.
|
/// Create an ingredient index from a `u32`.
|
||||||
pub(crate) fn from(v: usize) -> Self {
|
pub(crate) fn from(v: u32) -> Self {
|
||||||
assert!(v <= Self::MAX_INDEX as usize);
|
assert!(v <= Self::MAX_INDEX);
|
||||||
Self(v as u32)
|
Self(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create an ingredient index from a `u32`, without performing validating
|
||||||
|
/// that the index is valid.
|
||||||
|
pub(crate) fn from_unchecked(v: u32) -> Self {
|
||||||
|
Self(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert the ingredient index back into a `u32`.
|
/// Convert the ingredient index back into a `u32`.
|
||||||
|
@ -134,28 +136,24 @@ impl MemoIngredientIndex {
|
||||||
pub struct Zalsa {
|
pub struct Zalsa {
|
||||||
views_of: Views,
|
views_of: Views,
|
||||||
|
|
||||||
nonce: Nonce<StorageNonce>,
|
#[cfg(not(feature = "inventory"))]
|
||||||
|
nonce: crate::nonce::Nonce<StorageNonce>,
|
||||||
|
|
||||||
/// Map from the [`IngredientIndex::as_usize`][] of a salsa struct to a list of
|
/// Map from the [`IngredientIndex::as_usize`][] of a salsa struct to a list of
|
||||||
/// [ingredient-indices](`IngredientIndex`) for tracked functions that have this salsa struct
|
/// [ingredient-indices](`IngredientIndex`) for tracked functions that have this salsa struct
|
||||||
/// as input.
|
/// as input.
|
||||||
memo_ingredient_indices: RwLock<Vec<Vec<IngredientIndex>>>,
|
memo_ingredient_indices: Vec<Vec<IngredientIndex>>,
|
||||||
|
|
||||||
/// Map from the type-id of an `impl Jar` to the index of its first ingredient.
|
/// Map from the type-id of an `impl Jar` to the index of its first ingredient.
|
||||||
jar_map: papaya::HashMap<TypeId, IngredientIndex, BuildHasherDefault<TypeIdHasher>>,
|
jar_map: HashMap<TypeId, IngredientIndex, BuildHasherDefault<TypeIdHasher>>,
|
||||||
|
|
||||||
/// The write-lock for `jar_map`.
|
|
||||||
jar_map_lock: Mutex<()>,
|
|
||||||
|
|
||||||
/// A map from the `IngredientIndex` to the `TypeId` of its ID struct.
|
/// A map from the `IngredientIndex` to the `TypeId` of its ID struct.
|
||||||
///
|
///
|
||||||
/// Notably this is not the reverse mapping of `jar_map`.
|
/// Notably this is not the reverse mapping of `jar_map`.
|
||||||
ingredient_to_id_struct_type_id_map: RwLock<FxHashMap<IngredientIndex, TypeId>>,
|
ingredient_to_id_struct_type_id_map: FxHashMap<IngredientIndex, TypeId>,
|
||||||
|
|
||||||
/// Vector of ingredients.
|
/// Vector of ingredients.
|
||||||
///
|
ingredients_vec: Vec<Box<dyn Ingredient>>,
|
||||||
/// Immutable unless the mutex on `ingredients_map` is held.
|
|
||||||
ingredients_vec: boxcar::Vec<Box<dyn Ingredient>>,
|
|
||||||
|
|
||||||
/// Indices of ingredients that require reset when a new revision starts.
|
/// Indices of ingredients that require reset when a new revision starts.
|
||||||
ingredients_requiring_reset: boxcar::Vec<IngredientIndex>,
|
ingredients_requiring_reset: boxcar::Vec<IngredientIndex>,
|
||||||
|
@ -177,22 +175,43 @@ impl RefUnwindSafe for Zalsa {}
|
||||||
impl Zalsa {
|
impl Zalsa {
|
||||||
pub(crate) fn new<Db: Database>(
|
pub(crate) fn new<Db: Database>(
|
||||||
event_callback: Option<Box<dyn Fn(crate::Event) + Send + Sync + 'static>>,
|
event_callback: Option<Box<dyn Fn(crate::Event) + Send + Sync + 'static>>,
|
||||||
|
jars: Vec<ErasedJar>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
let mut zalsa = Self {
|
||||||
views_of: Views::new::<Db>(),
|
views_of: Views::new::<Db>(),
|
||||||
nonce: NONCE.nonce(),
|
jar_map: HashMap::default(),
|
||||||
jar_map: papaya::HashMap::default(),
|
|
||||||
jar_map_lock: Mutex::default(),
|
|
||||||
ingredient_to_id_struct_type_id_map: Default::default(),
|
ingredient_to_id_struct_type_id_map: Default::default(),
|
||||||
ingredients_vec: boxcar::Vec::new(),
|
ingredients_vec: Vec::new(),
|
||||||
ingredients_requiring_reset: boxcar::Vec::new(),
|
ingredients_requiring_reset: boxcar::Vec::new(),
|
||||||
runtime: Runtime::default(),
|
runtime: Runtime::default(),
|
||||||
memo_ingredient_indices: Default::default(),
|
memo_ingredient_indices: Default::default(),
|
||||||
event_callback,
|
event_callback,
|
||||||
|
#[cfg(not(feature = "inventory"))]
|
||||||
|
nonce: NONCE.nonce(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Collect and initialize all registered ingredients.
|
||||||
|
#[cfg(feature = "inventory")]
|
||||||
|
let mut jars = inventory::iter::<ErasedJar>()
|
||||||
|
.copied()
|
||||||
|
.chain(jars)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
#[cfg(not(feature = "inventory"))]
|
||||||
|
let mut jars = jars;
|
||||||
|
|
||||||
|
// Ensure structs are initialized before tracked functions.
|
||||||
|
jars.sort_by_key(|jar| jar.kind);
|
||||||
|
|
||||||
|
for jar in jars {
|
||||||
|
zalsa.insert_jar(jar);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
zalsa
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn nonce(&self) -> Nonce<StorageNonce> {
|
#[cfg(not(feature = "inventory"))]
|
||||||
|
pub(crate) fn nonce(&self) -> crate::nonce::Nonce<StorageNonce> {
|
||||||
self.nonce
|
self.nonce
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -218,7 +237,7 @@ impl Zalsa {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub(crate) fn lookup_ingredient(&self, index: IngredientIndex) -> &dyn Ingredient {
|
pub fn lookup_ingredient(&self, index: IngredientIndex) -> &dyn Ingredient {
|
||||||
let index = index.as_u32() as usize;
|
let index = index.as_u32() as usize;
|
||||||
self.ingredients_vec
|
self.ingredients_vec
|
||||||
.get(index)
|
.get(index)
|
||||||
|
@ -231,7 +250,7 @@ impl Zalsa {
|
||||||
struct_ingredient_index: IngredientIndex,
|
struct_ingredient_index: IngredientIndex,
|
||||||
memo_ingredient_index: MemoIngredientIndex,
|
memo_ingredient_index: MemoIngredientIndex,
|
||||||
) -> IngredientIndex {
|
) -> IngredientIndex {
|
||||||
self.memo_ingredient_indices.read()[struct_ingredient_index.as_u32() as usize]
|
self.memo_ingredient_indices[struct_ingredient_index.as_u32() as usize]
|
||||||
[memo_ingredient_index.as_usize()]
|
[memo_ingredient_index.as_usize()]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -239,7 +258,7 @@ impl Zalsa {
|
||||||
pub(crate) fn ingredients(&self) -> impl Iterator<Item = &dyn Ingredient> {
|
pub(crate) fn ingredients(&self) -> impl Iterator<Item = &dyn Ingredient> {
|
||||||
self.ingredients_vec
|
self.ingredients_vec
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(_, ingredient)| ingredient.as_ref())
|
.map(|ingredient| ingredient.as_ref())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Starts unwinding the stack if the current revision is cancelled.
|
/// Starts unwinding the stack if the current revision is cancelled.
|
||||||
|
@ -259,11 +278,11 @@ impl Zalsa {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn next_memo_ingredient_index(
|
pub(crate) fn next_memo_ingredient_index(
|
||||||
&self,
|
&mut self,
|
||||||
struct_ingredient_index: IngredientIndex,
|
struct_ingredient_index: IngredientIndex,
|
||||||
ingredient_index: IngredientIndex,
|
ingredient_index: IngredientIndex,
|
||||||
) -> MemoIngredientIndex {
|
) -> MemoIngredientIndex {
|
||||||
let mut memo_ingredients = self.memo_ingredient_indices.write();
|
let memo_ingredients = &mut self.memo_ingredient_indices;
|
||||||
let idx = struct_ingredient_index.as_u32() as usize;
|
let idx = struct_ingredient_index.as_u32() as usize;
|
||||||
let memo_ingredients = if let Some(memo_ingredients) = memo_ingredients.get_mut(idx) {
|
let memo_ingredients = if let Some(memo_ingredients) = memo_ingredients.get_mut(idx) {
|
||||||
memo_ingredients
|
memo_ingredients
|
||||||
|
@ -291,7 +310,6 @@ impl Zalsa {
|
||||||
let ingredient_index = self.ingredient_index(id);
|
let ingredient_index = self.ingredient_index(id);
|
||||||
*self
|
*self
|
||||||
.ingredient_to_id_struct_type_id_map
|
.ingredient_to_id_struct_type_id_map
|
||||||
.read()
|
|
||||||
.get(&ingredient_index)
|
.get(&ingredient_index)
|
||||||
.expect("should have the ingredient index available")
|
.expect("should have the ingredient index available")
|
||||||
}
|
}
|
||||||
|
@ -299,44 +317,36 @@ impl Zalsa {
|
||||||
/// **NOT SEMVER STABLE**
|
/// **NOT SEMVER STABLE**
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn lookup_jar_by_type<J: Jar>(&self) -> JarEntry<'_, J> {
|
pub fn lookup_jar_by_type<J: Jar>(&self) -> IngredientIndex {
|
||||||
let jar_type_id = TypeId::of::<J>();
|
let jar_type_id = TypeId::of::<J>();
|
||||||
let guard = self.jar_map.guard();
|
|
||||||
|
|
||||||
match self.jar_map.get(&jar_type_id, &guard) {
|
*self.jar_map.get(&jar_type_id).unwrap_or_else(|| {
|
||||||
Some(index) => JarEntry::Occupied(index),
|
panic!(
|
||||||
None => JarEntry::Vacant {
|
"ingredient `{}` was not registered",
|
||||||
guard,
|
std::any::type_name::<J>()
|
||||||
zalsa: self,
|
)
|
||||||
_jar: PhantomData,
|
})
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cold]
|
fn insert_jar(&mut self, jar: ErasedJar) {
|
||||||
#[inline(never)]
|
let jar_type_id = (jar.type_id)();
|
||||||
fn add_or_lookup_jar_by_type<J: Jar>(&self, guard: &papaya::LocalGuard<'_>) -> IngredientIndex {
|
|
||||||
let jar_type_id = TypeId::of::<J>();
|
|
||||||
let dependencies = J::create_dependencies(self);
|
|
||||||
|
|
||||||
let jar_map_lock = self.jar_map_lock.lock();
|
let index = IngredientIndex::from(self.ingredients_vec.len() as u32);
|
||||||
|
|
||||||
let index = IngredientIndex::from(self.ingredients_vec.count());
|
if self.jar_map.contains_key(&jar_type_id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Someone made it earlier than us.
|
let ingredients = (jar.create_ingredients)(self, index);
|
||||||
if let Some(index) = self.jar_map.get(&jar_type_id, guard) {
|
|
||||||
return index;
|
|
||||||
};
|
|
||||||
|
|
||||||
let ingredients = J::create_ingredients(self, index, dependencies);
|
|
||||||
for ingredient in ingredients {
|
for ingredient in ingredients {
|
||||||
let expected_index = ingredient.ingredient_index();
|
let expected_index = ingredient.ingredient_index();
|
||||||
|
|
||||||
if ingredient.requires_reset_for_new_revision() {
|
if ingredient.requires_reset_for_new_revision() {
|
||||||
self.ingredients_requiring_reset.push(expected_index);
|
self.ingredients_requiring_reset.push(expected_index);
|
||||||
}
|
}
|
||||||
|
|
||||||
let actual_index = self.ingredients_vec.push(ingredient);
|
self.ingredients_vec.push(ingredient);
|
||||||
|
|
||||||
|
let actual_index = self.ingredients_vec.len() - 1;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
expected_index.as_u32() as usize,
|
expected_index.as_u32() as usize,
|
||||||
actual_index,
|
actual_index,
|
||||||
|
@ -347,17 +357,10 @@ impl Zalsa {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Insert the index after all ingredients are inserted to avoid exposing
|
self.jar_map.insert(jar_type_id, index);
|
||||||
// partially initialized jars to readers.
|
|
||||||
self.jar_map.insert(jar_type_id, index, guard);
|
|
||||||
|
|
||||||
drop(jar_map_lock);
|
|
||||||
|
|
||||||
self.ingredient_to_id_struct_type_id_map
|
self.ingredient_to_id_struct_type_id_map
|
||||||
.write()
|
.insert(index, (jar.id_struct_type_id)());
|
||||||
.insert(index, J::id_struct_type_id());
|
|
||||||
|
|
||||||
index
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// **NOT SEMVER STABLE**
|
/// **NOT SEMVER STABLE**
|
||||||
|
@ -434,139 +437,69 @@ impl Zalsa {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum JarEntry<'a, J> {
|
/// A type-erased `Jar`, used for ingredient registration.
|
||||||
Occupied(IngredientIndex),
|
#[derive(Clone, Copy)]
|
||||||
Vacant {
|
pub struct ErasedJar {
|
||||||
zalsa: &'a Zalsa,
|
kind: JarKind,
|
||||||
guard: papaya::LocalGuard<'a>,
|
type_id: fn() -> TypeId,
|
||||||
_jar: PhantomData<J>,
|
id_struct_type_id: fn() -> TypeId,
|
||||||
},
|
create_ingredients: fn(&mut Zalsa, IngredientIndex) -> Vec<Box<dyn Ingredient>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<J> JarEntry<'_, J>
|
/// The kind of an `Jar`.
|
||||||
where
|
///
|
||||||
J: Jar,
|
/// Note that the ordering of the variants is important. Struct ingredients must be
|
||||||
{
|
/// initialized before tracked functions, as tracked function ingredients depend on
|
||||||
#[inline]
|
/// their input struct.
|
||||||
pub fn get(&self) -> Option<IngredientIndex> {
|
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)]
|
||||||
match *self {
|
pub enum JarKind {
|
||||||
JarEntry::Occupied(index) => Some(index),
|
/// An input/tracked/interned struct.
|
||||||
JarEntry::Vacant { .. } => None,
|
Struct,
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[inline]
|
/// A tracked function.
|
||||||
pub fn get_or_create(&self) -> IngredientIndex {
|
TrackedFn,
|
||||||
match self {
|
|
||||||
JarEntry::Occupied(index) => *index,
|
|
||||||
JarEntry::Vacant { zalsa, guard, _jar } => zalsa.add_or_lookup_jar_by_type::<J>(guard),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Caches a pointer to an ingredient in a database.
|
impl ErasedJar {
|
||||||
/// Optimized for the case of a single database.
|
/// Performs type-erasure of a given ingredient.
|
||||||
pub struct IngredientCache<I>
|
pub const fn erase<I: HasJar>() -> Self {
|
||||||
where
|
|
||||||
I: Ingredient,
|
|
||||||
{
|
|
||||||
// A packed representation of `Option<(Nonce<StorageNonce>, IngredientIndex)>`.
|
|
||||||
//
|
|
||||||
// This allows us to replace a lock in favor of an atomic load. This works thanks to `Nonce`
|
|
||||||
// having a niche, which means the entire type can fit into an `AtomicU64`.
|
|
||||||
cached_data: AtomicU64,
|
|
||||||
phantom: PhantomData<fn() -> I>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<I> Default for IngredientCache<I>
|
|
||||||
where
|
|
||||||
I: Ingredient,
|
|
||||||
{
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<I> IngredientCache<I>
|
|
||||||
where
|
|
||||||
I: Ingredient,
|
|
||||||
{
|
|
||||||
const UNINITIALIZED: u64 = 0;
|
|
||||||
|
|
||||||
/// Create a new cache
|
|
||||||
pub const fn new() -> Self {
|
|
||||||
Self {
|
Self {
|
||||||
cached_data: AtomicU64::new(Self::UNINITIALIZED),
|
kind: I::KIND,
|
||||||
phantom: PhantomData,
|
type_id: TypeId::of::<I::Jar>,
|
||||||
|
create_ingredients: <I::Jar>::create_ingredients,
|
||||||
|
id_struct_type_id: <I::Jar>::id_struct_type_id,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
    /// Get a reference to the ingredient in the database.
    /// If the ingredient is not already in the cache, it will be created.
    #[inline(always)]
    pub fn get_or_create<'db>(
        &self,
        zalsa: &'db Zalsa,
        create_index: impl Fn() -> IngredientIndex,
    ) -> &'db I {
        let index = self.get_or_create_index(zalsa, create_index);
        zalsa.lookup_ingredient(index).assert_type::<I>()
    }

    /// Get a reference to the ingredient in the database.
    /// If the ingredient is not already in the cache, it will be created.
    #[inline(always)]
    pub fn get_or_create_index(
        &self,
        zalsa: &Zalsa,
        create_index: impl Fn() -> IngredientIndex,
    ) -> IngredientIndex {
        const _: () = assert!(
            mem::size_of::<(Nonce<StorageNonce>, IngredientIndex)>() == mem::size_of::<u64>()
        );
        let cached_data = self.cached_data.load(Ordering::Acquire);
        if cached_data == Self::UNINITIALIZED {
            #[cold]
            #[inline(never)]
            fn get_or_create_index_slow<I: Ingredient>(
                this: &IngredientCache<I>,
                zalsa: &Zalsa,
                create_index: impl Fn() -> IngredientIndex,
            ) -> IngredientIndex {
                let index = create_index();
                let nonce = zalsa.nonce().into_u32().get() as u64;
                let packed = (nonce << u32::BITS) | (index.as_u32() as u64);
                debug_assert_ne!(packed, IngredientCache::<I>::UNINITIALIZED);

                // Discard the result: whether or not we won the race to fill the cache does not
                // matter, since we know that something has been cached now.
                _ = this.cached_data.compare_exchange(
                    IngredientCache::<I>::UNINITIALIZED,
                    packed,
                    Ordering::Release,
                    Ordering::Acquire,
                );

                // We already have our index computed, so we can just use that.
                index
            }

            return get_or_create_index_slow(self, zalsa, create_index);
        };

        // Unpack our `u64`.
        // SAFETY: We've checked against `UNINITIALIZED` (0) above, so the upper bits must be non-zero.
        let nonce = Nonce::<StorageNonce>::from_u32(unsafe {
            NonZeroU32::new_unchecked((cached_data >> u32::BITS) as u32)
        });
        let mut index = IngredientIndex(cached_data as u32);

        if zalsa.nonce() != nonce {
            index = create_index();
        }

        index
    }
}

// Type-erased registration entry for a jar: `erase` builds one from any `HasJar` type so that
// jars can be submitted to the static registry (or registered manually).
impl ErasedJar {
    pub fn erase<I: HasJar>() -> Self {
        Self {
            kind: I::KIND,
            type_id: TypeId::of::<I::Jar>,
            create_ingredients: <I::Jar>::create_ingredients,
            id_struct_type_id: <I::Jar>::id_struct_type_id,
        }
    }
}
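To make the packed `Option<(Nonce<StorageNonce>, IngredientIndex)>` concrete, here is a minimal standalone sketch (not code from this commit; the `pack`/`unpack` helpers and the example values are hypothetical) of how a non-zero 32-bit nonce and a 32-bit index can share one `AtomicU64`, with zero reserved as the uninitialized state:

use std::sync::atomic::{AtomicU64, Ordering};

const UNINITIALIZED: u64 = 0;

// Non-zero nonce in the upper 32 bits, index in the lower 32 bits.
fn pack(nonce: u32, index: u32) -> u64 {
    debug_assert_ne!(nonce, 0, "a zero nonce would collide with UNINITIALIZED");
    ((nonce as u64) << u32::BITS) | index as u64
}

// A single atomic load recovers both halves.
fn unpack(packed: u64) -> (u32, u32) {
    ((packed >> u32::BITS) as u32, packed as u32)
}

fn main() {
    let cache = AtomicU64::new(UNINITIALIZED);

    // The first caller publishes its value; losing the race is fine, because either way
    // some valid value ends up cached.
    let _ = cache.compare_exchange(UNINITIALIZED, pack(7, 42), Ordering::Release, Ordering::Acquire);

    let loaded = cache.load(Ordering::Acquire);
    assert_ne!(loaded, UNINITIALIZED);
    assert_eq!(unpack(loaded), (7, 42));
}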
/// A salsa ingredient that can be registered in the database.
///
/// This trait is implemented for tracked functions and salsa structs.
pub trait HasJar {
    /// The [`Jar`] associated with this ingredient.
    type Jar: Jar;

    /// The [`JarKind`] for `Self::Jar`.
    const KIND: JarKind;
}

// Collect jars statically at compile-time if supported.
#[cfg(feature = "inventory")]
inventory::collect!(ErasedJar);

#[cfg(feature = "inventory")]
pub use inventory::submit as register_jar;

#[cfg(not(feature = "inventory"))]
#[macro_export]
#[doc(hidden)]
macro_rules! register_jar {
    ($($_:tt)*) => {};
}

#[cfg(not(feature = "inventory"))]
pub use crate::register_jar;
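For readers unfamiliar with the `inventory` crate, the collect/submit/iterate pattern behind the aliases above looks roughly like the standalone sketch below; the `Plugin` type and `registered_plugin_names` helper are illustrative stand-ins, not salsa's `ErasedJar` machinery:

// Toy registry entry standing in for `ErasedJar`.
pub struct Plugin {
    pub name: &'static str,
}

// Declare that `Plugin` values can be collected from every crate in the dependency graph.
inventory::collect!(Plugin);

// Any crate can submit an entry at compile time; there is no central list to edit.
inventory::submit! {
    Plugin { name: "example" }
}

// The host then enumerates every submitted entry at runtime.
fn registered_plugin_names() -> Vec<&'static str> {
    inventory::iter::<Plugin>.into_iter().map(|p| p.name).collect()
}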
/// Given a wide pointer `T`, extracts the data pointer (typed as `U`).
///
/// # Safety
@@ -754,7 +754,7 @@ impl QueryOrigin {
             QueryOriginKind::Assigned => {
                 // SAFETY: `data.index` is initialized when the tag is `QueryOriginKind::Assigned`.
                 let index = unsafe { self.data.index };
-                let ingredient_index = IngredientIndex::from(self.metadata as usize);
+                let ingredient_index = IngredientIndex::from(self.metadata);
                 QueryOriginRef::Assigned(DatabaseKeyIndex::new(ingredient_index, index))
             }

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that when having nested tracked functions
 //! we don't drop any values when accumulating.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 mod common;
 use expect_test::expect;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 mod common;
 use expect_test::expect;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Demonstrates that accumulation is done in the order
 //! in which things were originally executed.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Accumulate values from within a tracked function.
 //! Then mutate the values so that the tracked function re-executes.
 //! Check that we accumulate the appropriate, new values.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that we don't get duplicate accumulated values

 mod common;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Demonstrates the workaround of wrapping calls to
 //! `accumulated` in a tracked function to get better
 //! reuse.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Accumulator re-use test.
 //!
 //! Tests behavior when a query's only inputs

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 mod common;
 use common::{LogDatabase, LoggerDatabase};
 use expect_test::expect;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Tests that accumulated values are correctly accounted for
 //! when backdating a value.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use expect_test::expect;
 use salsa::{Backtrace, Database, DatabaseImpl};
 use test_log::test;

@@ -71,15 +73,15 @@ fn backtrace_works() {
     expect![[r#"
         query stacktrace:
            0: query_e(Id(0))
-             at tests/backtrace.rs:30
+             at tests/backtrace.rs:32
            1: query_d(Id(0))
-             at tests/backtrace.rs:25
+             at tests/backtrace.rs:27
            2: query_c(Id(0))
-             at tests/backtrace.rs:20
+             at tests/backtrace.rs:22
            3: query_b(Id(0))
-             at tests/backtrace.rs:15
+             at tests/backtrace.rs:17
            4: query_a(Id(0))
-             at tests/backtrace.rs:10
+             at tests/backtrace.rs:12
     "#]]
     .assert_eq(&backtrace);

@@ -87,15 +89,15 @@ fn backtrace_works() {
     expect![[r#"
         query stacktrace:
            0: query_e(Id(1)) -> (R1, Durability::LOW)
-             at tests/backtrace.rs:30
+             at tests/backtrace.rs:32
            1: query_d(Id(1)) -> (R1, Durability::HIGH)
-             at tests/backtrace.rs:25
+             at tests/backtrace.rs:27
            2: query_c(Id(1)) -> (R1, Durability::HIGH)
-             at tests/backtrace.rs:20
+             at tests/backtrace.rs:22
            3: query_b(Id(1)) -> (R1, Durability::HIGH)
-             at tests/backtrace.rs:15
+             at tests/backtrace.rs:17
            4: query_a(Id(1)) -> (R1, Durability::HIGH)
-             at tests/backtrace.rs:10
+             at tests/backtrace.rs:12
     "#]]
     .assert_eq(&backtrace);

@@ -103,12 +105,12 @@ fn backtrace_works() {
     expect![[r#"
         query stacktrace:
            0: query_e(Id(2))
-             at tests/backtrace.rs:30
+             at tests/backtrace.rs:32
            1: query_cycle(Id(2))
-             at tests/backtrace.rs:43
+             at tests/backtrace.rs:45
               cycle heads: query_cycle(Id(2)) -> IterationCount(0)
            2: query_f(Id(2))
-             at tests/backtrace.rs:38
+             at tests/backtrace.rs:40
     "#]]
     .assert_eq(&backtrace);

@@ -116,12 +118,12 @@ fn backtrace_works() {
     expect![[r#"
         query stacktrace:
            0: query_e(Id(3)) -> (R1, Durability::LOW)
-             at tests/backtrace.rs:30
+             at tests/backtrace.rs:32
            1: query_cycle(Id(3)) -> (R1, Durability::HIGH, iteration = IterationCount(0))
-             at tests/backtrace.rs:43
+             at tests/backtrace.rs:45
               cycle heads: query_cycle(Id(3)) -> IterationCount(0)
            2: query_f(Id(3)) -> (R1, Durability::HIGH)
-             at tests/backtrace.rs:38
+             at tests/backtrace.rs:40
     "#]]
     .assert_eq(&backtrace);
 }

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that auto trait impls exist as expected.

 use std::panic::UnwindSafe;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 #[rustversion::all(stable, since(1.84))]
 #[test]
 fn compile_fail() {

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test cases for fixpoint iteration cycle resolution.
 //!
 //! These test cases use a generic query setup that allows constructing arbitrary dependency

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use std::collections::HashSet;

 mod common;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! It is possible to omit the `cycle_fn`, only specifying `cycle_result` in which case
 //! an immediate fallback value is used as the cycle handling opposed to doing a fixpoint resolution.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Calling back into the same cycle from your cycle initial function will trigger another cycle.

 #[salsa::tracked]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! It's possible to call a Salsa query from within a cycle initial fn.

 #[salsa::tracked]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Tests for incremental validation for queries involved in a cycle.
 mod common;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test tracked struct output from a query in a cycle.
 mod common;
 use common::{HasLogger, LogDatabase, Logger};

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Calling back into the same cycle from your cycle recovery function _can_ work out, as long as
 //! the overall cycle still converges.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! It's possible to call a Salsa query from within a cycle recovery fn.

 #[salsa::tracked]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use salsa::{Database, Setter};

 #[salsa::tracked]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use salsa::{Database, Setter};

 #[salsa::input]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Tests for cycles where the cycle head is stored on a tracked struct
 //! and that tracked struct is freed in a later revision.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test for cycle handling where a tracked struct created in the first revision
 //! is stored in the final value of the cycle but isn't recreated in the second
 //! iteration of the creating query.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test case for fixpoint iteration cycle resolution.
 //!
 //! This test case is intended to simulate a (very simplified) version of a real dataflow analysis

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that `DeriveWithDb` is correctly derived.

 use expect_test::expect;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 #[salsa::interned(debug)]
 struct InternedStruct<'db> {
     name: String,

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Delete cascade:
 //!
 //! * when we delete memoized data, also delete outputs from that data

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Basic deletion test:
 //!
 //! * entities not created in a revision are deleted, as is any memoized data keyed on them.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Basic deletion test:
 //!
 //! * entities not created in a revision are deleted, as is any memoized data keyed on them.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that the `Update` derive works as expected

 #[derive(salsa::Update)]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Tests that code using the builder's durability methods compiles.

 use salsa::{Database, Durability, Setter};

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that if field X of a tracked struct changes but not field Y,
 //! functions that depend on X re-execute, but those depending only on Y do not
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that if field X of an input changes but not field Y,
 //! functions that depend on X re-execute, but those depending only on Y do not
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use std::hash::Hash;

 #[test]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Tests that fields attributed with `#[default]` are initialized with `Default::default()`.

 use salsa::Durability;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Tests that code using the builder's durability methods compiles.

 use salsa::Durability;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use salsa::plumbing::ZalsaDatabase;
 use salsa::{Durability, Setter};
 use test_log::test;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use salsa::{Durability, Setter};

 #[salsa::interned(no_lifetime)]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.
@@ -35,7 +37,18 @@ struct InternedString<'db>(
 const _: () = {
     use salsa::plumbing as zalsa_;
     use zalsa_::interned as zalsa_struct_;

     type Configuration_ = InternedString<'static>;

+    impl<'db> zalsa_::HasJar for InternedString<'db> {
+        type Jar = zalsa_struct_::JarImpl<Configuration_>;
+        const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
+    }
+
+    zalsa_::register_jar! {
+        zalsa_::ErasedJar::erase::<InternedString<'static>>()
+    }
+
     #[derive(Clone)]
     struct StructData<'db>(String, InternedString<'db>);

@@ -87,9 +100,7 @@ const _: () = {
         let zalsa = db.zalsa();
         CACHE.get_or_create(zalsa, || {
-            zalsa
-                .lookup_jar_by_type::<zalsa_struct_::JarImpl<Configuration_>>()
-                .get_or_create()
+            zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<Configuration_>>()
         })
     }
 }

@@ -115,9 +126,8 @@ const _: () = {
 impl zalsa_::SalsaStructInDb for InternedString<'_> {
     type MemoIngredientMap = zalsa_::MemoIngredientSingletonIndex;

-    fn lookup_or_create_ingredient_index(aux: &Zalsa) -> salsa::plumbing::IngredientIndices {
+    fn lookup_ingredient_index(aux: &Zalsa) -> salsa::plumbing::IngredientIndices {
         aux.lookup_jar_by_type::<zalsa_struct_::JarImpl<Configuration_>>()
-            .get_or_create()
             .into()
     }
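The `HasJar` impl and `register_jar!` submission added above are presumably the hand-expanded equivalent of what the `#[salsa::interned]`/`#[salsa::tracked]` attribute macros now emit, so ordinary macro-based user code should not need to change; the type below is a hypothetical example for contrast, not part of this commit:

#[salsa::interned]
struct MyName<'db> {
    text: String,
}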
@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn with lru options
 //! compiles and executes successfully.
92
tests/manual_registration.rs
Normal file

@@ -0,0 +1,92 @@
#![cfg(not(feature = "inventory"))]

mod ingredients {
    #[salsa::input]
    pub(super) struct MyInput {
        field: u32,
    }

    #[salsa::tracked]
    pub(super) struct MyTracked<'db> {
        pub(super) field: u32,
    }

    #[salsa::interned]
    pub(super) struct MyInterned<'db> {
        pub(super) field: u32,
    }

    #[salsa::tracked]
    pub(super) fn intern<'db>(db: &'db dyn salsa::Database, input: MyInput) -> MyInterned<'db> {
        MyInterned::new(db, input.field(db))
    }

    #[salsa::tracked]
    pub(super) fn track<'db>(db: &'db dyn salsa::Database, input: MyInput) -> MyTracked<'db> {
        MyTracked::new(db, input.field(db))
    }
}

#[salsa::db]
#[derive(Clone, Default)]
pub struct DatabaseImpl {
    storage: salsa::Storage<Self>,
}

#[salsa::db]
impl salsa::Database for DatabaseImpl {}

#[test]
fn single_database() {
    let db = DatabaseImpl {
        storage: salsa::Storage::builder()
            .ingredient::<ingredients::track>()
            .ingredient::<ingredients::intern>()
            .ingredient::<ingredients::MyInput>()
            .ingredient::<ingredients::MyTracked<'_>>()
            .ingredient::<ingredients::MyInterned<'_>>()
            .build(),
    };

    let input = ingredients::MyInput::new(&db, 1);

    let tracked = ingredients::track(&db, input);
    let interned = ingredients::intern(&db, input);

    assert_eq!(tracked.field(&db), 1);
    assert_eq!(interned.field(&db), 1);
}

#[test]
fn multiple_databases() {
    let db1 = DatabaseImpl {
        storage: salsa::Storage::builder()
            .ingredient::<ingredients::intern>()
            .ingredient::<ingredients::MyInput>()
            .ingredient::<ingredients::MyInterned<'_>>()
            .build(),
    };

    let input = ingredients::MyInput::new(&db1, 1);
    let interned = ingredients::intern(&db1, input);
    assert_eq!(interned.field(&db1), 1);

    // Create a second database with different ingredient indices.
    let db2 = DatabaseImpl {
        storage: salsa::Storage::builder()
            .ingredient::<ingredients::track>()
            .ingredient::<ingredients::intern>()
            .ingredient::<ingredients::MyInput>()
            .ingredient::<ingredients::MyTracked<'_>>()
            .ingredient::<ingredients::MyInterned<'_>>()
            .build(),
    };

    let input = ingredients::MyInput::new(&db2, 2);
    let interned = ingredients::intern(&db2, input);
    assert_eq!(interned.field(&db2), 2);

    let input = ingredients::MyInput::new(&db2, 3);
    let tracked = ingredients::track(&db2, input);
    assert_eq!(tracked.field(&db2), 3);
}
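One practical consequence of manual registration shown by this test: every database construction site must list the same set of ingredients. A downstream crate might centralize that list in a helper; the sketch below reuses the `ingredients` module and `DatabaseImpl` defined above and is not part of the commit:

fn new_database() -> DatabaseImpl {
    DatabaseImpl {
        storage: salsa::Storage::builder()
            .ingredient::<ingredients::track>()
            .ingredient::<ingredients::intern>()
            .ingredient::<ingredients::MyInput>()
            .ingredient::<ingredients::MyTracked<'_>>()
            .ingredient::<ingredients::MyInterned<'_>>()
            .build(),
    }
}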
@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use expect_test::expect;

 #[salsa::input]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a setting a field on a `#[salsa::input]`
 //! overwrites and returns the old value.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that the `constructor` macro overrides
 //! the `new` method's name and `get` and `set`
 //! change the name of the getter and setter of the fields.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that creating a tracked struct outside of a
 //! tracked function panics with an assert message.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 mod setup;
 mod signal;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use salsa::Database;

 #[salsa::input]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Basic Singleton struct test:
 //!
 //! Singleton structs are created only once. Subsequent `get`s and `new`s after creation return the same `Id`.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that `specify` only works if the key is a tracked struct created in the current query.
 //! compilation succeeds but execution panics
 #![allow(warnings)]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a constant `tracked` fn (has no inputs)
 //! compiles and executes successfully.
 #![allow(warnings)]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test an id field whose `PartialEq` impl is always true.

 use salsa::{Database, Setter};

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test for a tracked struct where an untracked field has a
 //! very poorly chosen hash impl (always returns 0).
 //!

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 use salsa::{Database as Db, Setter};
 use test_log::test;

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test a field whose `PartialEq` impl is always true.
 //! This can result in us getting different results than
 //! if we were to execute from scratch.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test a field whose `PartialEq` impl is always true.
 //! This can our "last changed" data to be wrong
 //! but we *should* always reflect the final values.

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a `tracked` fn on a `salsa::input`
 //! compiles and executes successfully.
 #![allow(warnings)]

@@ -1,3 +1,5 @@
+#![cfg(feature = "inventory")]
+
 //! Test that a constant `tracked` fn (has no inputs)
 //! compiles and executes successfully.
 #![allow(warnings)]

@@ -1,3 +1,4 @@
+#![cfg(feature = "inventory")]
 #![allow(warnings)]

 use salsa::plumbing::HasStorage;