[red-knot] Upgrade to the *new* *new* salsa (#12406)

Micha Reiser authored 2024-07-29 09:21:24 +02:00, committed by GitHub
parent 9495331a5f
commit e18b4e42d3
30 changed files with 483 additions and 415 deletions
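At a glance, the migration below replaces the old jar-based plumbing (#[salsa::jar(db=Db)] structs, DbWithJar<Jar> bounds, and hand-written salsa::ParallelDatabase/snapshot impls) with #[salsa::db] attributes on the database trait, the database struct, and each trait impl. A minimal sketch of the resulting shape, using an illustrative MyDatabase rather than the actual RootDatabase:

#[salsa::db]
pub trait Db: salsa::Database {}

#[salsa::db]
#[derive(Default)]
struct MyDatabase {
    // The storage field is all the new salsa needs; ingredients are no
    // longer listed in a jar.
    storage: salsa::Storage<Self>,
}

#[salsa::db]
impl salsa::Database for MyDatabase {}

#[salsa::db]
impl Db for MyDatabase {}

Tracked functions, inputs, and interned structs no longer need to be enumerated in a jar struct; as the hunks below show, the jar definitions are simply deleted.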

Cargo.lock

@ -188,6 +188,18 @@ version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "boomphf"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "617e2d952880a00583ddb9237ac3965732e8df6a92a8e7bcc054100ec467ec3b"
dependencies = [
"crossbeam-utils",
"log",
"rayon",
"wyhash",
]
[[package]]
name = "bstr"
version = "1.10.0"
@ -930,9 +942,9 @@ dependencies = [
[[package]]
name = "hashlink"
version = "0.8.4"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
dependencies = [
"hashbrown",
]
@ -1525,6 +1537,80 @@ dependencies = [
"indexmap",
]
[[package]]
name = "orx-concurrent-ordered-bag"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aa866e2be4aa03927eddb481e7c479d5109fe3121324fb7db6d97f91adf9876"
dependencies = [
"orx-fixed-vec",
"orx-pinned-concurrent-col",
"orx-pinned-vec",
"orx-pseudo-default",
"orx-split-vec",
]
[[package]]
name = "orx-concurrent-vec"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5912426ffb660f8b61e8f0812a1d07400803cd5513969d2c7af4d69602ba8a1"
dependencies = [
"orx-concurrent-ordered-bag",
"orx-fixed-vec",
"orx-pinned-concurrent-col",
"orx-pinned-vec",
"orx-pseudo-default",
"orx-split-vec",
]
[[package]]
name = "orx-fixed-vec"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f69466c7c1fc2e1f00b58e39059b78c438b9fad144d1937ef177ecfc413e997"
dependencies = [
"orx-pinned-vec",
"orx-pseudo-default",
]
[[package]]
name = "orx-pinned-concurrent-col"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdbcb1fa05dc1676f1c9cf19f443b3d2d2ca5835911477d22fa77cad8b79208d"
dependencies = [
"orx-fixed-vec",
"orx-pinned-vec",
"orx-pseudo-default",
"orx-split-vec",
]
[[package]]
name = "orx-pinned-vec"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1071baf586de45722668234bddf56c52c1ece6a6153d16541bbb0505f0ac055"
dependencies = [
"orx-pseudo-default",
]
[[package]]
name = "orx-pseudo-default"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2f627c439e723fa78e410a0faba89047a8a47d0dc013da5c0e05806e8a6cddb"
[[package]]
name = "orx-split-vec"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52b9dbfa8c7069ae73a890870d3aa9097a897d616751d3d0278f2b42d5214730"
dependencies = [
"orx-pinned-vec",
"orx-pseudo-default",
]
[[package]]
name = "os_str_bytes"
version = "7.0.0"
@ -2636,25 +2722,34 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "salsa"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=a1bf3a613f451af7fc0a59411c56abc47fe8e8e1#a1bf3a613f451af7fc0a59411c56abc47fe8e8e1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3#cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3"
dependencies = [
"arc-swap",
"boomphf",
"crossbeam",
"dashmap 5.5.3",
"dashmap 6.0.1",
"hashlink",
"indexmap",
"log",
"orx-concurrent-vec",
"parking_lot",
"rustc-hash 1.1.0",
"rustc-hash 2.0.0",
"salsa-macro-rules",
"salsa-macros",
"smallvec",
"tracing",
]
[[package]]
name = "salsa-macro-rules"
version = "0.1.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3#cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3"
[[package]]
name = "salsa-macros"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=a1bf3a613f451af7fc0a59411c56abc47fe8e8e1#a1bf3a613f451af7fc0a59411c56abc47fe8e8e1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3#cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
@ -3752,6 +3847,15 @@ version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]]
name = "wyhash"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf6e163c25e3fac820b4b453185ea2dea3b6a3e0a721d4d23d75bd33734c295"
dependencies = [
"rand_core",
]
[[package]]
name = "yansi"
version = "0.5.1"


@ -4,7 +4,7 @@ resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.75"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@ -107,7 +107,7 @@ rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "cd339fc1c9a6ea0ffb1d09bd3bffb5633f776ef3" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }


@ -1,34 +1,25 @@
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;
use salsa::{Cancelled, Database, DbWithJar};
use salsa::Cancelled;
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb};
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::files::{File, Files};
use ruff_db::program::{Program, ProgramSettings};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
use ruff_db::{Db as SourceDb, Upcast};
use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
use crate::workspace::{check_file, Package, Package_files, Workspace, WorkspaceMetadata};
use crate::lint::Diagnostics;
use crate::workspace::{check_file, Workspace, WorkspaceMetadata};
mod changes;
pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
#[salsa::db]
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> {}
#[salsa::jar(db=Db)]
pub struct Jar(
Workspace,
Package,
Package_files,
lint_syntax,
lint_semantic,
unwind_if_cancelled,
);
#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
#[salsa::db]
pub struct RootDatabase {
workspace: Option<Workspace>,
storage: salsa::Storage<RootDatabase>,
@ -127,10 +118,13 @@ impl Upcast<dyn ResolverDb> for RootDatabase {
}
}
#[salsa::db]
impl ResolverDb for RootDatabase {}
#[salsa::db]
impl SemanticDb for RootDatabase {}
#[salsa::db]
impl SourceDb for RootDatabase {
fn vendored(&self) -> &VendoredFileSystem {
vendored_typeshed_stubs()
@ -145,33 +139,23 @@ impl SourceDb for RootDatabase {
}
}
impl Database for RootDatabase {}
#[salsa::db]
impl salsa::Database for RootDatabase {}
#[salsa::db]
impl Db for RootDatabase {}
impl salsa::ParallelDatabase for RootDatabase {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
workspace: self.workspace,
storage: self.storage.snapshot(),
files: self.files.snapshot(),
system: self.system.clone(),
})
}
}
#[cfg(test)]
pub(crate) mod tests {
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use crate::db::Db;
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb};
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
use ruff_db::{Db as SourceDb, Upcast};
use super::{Db, Jar};
#[salsa::db(Jar, SemanticJar, ResolverJar, SourceJar)]
#[salsa::db]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
files: Files,
@ -184,7 +168,7 @@ pub(crate) mod tests {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: vendored_typeshed_stubs().snapshot(),
vendored: vendored_typeshed_stubs().clone(),
files: Files::default(),
}
}
@ -200,6 +184,7 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl SourceDb for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
@ -241,20 +226,13 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl red_knot_module_resolver::Db for TestDb {}
#[salsa::db]
impl red_knot_python_semantic::Db for TestDb {}
#[salsa::db]
impl Db for TestDb {}
#[salsa::db]
impl salsa::Database for TestDb {}
impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
files: self.files.snapshot(),
system: self.system.snapshot(),
vendored: self.vendored.snapshot(),
})
}
}
}


@ -1,5 +1,3 @@
use crate::db::Jar;
pub mod db;
pub mod lint;
pub mod watch;


@ -76,7 +76,7 @@ fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
#[allow(unreachable_pub)]
#[salsa::tracked(return_ref)]
pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
let _span = trace_span!("lint_semantic", ?file_id).entered();
let _span = trace_span!("lint_semantic", file=?file_id.path(db)).entered();
let source = source_text(db.upcast(), file_id);
let parsed = parsed_module(db.upcast(), file_id);


@ -2,7 +2,6 @@ use std::sync::Mutex;
use clap::Parser;
use crossbeam::channel as crossbeam_channel;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
@ -111,7 +110,7 @@ pub fn main() -> anyhow::Result<()> {
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
let db = RootDatabase::new(workspace_metadata, program_settings, system);
let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);
@ -125,11 +124,14 @@ pub fn main() -> anyhow::Result<()> {
}
})?;
let mut db = salsa::Handle::new(db);
if watch {
main_loop.watch(&mut db)?;
} else {
main_loop.run(&mut db);
}
};
std::mem::forget(db);
Ok(())
}
@ -162,7 +164,7 @@ impl MainLoop {
)
}
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<()> {
fn watch(mut self, db: &mut salsa::Handle<RootDatabase>) -> anyhow::Result<()> {
let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| {
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
@ -170,12 +172,11 @@ impl MainLoop {
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
self.run(db);
Ok(())
}
#[allow(clippy::print_stderr)]
fn run(mut self, db: &mut RootDatabase) {
fn run(mut self, db: &mut salsa::Handle<RootDatabase>) {
// Schedule the first check.
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
let mut revision = 0usize;
@ -185,7 +186,7 @@ impl MainLoop {
match message {
MainLoopMessage::CheckWorkspace => {
let db = db.snapshot();
let db = db.clone();
let sender = self.sender.clone();
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
@ -220,7 +221,7 @@ impl MainLoop {
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes);
db.get_mut().apply_changes(changes);
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}
@ -231,6 +232,8 @@ impl MainLoop {
}
}
}
self.exit();
}
#[allow(clippy::print_stderr, clippy::unused_self)]
@ -296,6 +299,9 @@ impl LoggingFilter {
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
self.trace_level
} else if meta.target().starts_with("salsa") && self.trace_level <= Level::INFO {
// Salsa emits very verbose query traces with level info. Let's not show these to the user.
Level::WARN
} else {
Level::INFO
};
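The main-loop hunks above swap the old snapshot-based concurrency for salsa::Handle: the database is wrapped in a handle, clones of the handle are handed to background check tasks, and get_mut is used for mutations. A minimal sketch of that flow with an illustrative MyDatabase and a hypothetical apply_changes method; only Handle::new, clone, and get_mut are taken from the diff itself:

#[salsa::db]
#[derive(Default)]
struct MyDatabase {
    storage: salsa::Storage<Self>,
}

#[salsa::db]
impl salsa::Database for MyDatabase {}

impl MyDatabase {
    // Hypothetical stand-in for RootDatabase::apply_changes.
    fn apply_changes(&mut self) {}
}

fn main() {
    let mut db = salsa::Handle::new(MyDatabase::default());

    // Cloning the handle replaces the old ParallelDatabase::snapshot; the
    // clone can be moved onto a worker thread for a background check run.
    let background = db.clone();
    let worker = std::thread::spawn(move || drop(background));

    // Exclusive access for applying file changes; per the comment in the
    // diff, this cancels pending queries on other clones and waits for
    // them to complete first.
    db.get_mut().apply_changes();

    worker.join().unwrap();
}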


@ -1,6 +1,4 @@
// TODO: Fix clippy warnings created by salsa macros
#![allow(clippy::used_underscore_binding, unreachable_pub)]
use salsa::Setter as _;
use std::{collections::BTreeMap, sync::Arc};
use rustc_hash::{FxBuildHasher, FxHashSet};
@ -67,7 +65,6 @@ mod metadata;
/// holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
#[id]
#[return_ref]
root_buf: SystemPathBuf,
@ -90,7 +87,6 @@ pub struct Package {
pub name: Name,
/// The path to the root directory of the package.
#[id]
#[return_ref]
root_buf: SystemPathBuf,


@ -3,10 +3,12 @@ use std::ops::Deref;
use std::sync::Arc;
use rustc_hash::FxHashSet;
use salsa::Setter;
use ruff_db::files::File;
use crate::db::Db;
use crate::workspace::Package;
use ruff_db::files::File;
/// The indexed files of a package.
///


@ -3,6 +3,7 @@
use std::time::Duration;
use anyhow::{anyhow, Context};
use salsa::Setter;
use red_knot::db::RootDatabase;
use red_knot::watch;


@ -1,29 +1,12 @@
use ruff_db::Upcast;
use crate::resolver::{
editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
module_resolution_settings, resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;
#[salsa::jar(db=Db)]
pub struct Jar(
ModuleNameIngredient<'_>,
module_resolution_settings,
editable_install_resolution_paths,
resolve_module_query,
file_to_module,
parse_typeshed_versions,
);
pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}
#[salsa::db]
pub trait Db: ruff_db::Db + Upcast<dyn ruff_db::Db> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync;
use salsa::DebugWithDb;
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
@ -32,7 +15,7 @@ pub(crate) mod tests {
use super::*;
#[salsa::db(Jar, ruff_db::Jar)]
#[salsa::db]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
system: TestSystem,
@ -46,7 +29,7 @@ pub(crate) mod tests {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: vendored_typeshed_stubs().snapshot(),
vendored: vendored_typeshed_stubs().clone(),
events: sync::Arc::default(),
files: Files::default(),
}
@ -81,6 +64,7 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl ruff_db::Db for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
@ -95,6 +79,7 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl Db for TestDb {}
impl DbWithTestSystem for TestDb {
@ -107,23 +92,14 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl salsa::Database for TestDb {
fn salsa_event(&self, event: salsa::Event) {
tracing::trace!("event: {:?}", event.debug(self));
self.attach(|_| {
tracing::trace!("event: {event:?}");
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
system: self.system.snapshot(),
vendored: self.vendored.snapshot(),
files: self.files.snapshot(),
events: self.events.clone(),
})
});
}
}
}


@ -1,6 +1,6 @@
use std::iter::FusedIterator;
pub use db::{Db, Jar};
pub use db::Db;
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::resolve_module;


@ -3,7 +3,6 @@ use std::sync::Arc;
use ruff_db::files::File;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::path::SearchPath;
@ -62,17 +61,6 @@ impl std::fmt::Debug for Module {
}
}
impl salsa::DebugWithDb<dyn Db> for Module {
fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
f.debug_struct("Module")
.field("name", &self.name())
.field("kind", &self.kind())
.field("file", &self.file().debug(db.upcast()))
.field("search_path", &self.search_path())
.finish()
}
}
#[derive(PartialEq, Eq)]
struct ModuleInner {
name: ModuleName,


@ -17,7 +17,7 @@ use crate::state::ResolverState;
/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
let interned_name = internal::ModuleNameIngredient::new(db, module_name);
let interned_name = ModuleNameIngredient::new(db, module_name);
resolve_module_query(db, interned_name)
}
@ -29,7 +29,7 @@ pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
#[salsa::tracked]
pub(crate) fn resolve_module_query<'db>(
db: &'db dyn Db,
module_name: internal::ModuleNameIngredient<'db>,
module_name: ModuleNameIngredient<'db>,
) -> Option<Module> {
let name = module_name.name(db);
let _span = tracing::trace_span!("resolve_module", %name).entered();
@ -417,22 +417,13 @@ impl ModuleResolutionSettings {
}
}
// The singleton methods generated by salsa are all `pub` instead of `pub(crate)` which triggers
// `unreachable_pub`. Work around this by creating a module and allow `unreachable_pub` for it.
// Salsa also generates uses to `_db` variables for `interned` which triggers `clippy::used_underscore_binding`. Suppress that too
// TODO(micha): Contribute a fix for this upstream where the singleton methods have the same visibility as the struct.
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
use crate::module_name::ModuleName;
/// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
///
/// This is needed because Salsa requires that all query arguments are salsa ingredients.
#[salsa::interned]
pub(crate) struct ModuleNameIngredient<'db> {
/// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
///
/// This is needed because Salsa requires that all query arguments are salsa ingredients.
#[salsa::interned]
struct ModuleNameIngredient<'db> {
#[return_ref]
pub(super) name: ModuleName,
}
}
/// Modules that are builtin to the Python interpreter itself.
@ -626,10 +617,11 @@ impl PackageKind {
#[cfg(test)]
mod tests {
use internal::ModuleNameIngredient;
use ruff_db::files::{system_path_to_file, File, FilePath};
use ruff_db::system::{DbWithTestSystem, OsSystem, SystemPath};
use ruff_db::testing::assert_function_query_was_not_run;
use ruff_db::testing::{
assert_const_function_query_was_not_run, assert_function_query_was_not_run,
};
use ruff_db::Db;
use crate::db::tests::TestDb;
@ -1326,10 +1318,10 @@ mod tests {
.unwrap();
let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
let events = db.take_salsa_events();
assert_function_query_was_not_run::<resolve_module_query, _, _>(
assert_function_query_was_not_run(
&db,
|res| &res.function,
&ModuleNameIngredient::new(&db, functools_module_name.clone()),
resolve_module_query,
ModuleNameIngredient::new(&db, functools_module_name.clone()),
&events,
);
assert_eq!(functools_module.search_path(), &stdlib);
@ -1578,12 +1570,7 @@ not_a_directory
&FilePath::system("/y/src/bar.py")
);
let events = db.take_salsa_events();
assert_function_query_was_not_run::<editable_install_resolution_paths, _, _>(
&db,
|res| &res.function,
&(),
&events,
);
assert_const_function_query_was_not_run(&db, editable_install_resolution_paths, &events);
}
#[test]
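The resolver hunks above move ModuleNameIngredient out of its internal module; the surviving doc comment explains why the wrapper exists at all: Salsa requires every query argument to be a Salsa ingredient, so a plain value gets wrapped in a #[salsa::interned] struct before being passed to a tracked query. A minimal sketch of that pattern with hypothetical names (NameIngredient, name_length); the attribute and accessor shapes mirror the diff, everything else is illustrative:

#[salsa::interned]
struct NameIngredient<'db> {
    #[return_ref]
    name: String,
}

#[salsa::tracked]
fn name_length<'db>(db: &'db dyn salsa::Database, name: NameIngredient<'db>) -> usize {
    // The #[return_ref] accessor hands back &String rather than a clone.
    name.name(db).len()
}

fn lookup(db: &dyn salsa::Database, raw: String) -> usize {
    // Interning the plain value yields the ingredient the tracked query expects.
    name_length(db, NameIngredient::new(db, raw))
}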


@ -1,55 +1,23 @@
use salsa::DbWithJar;
use red_knot_module_resolver::Db as ResolverDb;
use ruff_db::{Db as SourceDb, Upcast};
use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopeId;
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::types::{
infer_definition_types, infer_expression_types, infer_scope_types, ClassType, FunctionType,
IntersectionType, UnionType,
};
#[salsa::jar(db=Db)]
pub struct Jar(
ScopeId<'_>,
Definition<'_>,
Expression<'_>,
FunctionType<'_>,
ClassType<'_>,
UnionType<'_>,
IntersectionType<'_>,
symbol_table,
use_def_map,
global_scope,
semantic_index,
infer_definition_types,
infer_expression_types,
infer_scope_types,
builtins_scope,
);
use ruff_db::Upcast;
/// Database giving access to semantic information about a Python program.
pub trait Db: SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn ResolverDb> {}
#[salsa::db]
pub trait Db: ResolverDb + Upcast<dyn ResolverDb> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::Arc;
use salsa::DebugWithDb;
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb};
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
use ruff_db::{Db as SourceDb, Upcast};
use super::{Db, Jar};
use super::Db;
#[salsa::db(Jar, ResolverJar, SourceJar)]
#[salsa::db]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
files: Files,
@ -63,7 +31,7 @@ pub(crate) mod tests {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: vendored_typeshed_stubs().snapshot(),
vendored: vendored_typeshed_stubs().clone(),
events: std::sync::Arc::default(),
files: Files::default(),
}
@ -99,6 +67,7 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl SourceDb for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
@ -131,26 +100,20 @@ pub(crate) mod tests {
}
}
#[salsa::db]
impl red_knot_module_resolver::Db for TestDb {}
#[salsa::db]
impl Db for TestDb {}
#[salsa::db]
impl salsa::Database for TestDb {
fn salsa_event(&self, event: salsa::Event) {
tracing::trace!("event: {:?}", event.debug(self));
self.attach(|_| {
tracing::trace!("event: {event:?}");
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
files: self.files.snapshot(),
system: self.system.snapshot(),
vendored: self.vendored.snapshot(),
events: self.events.clone(),
})
});
}
}
}


@ -2,7 +2,7 @@ use std::hash::BuildHasherDefault;
use rustc_hash::FxHasher;
pub use db::{Db, Jar};
pub use db::Db;
pub use semantic_model::{HasTy, SemanticModel};
pub mod ast_node_ref;


@ -2,6 +2,7 @@ use std::iter::FusedIterator;
use std::sync::Arc;
use rustc_hash::FxHashMap;
use salsa::plumbing::AsId;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
@ -17,6 +18,8 @@ use crate::semantic_index::symbol::{
};
use crate::Db;
pub(crate) use self::use_def::UseDefMap;
pub mod ast_ids;
mod builder;
pub mod definition;
@ -24,8 +27,6 @@ pub mod expression;
pub mod symbol;
mod use_def;
pub(crate) use self::use_def::UseDefMap;
type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
/// Returns the semantic index for `file`.
@ -33,7 +34,7 @@ type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
/// Prefer using [`symbol_table`] when working with symbols from a single scope.
#[salsa::tracked(return_ref, no_eq)]
pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
let _span = tracing::trace_span!("semantic_index", ?file).entered();
let _span = tracing::trace_span!("semantic_index", file=?file.path(db)).entered();
let parsed = parsed_module(db.upcast(), file);
@ -47,8 +48,10 @@ pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
/// is unchanged.
#[salsa::tracked]
pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
let _span = tracing::trace_span!("symbol_table", ?scope).entered();
let index = semantic_index(db, scope.file(db));
let file = scope.file(db);
let _span =
tracing::trace_span!("symbol_table", scope=?scope.as_id(), file=?file.path(db)).entered();
let index = semantic_index(db, file);
index.symbol_table(scope.file_scope_id(db))
}
@ -60,8 +63,10 @@ pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<Sym
/// is unchanged.
#[salsa::tracked]
pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
let _span = tracing::trace_span!("use_def_map", ?scope).entered();
let index = semantic_index(db, scope.file(db));
let file = scope.file(db);
let _span =
tracing::trace_span!("use_def_map", scope=?scope.as_id(), file=?file.path(db)).entered();
let index = semantic_index(db, file);
index.use_def_map(scope.file_scope_id(db))
}
@ -69,7 +74,7 @@ pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseD
/// Returns the module global scope of `file`.
#[salsa::tracked]
pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
let _span = tracing::trace_span!("global_scope", ?file).entered();
let _span = tracing::trace_span!("global_scope", file=?file.path(db)).entered();
FileScopeId::global().to_scope_id(db, file)
}


@ -10,7 +10,7 @@ use crate::{Db, FxOrderSet};
mod display;
mod infer;
pub(crate) use self::infer::{infer_definition_types, infer_expression_types, infer_scope_types};
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};
/// Infer the public type of a symbol (its type as seen from outside its scope).
pub(crate) fn symbol_ty<'db>(


@ -26,7 +26,7 @@ impl Display for DisplayType<'_> {
Type::Unbound => f.write_str("Unbound"),
Type::None => f.write_str("None"),
Type::Module(file) => {
write!(f, "<module '{:?}'>", file.path(self.db.upcast()))
write!(f, "<module '{:?}'>", file.path(self.db))
}
// TODO functions and classes should display using a fully qualified name
Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),


@ -22,6 +22,7 @@
//! holds types for every [`Definition`] and expression within the inferred region.
use rustc_hash::FxHashMap;
use salsa;
use salsa::plumbing::AsId;
use red_knot_module_resolver::{resolve_module, ModuleName};
use ruff_db::files::File;
@ -48,7 +49,9 @@ use crate::Db;
#[salsa::tracked(return_ref)]
pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> {
let file = scope.file(db);
let _span = tracing::trace_span!("infer_scope_types", ?scope, ?file).entered();
let _span =
tracing::trace_span!("infer_scope_types", scope=?scope.as_id(), file=?file.path(db))
.entered();
// Using the index here is fine because the code below depends on the AST anyway.
// The isolation of the query is by the return inferred types.
@ -77,7 +80,12 @@ pub(crate) fn infer_definition_types<'db>(
definition: Definition<'db>,
) -> TypeInference<'db> {
let file = definition.file(db);
let _span = tracing::trace_span!("infer_definition_types", ?definition, ?file,).entered();
let _span = tracing::trace_span!(
"infer_definition_types",
definition = ?definition.as_id(),
file = ?file.path(db)
)
.entered();
let index = semantic_index(db, file);
@ -95,7 +103,9 @@ pub(crate) fn infer_expression_types<'db>(
expression: Expression<'db>,
) -> TypeInference<'db> {
let file = expression.file(db);
let _span = tracing::trace_span!("infer_expression_types", ?expression, ?file).entered();
let _span =
tracing::trace_span!("infer_expression_types", expression=?expression.as_id(), file=?file.path(db))
.entered();
let index = semantic_index(db, file);
@ -1491,12 +1501,9 @@ mod tests {
use crate::builtins::builtins_scope;
use crate::db::tests::TestDb;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::FileScopeId;
use crate::types::{
global_scope, global_symbol_ty_by_name, infer_definition_types, symbol_table,
symbol_ty_by_name, use_def_map, Type,
};
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::types::{global_symbol_ty_by_name, infer_definition_types, symbol_ty_by_name, Type};
use crate::{HasTy, SemanticModel};
fn setup_db() -> TestDb {
@ -2231,6 +2238,14 @@ mod tests {
Ok(())
}
fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
let scope = global_scope(db, file);
*use_def_map(db, scope)
.public_definitions(symbol_table(db, scope).symbol_id_by_name(name).unwrap())
.first()
.unwrap()
}
#[test]
fn big_int() -> anyhow::Result<()> {
let mut db = setup_db();
@ -2315,14 +2330,6 @@ mod tests {
Ok(())
}
fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
let scope = global_scope(db, file);
*use_def_map(db, scope)
.public_definitions(symbol_table(db, scope).symbol_id_by_name(name).unwrap())
.first()
.unwrap()
}
#[test]
fn dependency_public_symbol_type_change() -> anyhow::Result<()> {
let mut db = setup_db();
@ -2375,10 +2382,10 @@ mod tests {
let events = db.take_salsa_events();
assert_function_query_was_not_run::<infer_definition_types, _, _>(
assert_function_query_was_not_run(
&db,
|ty| &ty.function,
&first_public_def(&db, a, "x"),
infer_definition_types,
first_public_def(&db, a, "x"),
&events,
);
@ -2411,10 +2418,10 @@ mod tests {
let events = db.take_salsa_events();
assert_function_query_was_not_run::<infer_definition_types, _, _>(
assert_function_query_was_not_run(
&db,
|ty| &ty.function,
&first_public_def(&db, a, "x"),
infer_definition_types,
first_public_def(&db, a, "x"),
&events,
);
Ok(())


@ -2,14 +2,16 @@ use std::sync::Arc;
use countme::Count;
use dashmap::mapref::entry::Entry;
use salsa::Setter;
pub use path::FilePath;
use ruff_notebook::{Notebook, NotebookError};
use crate::file_revision::FileRevision;
use crate::files::private::FileStatus;
use crate::system::{Metadata, SystemPath, SystemPathBuf, SystemVirtualPath, SystemVirtualPathBuf};
use crate::vendored::{VendoredPath, VendoredPathBuf};
use crate::{Db, FxDashMap};
pub use path::FilePath;
use ruff_notebook::{Notebook, NotebookError};
mod path;
@ -61,7 +63,7 @@ impl Files {
///
/// The operation always succeeds even if the path doesn't exist on disk, isn't accessible or if the path points to a directory.
/// In these cases, a file with status [`FileStatus::Deleted`] is returned.
#[tracing::instrument(level = "trace", skip(self, db), ret)]
#[tracing::instrument(level = "trace", skip(self, db))]
fn system(&self, db: &dyn Db, path: &SystemPath) -> File {
let absolute = SystemPath::absolute(path, db.system().current_directory());
@ -104,7 +106,7 @@ impl Files {
/// Looks up a vendored file by its path. Returns `Some` if a vendored file for the given path
/// exists and `None` otherwise.
#[tracing::instrument(level = "trace", skip(self, db), ret)]
#[tracing::instrument(level = "trace", skip(self, db))]
fn vendored(&self, db: &dyn Db, path: &VendoredPath) -> Option<File> {
let file = match self.inner.vendored_by_path.entry(path.to_path_buf()) {
Entry::Occupied(entry) => *entry.get(),
@ -196,14 +198,6 @@ impl Files {
file.sync(db);
}
}
/// Creates a salsa like snapshot. The instances share
/// the same path-to-file mapping.
pub fn snapshot(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
}
impl std::fmt::Debug for Files {
@ -221,7 +215,6 @@ impl std::fmt::Debug for Files {
#[salsa::input]
pub struct File {
/// The path of the file.
#[id]
#[return_ref]
pub path: FilePath,


@ -1,12 +1,8 @@
use std::hash::BuildHasherDefault;
use program::Program;
use rustc_hash::FxHasher;
use salsa::DbWithJar;
use crate::files::{File, Files};
use crate::parsed::parsed_module;
use crate::source::{line_index, source_text};
use crate::files::Files;
use crate::system::System;
use crate::vendored::VendoredFileSystem;
@ -22,11 +18,9 @@ pub mod vendored;
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;
#[salsa::jar(db=Db)]
pub struct Jar(File, Program, source_text, line_index, parsed_module);
/// Most basic database that gives access to files, the host system, source code, and parsed AST.
pub trait Db: DbWithJar<Jar> {
#[salsa::db]
pub trait Db: salsa::Database {
fn vendored(&self) -> &VendoredFileSystem;
fn system(&self) -> &dyn System;
fn files(&self) -> &Files;
@ -42,19 +36,17 @@ pub trait Upcast<T: ?Sized> {
mod tests {
use std::sync::Arc;
use salsa::DebugWithDb;
use crate::files::Files;
use crate::system::TestSystem;
use crate::system::{DbWithTestSystem, System};
use crate::vendored::VendoredFileSystem;
use crate::{Db, Jar};
use crate::Db;
/// Database that can be used for testing.
///
/// Uses an in memory filesystem and it stubs out the vendored files by default.
#[salsa::db]
#[derive(Default)]
#[salsa::db(Jar)]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
files: Files,
@ -101,6 +93,7 @@ mod tests {
}
}
#[salsa::db]
impl Db for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
@ -125,23 +118,14 @@ mod tests {
}
}
#[salsa::db]
impl salsa::Database for TestDb {
fn salsa_event(&self, event: salsa::Event) {
tracing::trace!("event: {:?}", event.debug(self));
salsa::Database::attach(self, |_| {
tracing::trace!("event: {:?}", event);
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
system: self.system.snapshot(),
files: self.files.snapshot(),
events: self.events.clone(),
vendored: self.vendored.snapshot(),
})
});
}
}
}


@ -22,7 +22,7 @@ use crate::Db;
/// for determining if a query result is unchanged.
#[salsa::tracked(return_ref, no_eq)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
let _span = tracing::trace_span!("parse_module", file = ?file).entered();
let _span = tracing::trace_span!("parse_module", file = ?file.path(db)).entered();
let source = source_text(db, file);
let path = file.path(db);
@ -41,7 +41,7 @@ pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
}
/// Cheap cloneable wrapper around the parsed module.
#[derive(Clone, PartialEq)]
#[derive(Clone)]
pub struct ParsedModule {
inner: Arc<Parsed<ModModule>>,
}


@ -1,6 +1,3 @@
// TODO: Fix clippy warnings in Salsa macros
#![allow(clippy::needless_lifetimes, clippy::clone_on_copy)]
use crate::{system::SystemPathBuf, Db};
#[salsa::input(singleton)]


@ -2,7 +2,6 @@ use std::ops::Deref;
use std::sync::Arc;
use countme::Count;
use salsa::DebugWithDb;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
@ -14,9 +13,10 @@ use crate::Db;
/// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked]
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let _span = tracing::trace_span!("source_text", ?file).entered();
let path = file.path(db);
let _span = tracing::trace_span!("source_text", file=?path).entered();
let is_notebook = match file.path(db) {
let is_notebook = match path {
FilePath::System(system) => system.extension().is_some_and(|extension| {
PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
}),
@ -129,7 +129,7 @@ enum SourceTextKind {
/// Computes the [`LineIndex`] for `file`.
#[salsa::tracked]
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
let _span = tracing::trace_span!("line_index", file = ?file.debug(db)).entered();
let _span = tracing::trace_span!("line_index", file = ?file).entered();
let source = source_text(db, file);
@ -139,6 +139,7 @@ pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
#[cfg(test)]
mod tests {
use salsa::EventKind;
use salsa::Setter as _;
use ruff_source_file::OneIndexed;
use ruff_text_size::TextSize;


@ -68,13 +68,6 @@ impl MemoryFileSystem {
&self.inner.cwd
}
#[must_use]
pub fn snapshot(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
pub fn metadata(&self, path: impl AsRef<SystemPath>) -> Result<Metadata> {
fn metadata(fs: &MemoryFileSystem, path: &SystemPath) -> Result<Metadata> {
let by_path = fs.inner.by_path.read().unwrap();


@ -49,12 +49,6 @@ impl OsSystem {
fn permissions(_metadata: &std::fs::Metadata) -> Option<u32> {
None
}
pub fn snapshot(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
}
impl System for OsSystem {


@ -25,12 +25,6 @@ pub struct TestSystem {
}
impl TestSystem {
pub fn snapshot(&self) -> Self {
Self {
inner: self.inner.snapshot(),
}
}
/// Returns the memory file system.
///
/// ## Panics
@ -235,15 +229,6 @@ enum TestSystemInner {
System(Arc<dyn System + RefUnwindSafe + Send + Sync>),
}
impl TestSystemInner {
fn snapshot(&self) -> Self {
match self {
Self::Stub(system) => Self::Stub(system.snapshot()),
Self::System(system) => Self::System(Arc::clone(system)),
}
}
}
impl Default for TestSystemInner {
fn default() -> Self {
Self::Stub(MemoryFileSystem::default())


@ -1,116 +1,230 @@
//! Test helpers for working with Salsa databases
use std::fmt;
use std::marker::PhantomData;
pub fn assert_function_query_was_not_run<Db, Q, QDb, I, R>(
db: &Db,
query: Q,
input: I,
events: &[salsa::Event],
) where
Db: salsa::Database,
Q: Fn(QDb, I) -> R,
I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
{
let id = input.as_id().as_u32();
let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);
use salsa::id::AsId;
use salsa::ingredient::Ingredient;
use salsa::storage::HasIngredientsFor;
db.attach(|_| {
if let Some(will_execute_event) = will_execute_event {
panic!("Expected query {query_name}({id}) not to have run but it did: {will_execute_event:?}");
}
});
}
pub fn assert_const_function_query_was_not_run<Db, Q, QDb, R>(
db: &Db,
query: Q,
events: &[salsa::Event],
) where
Db: salsa::Database,
Q: Fn(QDb) -> R,
{
let (query_name, will_execute_event) = find_will_execute_event(db, query, (), events);
db.attach(|_| {
if let Some(will_execute_event) = will_execute_event {
panic!(
"Expected query {query_name}() not to have run but it did: {will_execute_event:?}"
);
}
});
}
/// Assert that the Salsa query described by the generic parameter `C`
/// was executed at least once with the input `input`
/// in the history span represented by `events`.
pub fn assert_function_query_was_run<'db, C, Db, Jar>(
db: &'db Db,
to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
input: &C::Input<'db>,
pub fn assert_function_query_was_run<Db, Q, QDb, I, R>(
db: &Db,
query: Q,
input: I,
events: &[salsa::Event],
) where
C: salsa::function::Configuration<Jar = Jar>
+ salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
Jar: HasIngredientsFor<C>,
Db: salsa::DbWithJar<Jar>,
C::Input<'db>: AsId,
Db: salsa::Database,
Q: Fn(QDb, I) -> R,
I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
{
function_query_was_run(db, to_function, input, events, true);
}
let id = input.as_id().as_u32();
let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);
/// Assert that there were no executions with the input `input`
/// of the Salsa query described by the generic parameter `C`
/// in the history span represented by `events`.
pub fn assert_function_query_was_not_run<'db, C, Db, Jar>(
db: &'db Db,
to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
input: &C::Input<'db>,
events: &[salsa::Event],
) where
C: salsa::function::Configuration<Jar = Jar>
+ salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
Jar: HasIngredientsFor<C>,
Db: salsa::DbWithJar<Jar>,
C::Input<'db>: AsId,
{
function_query_was_run(db, to_function, input, events, false);
}
fn function_query_was_run<'db, C, Db, Jar>(
db: &'db Db,
to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
input: &C::Input<'db>,
events: &[salsa::Event],
should_have_run: bool,
) where
C: salsa::function::Configuration<Jar = Jar>
+ salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
Jar: HasIngredientsFor<C>,
Db: salsa::DbWithJar<Jar>,
C::Input<'db>: AsId,
{
let (jar, _) =
<_ as salsa::storage::HasJar<<C as salsa::storage::IngredientsFor>::Jar>>::jar(db);
let ingredient = jar.ingredient();
let function_ingredient = to_function(ingredient);
let ingredient_index =
<salsa::function::FunctionIngredient<C> as Ingredient<Db>>::ingredient_index(
function_ingredient,
db.attach(|_| {
assert!(
will_execute_event.is_some(),
"Expected query {query_name}({id:?}) to have run but it did not:\n{events:#?}"
);
});
}
let did_run = events.iter().any(|event| {
pub fn find_will_execute_event<'a, Q, I>(
db: &dyn salsa::Database,
query: Q,
input: I,
events: &'a [salsa::Event],
) -> (&'static str, Option<&'a salsa::Event>)
where
I: salsa::plumbing::AsId,
{
let query_name = query_name(&query);
let event = events.iter().find(|event| {
if let salsa::EventKind::WillExecute { database_key } = event.kind {
database_key.ingredient_index() == ingredient_index
dbg!(db
.lookup_ingredient(database_key.ingredient_index())
.debug_name())
== query_name
&& database_key.key_index() == input.as_id()
} else {
false
}
});
if should_have_run && !did_run {
panic!(
"Expected query {:?} to have run but it didn't",
DebugIdx {
db: PhantomData::<Db>,
value_id: input.as_id(),
ingredient: function_ingredient,
}
);
} else if !should_have_run && did_run {
panic!(
"Expected query {:?} not to have run but it did",
DebugIdx {
db: PhantomData::<Db>,
value_id: input.as_id(),
ingredient: function_ingredient,
}
);
}
(query_name, event)
}
struct DebugIdx<'a, I, Db>
where
I: Ingredient<Db>,
{
value_id: salsa::Id,
ingredient: &'a I,
db: PhantomData<Db>,
fn query_name<Q>(_query: &Q) -> &'static str {
let full_qualified_query_name = std::any::type_name::<Q>();
full_qualified_query_name
.rsplit_once("::")
.map(|(_, name)| name)
.unwrap_or(full_qualified_query_name)
}
impl<'a, I, Db> fmt::Debug for DebugIdx<'a, I, Db>
where
I: Ingredient<Db>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
self.ingredient.fmt_index(Some(self.value_id), f)
#[test]
fn query_was_not_run() {
use crate::tests::TestDb;
use salsa::prelude::*;
#[salsa::input]
struct Input {
text: String,
}
#[salsa::tracked]
fn len(db: &dyn salsa::Database, input: Input) -> usize {
input.text(db).len()
}
let mut db = TestDb::new();
let hello = Input::new(&db, "Hello, world!".to_string());
let goodbye = Input::new(&db, "Goodbye!".to_string());
assert_eq!(len(&db, hello), 13);
assert_eq!(len(&db, goodbye), 8);
// Change the input of one query
goodbye.set_text(&mut db).to("Bye".to_string());
db.clear_salsa_events();
assert_eq!(len(&db, goodbye), 3);
let events = db.take_salsa_events();
assert_function_query_was_run(&db, len, goodbye, &events);
assert_function_query_was_not_run(&db, len, hello, &events);
}
#[test]
#[should_panic(expected = "Expected query len(0) not to have run but it did:")]
fn query_was_not_run_fails_if_query_was_run() {
use crate::tests::TestDb;
use salsa::prelude::*;
#[salsa::input]
struct Input {
text: String,
}
#[salsa::tracked]
fn len(db: &dyn salsa::Database, input: Input) -> usize {
input.text(db).len()
}
let mut db = TestDb::new();
let hello = Input::new(&db, "Hello, world!".to_string());
assert_eq!(len(&db, hello), 13);
// Change the input
hello.set_text(&mut db).to("Hy".to_string());
db.clear_salsa_events();
assert_eq!(len(&db, hello), 2);
let events = db.take_salsa_events();
assert_function_query_was_not_run(&db, len, hello, &events);
}
#[test]
#[should_panic(expected = "Expected query len() not to have run but it did:")]
fn const_query_was_not_run_fails_if_query_was_run() {
use crate::tests::TestDb;
use salsa::prelude::*;
#[salsa::input]
struct Input {
text: String,
}
#[salsa::tracked]
fn len(db: &dyn salsa::Database) -> usize {
db.report_untracked_read();
5
}
let mut db = TestDb::new();
let hello = Input::new(&db, "Hello, world!".to_string());
assert_eq!(len(&db), 5);
// Create a new revision
db.clear_salsa_events();
hello.set_text(&mut db).to("Hy".to_string());
assert_eq!(len(&db), 5);
let events = db.take_salsa_events();
dbg!(&events);
assert_const_function_query_was_not_run(&db, len, &events);
}
#[test]
#[should_panic(expected = "Expected query len(0) to have run but it did not:")]
fn query_was_run_fails_if_query_was_not_run() {
use crate::tests::TestDb;
use salsa::prelude::*;
#[salsa::input]
struct Input {
text: String,
}
#[salsa::tracked]
fn len(db: &dyn salsa::Database, input: Input) -> usize {
input.text(db).len()
}
let mut db = TestDb::new();
let hello = Input::new(&db, "Hello, world!".to_string());
let goodbye = Input::new(&db, "Goodbye!".to_string());
assert_eq!(len(&db, hello), 13);
assert_eq!(len(&db, goodbye), 8);
// Change the input of one query
goodbye.set_text(&mut db).to("Bye".to_string());
db.clear_salsa_events();
assert_eq!(len(&db, goodbye), 3);
let events = db.take_salsa_events();
assert_function_query_was_run(&db, len, hello, &events);
}


@ -20,6 +20,7 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
///
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
/// Directories are supported, but symlinks and hardlinks cannot exist.
#[derive(Clone)]
pub struct VendoredFileSystem {
inner: Arc<Mutex<VendoredZipArchive>>,
}
@ -39,12 +40,6 @@ impl VendoredFileSystem {
})
}
pub fn snapshot(&self) -> Self {
Self {
inner: Arc::clone(&self.inner),
}
}
pub fn exists(&self, path: impl AsRef<VendoredPath>) -> bool {
fn exists(fs: &VendoredFileSystem, path: &VendoredPath) -> bool {
let normalized = NormalizedVendoredPath::from(path);
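VendoredFileSystem now derives Clone instead of exposing a hand-written snapshot method; because its only field is an Arc (as shown in the hunk above), a clone is just a reference-count bump and both values see the same archive. A small standalone illustration of that property, using a hypothetical SharedArchive type:

use std::sync::{Arc, Mutex};

#[derive(Clone)]
struct SharedArchive {
    inner: Arc<Mutex<Vec<u8>>>,
}

fn main() {
    let a = SharedArchive { inner: Arc::new(Mutex::new(vec![1, 2, 3])) };
    let b = a.clone(); // shares the same underlying buffer

    b.inner.lock().unwrap().push(4);

    // The write through `b` is visible through `a` as well.
    assert_eq!(a.inner.lock().unwrap().len(), 4);
}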


@ -573,7 +573,8 @@ impl<'a> FlatBinaryExpressionSlice<'a> {
#[allow(unsafe_code)]
unsafe {
// SAFETY: `BinaryChainSlice` has the same layout as a slice because it uses `repr(transparent)`
&*(slice as *const [OperandOrOperator<'a>] as *const FlatBinaryExpressionSlice<'a>)
&*(std::ptr::from_ref::<[OperandOrOperator<'a>]>(slice)
as *const FlatBinaryExpressionSlice<'a>)
}
}