Upgrade salsa (#15039)

The only code change is that Salsa now requires the `Db` to implement
`Clone` to create "lightweight" snapshots.
This commit is contained in:
Micha Reiser 2024-12-17 16:50:33 +01:00 committed by GitHub
parent c9fdb1f5e3
commit c3b6139f39
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 23 additions and 46 deletions

7
Cargo.lock generated
View file

@@ -3193,7 +3193,7 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
 [[package]]
 name = "salsa"
 version = "0.18.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=254c749b02cde2fd29852a7463a33e800b771758#254c749b02cde2fd29852a7463a33e800b771758"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=3c7f1694c9efba751dbeeacfbc93b227586e316a#3c7f1694c9efba751dbeeacfbc93b227586e316a"
 dependencies = [
  "append-only-vec",
  "arc-swap",
@@ -3203,6 +3203,7 @@ dependencies = [
  "indexmap",
  "lazy_static",
  "parking_lot",
+ "rayon",
  "rustc-hash 2.1.0",
  "salsa-macro-rules",
  "salsa-macros",
@@ -3213,12 +3214,12 @@ dependencies = [
 [[package]]
 name = "salsa-macro-rules"
 version = "0.1.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=254c749b02cde2fd29852a7463a33e800b771758#254c749b02cde2fd29852a7463a33e800b771758"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=3c7f1694c9efba751dbeeacfbc93b227586e316a#3c7f1694c9efba751dbeeacfbc93b227586e316a"

 [[package]]
 name = "salsa-macros"
 version = "0.18.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=254c749b02cde2fd29852a7463a33e800b771758#254c749b02cde2fd29852a7463a33e800b771758"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=3c7f1694c9efba751dbeeacfbc93b227586e316a#3c7f1694c9efba751dbeeacfbc93b227586e316a"
 dependencies = [
  "heck",
  "proc-macro2",

View file

@@ -118,7 +118,7 @@ rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "254c749b02cde2fd29852a7463a33e800b771758" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "3c7f1694c9efba751dbeeacfbc93b227586e316a" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }

View file

@@ -279,7 +279,7 @@ impl MainLoop {
 while let Ok(message) = self.receiver.recv() {
     match message {
         MainLoopMessage::CheckWorkspace => {
-            let db = db.snapshot();
+            let db = db.clone();
             let sender = self.sender.clone();
             // Spawn a new task that checks the workspace. This needs to be done in a separate thread

View file

@@ -27,6 +27,7 @@ pub(crate) mod tests {
 use ruff_db::{Db as SourceDb, Upcast};

 #[salsa::db]
+#[derive(Clone)]
 pub(crate) struct TestDb {
     storage: salsa::Storage<Self>,
     files: Files,

View file

@@ -91,11 +91,11 @@ fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
 let db = match path {
     AnySystemPath::System(path) => {
         match session.workspace_db_for_path(path.as_std_path()) {
-            Some(db) => db.snapshot(),
-            None => session.default_workspace_db().snapshot(),
+            Some(db) => db.clone(),
+            None => session.default_workspace_db().clone(),
         }
     }
-    AnySystemPath::SystemVirtual(_) => session.default_workspace_db().snapshot(),
+    AnySystemPath::SystemVirtual(_) => session.default_workspace_db().clone(),
 };
 let Some(snapshot) = session.take_snapshot(url) else {

View file

@@ -9,6 +9,7 @@ use ruff_db::vendored::VendoredFileSystem;
 use ruff_db::{Db as SourceDb, Upcast};

 #[salsa::db]
+#[derive(Clone)]
 pub(crate) struct Db {
     workspace_root: SystemPathBuf,
     storage: salsa::Storage<Self>,

View file

@@ -21,6 +21,7 @@ pub trait Db: SemanticDb + Upcast<dyn SemanticDb> {
 }

 #[salsa::db]
+#[derive(Clone)]
 pub struct RootDatabase {
     workspace: Option<Workspace>,
     storage: salsa::Storage<RootDatabase>,
@@ -80,17 +81,6 @@ impl RootDatabase {
     {
         Cancelled::catch(|| f(self))
     }
-
-    #[must_use]
-    pub fn snapshot(&self) -> Self {
-        Self {
-            workspace: self.workspace,
-            storage: self.storage.clone(),
-            files: self.files.snapshot(),
-            system: Arc::clone(&self.system),
-            rule_selection: Arc::clone(&self.rule_selection),
-        }
-    }
 }

 impl Upcast<dyn SemanticDb> for RootDatabase {
@@ -184,6 +174,7 @@ pub(crate) mod tests {
 use crate::DEFAULT_LINT_REGISTRY;

 #[salsa::db]
+#[derive(Clone)]
 pub(crate) struct TestDb {
     storage: salsa::Storage<Self>,
     events: Arc<std::sync::Mutex<Vec<Event>>>,

View file

@@ -195,13 +195,13 @@ impl Workspace {
 let result = Arc::new(std::sync::Mutex::new(Vec::new()));
 let inner_result = Arc::clone(&result);

-let db = db.snapshot();
+let db = db.clone();
 let workspace_span = workspace_span.clone();

 rayon::scope(move |scope| {
     for file in &files {
         let result = inner_result.clone();
-        let db = db.snapshot();
+        let db = db.clone();
         let workspace_span = workspace_span.clone();

         scope.spawn(move |_| {

View file

@@ -81,7 +81,7 @@ pub(crate) fn analyze_graph(
 // Collect and resolve the imports for each file.
 let result = Arc::new(Mutex::new(Vec::new()));
 let inner_result = Arc::clone(&result);
-let db = db.snapshot();
+let db = db.clone();

 rayon::scope(move |scope| {
     for resolved_file in paths {
@@ -137,7 +137,7 @@ pub(crate) fn analyze_graph(
             continue;
         };

-        let db = db.snapshot();
+        let db = db.clone();
         let glob_resolver = glob_resolver.clone();
         let root = root.clone();
         let result = inner_result.clone();

View file

@@ -48,7 +48,7 @@ pub fn vendored_path_to_file(
 }

 /// Lookup table that maps [file paths](`FilePath`) to salsa interned [`File`] instances.
-#[derive(Default)]
+#[derive(Default, Clone)]
 pub struct Files {
     inner: Arc<FilesInner>,
 }
@@ -253,13 +253,6 @@ impl Files {
         root.set_revision(db).to(FileRevision::now());
     }
-
-    #[must_use]
-    pub fn snapshot(&self) -> Self {
-        Self {
-            inner: Arc::clone(&self.inner),
-        }
-    }
 }

 impl std::fmt::Debug for Files {

View file

@@ -48,13 +48,13 @@ mod tests {
 ///
 /// Uses an in memory filesystem and it stubs out the vendored files by default.
 #[salsa::db]
-#[derive(Default)]
+#[derive(Default, Clone)]
 pub(crate) struct TestDb {
     storage: salsa::Storage<Self>,
     files: Files,
     system: TestSystem,
     vendored: VendoredFileSystem,
-    events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
+    events: Arc<std::sync::Mutex<Vec<salsa::Event>>>,
 }

 impl TestDb {

View file

@@ -16,7 +16,7 @@ static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
 });

 #[salsa::db]
-#[derive(Default)]
+#[derive(Default, Clone)]
 pub struct ModuleDb {
     storage: salsa::Storage<Self>,
     files: Files,
@@ -55,17 +55,6 @@ impl ModuleDb {
         Ok(db)
     }
-
-    /// Create a snapshot of the current database.
-    #[must_use]
-    pub fn snapshot(&self) -> Self {
-        Self {
-            storage: self.storage.clone(),
-            system: self.system.clone(),
-            files: self.files.snapshot(),
-            rule_selection: Arc::clone(&self.rule_selection),
-        }
-    }
 }

 impl Upcast<dyn SourceDb> for ModuleDb {

View file

@@ -22,12 +22,13 @@ use ruff_python_parser::{parse_unchecked, Mode};
 ///
 /// Uses an in memory filesystem and it stubs out the vendored files by default.
 #[salsa::db]
+#[derive(Clone)]
 struct TestDb {
     storage: salsa::Storage<Self>,
     files: Files,
     system: TestSystem,
     vendored: VendoredFileSystem,
-    events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
+    events: std::sync::Arc<Mutex<Vec<salsa::Event>>>,
     rule_selection: std::sync::Arc<RuleSelection>,
 }