Auto merge of #16335 - lnicola:salsa-lz4-file-text, r=Veykril

internal: Compress file text using LZ4

I haven't tested this properly, but memory usage roughly looks like this:

```
1246 MB
    59mb   4899 FileTextQuery

1008 MB
    20mb   4899 CompressedFileTextQuery
   555kb   1790 FileTextQuery
```

We might want to test on something more interesting, like `bevy`.
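
For context, the compression used here is `lz4_flex`'s size-prepended block format, via the same two calls the diffs below add (`compress_prepend_size` / `decompress_size_prepended`). A standalone round-trip sketch, with a made-up sample input:

```rust
// Round trip through lz4_flex's size-prepended block format. The compressed
// buffer carries the uncompressed length up front, so decompression needs no
// extra bookkeeping on our side.
fn main() {
    // Made-up sample input, only to show the shape of the API.
    let text = "fn main() { println!(\"hello\"); }\n".repeat(100);

    let compressed = lz4_flex::compress_prepend_size(text.as_bytes());
    println!("raw: {} bytes, compressed: {} bytes", text.len(), compressed.len());

    let bytes = lz4_flex::decompress_size_prepended(&compressed)
        .expect("lz4 decompression should not fail");
    assert_eq!(std::str::from_utf8(&bytes).unwrap(), text);
}
```
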
bors, 2024-03-11 13:43:33 +00:00, commit 8f8bcfc131
16 changed files with 89 additions and 36 deletions

Cargo.lock (generated)

```diff
@@ -71,6 +71,7 @@ version = "0.0.0"
 dependencies = [
  "cfg",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lz4_flex",
  "rustc-hash",
  "salsa",
  "semver",
@@ -134,9 +135,9 @@ dependencies = [
 [[package]]
 name = "cc"
-version = "1.0.89"
+version = "1.0.90"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0ba8f7aaa012f30d5b2861462f6708eccd49c3c39863fe083a308035f63d723"
+checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5"

 [[package]]
 name = "cfg"
@@ -874,9 +875,9 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
 [[package]]
 name = "libloading"
-version = "0.8.2"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2caa5afb8bf9f3a2652760ce7d4f62d21c4d5a423e68466fca30df82f2330164"
+checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
 dependencies = [
  "cfg-if",
  "windows-targets 0.52.4",
@@ -992,6 +993,12 @@ dependencies = [
  "url",
 ]

+[[package]]
+name = "lz4_flex"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "912b45c753ff5f7f5208307e8ace7d2a2e30d024e26d3509f3dce546c044ce15"
+
 [[package]]
 name = "mbe"
 version = "0.0.0"
```

```diff
@@ -12,6 +12,8 @@ rust-version.workspace = true
 doctest = false

 [dependencies]
+lz4_flex = { version = "0.11", default-features = false }
 la-arena.workspace = true
 salsa.workspace = true
 rustc-hash.workspace = true
```

```diff
@@ -7,13 +7,13 @@ use salsa::Durability;
 use triomphe::Arc;
 use vfs::FileId;

-use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId};
+use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId};

 /// Encapsulate a bunch of raw `.set` calls on the database.
 #[derive(Default)]
 pub struct FileChange {
     pub roots: Option<Vec<SourceRoot>>,
-    pub files_changed: Vec<(FileId, Option<Arc<str>>)>,
+    pub files_changed: Vec<(FileId, Option<String>)>,
     pub crate_graph: Option<CrateGraph>,
 }
@@ -42,7 +42,7 @@ impl FileChange {
         self.roots = Some(roots);
     }

-    pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
+    pub fn change_file(&mut self, file_id: FileId, new_text: Option<String>) {
         self.files_changed.push((file_id, new_text))
     }
@@ -68,8 +68,8 @@ impl FileChange {
             let source_root = db.source_root(source_root_id);
             let durability = durability(&source_root);
             // XXX: can't actually remove the file, just reset the text
-            let text = text.unwrap_or_else(|| Arc::from(""));
-            db.set_file_text_with_durability(file_id, text, durability)
+            let text = text.unwrap_or_default();
+            db.set_file_text_with_durability(file_id, &text, durability)
         }
         if let Some(crate_graph) = self.crate_graph {
             db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);
```

```diff
@@ -7,6 +7,7 @@ mod input;
 use std::panic;

+use salsa::Durability;
 use syntax::{ast, Parse, SourceFile};
 use triomphe::Arc;
@@ -42,6 +43,7 @@ pub trait Upcast<T: ?Sized> {
     fn upcast(&self) -> &T;
 }

+pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16;
 pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
 pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;
@@ -89,7 +91,10 @@ fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
 #[salsa::query_group(SourceDatabaseExtStorage)]
 pub trait SourceDatabaseExt: SourceDatabase {
     #[salsa::input]
+    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
+
     fn file_text(&self, file_id: FileId) -> Arc<str>;
+
     /// Path to a file, relative to the root of its source root.
     /// Source root of the file.
     #[salsa::input]
@@ -101,6 +106,44 @@ pub trait SourceDatabaseExt: SourceDatabase {
     fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
 }

+fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc<str> {
+    let bytes = db.compressed_file_text(file_id);
+    let bytes =
+        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
+    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
+    Arc::from(text)
+}
+
+pub trait SourceDatabaseExt2 {
+    fn set_file_text(&mut self, file_id: FileId, text: &str) {
+        self.set_file_text_with_durability(file_id, text, Durability::LOW);
+    }
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: FileId,
+        text: &str,
+        durability: Durability,
+    );
+}
+
+impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let bytes = text.as_bytes();
+        let compressed = lz4_flex::compress_prepend_size(bytes);
+        self.set_compressed_file_text_with_durability(
+            file_id,
+            Arc::from(compressed.as_slice()),
+            durability,
+        )
+    }
+}
+
 fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> {
     let graph = db.crate_graph();
     let mut crates = graph
```
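
From a caller's point of view, the new split is: writes go through `SourceDatabaseExt2` and get compressed, reads go through the derived `file_text` query and get decompressed (and memoized behind the LRU added further down). A minimal sketch of that round trip; the `roundtrip` helper is illustrative, not code from the PR:

```rust
use base_db::{FileId, SourceDatabaseExt, SourceDatabaseExt2 as _};

// Works for any database implementing SourceDatabaseExt, since
// SourceDatabaseExt2 is blanket-implemented for it.
fn roundtrip<DB: SourceDatabaseExt>(db: &mut DB, file_id: FileId) {
    // Stored as LZ4-compressed bytes in the `compressed_file_text` input.
    db.set_file_text(file_id, "fn main() {}");

    // Decompressed on demand by the derived `file_text` query.
    let text = db.file_text(file_id);
    assert_eq!(&*text, "fn main() {}");
}
```
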

```diff
@@ -1,6 +1,5 @@
-use base_db::{SourceDatabase, SourceDatabaseExt};
+use base_db::{SourceDatabase, SourceDatabaseExt2 as _};
 use test_fixture::WithFixture;
-use triomphe::Arc;

 use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
@@ -17,7 +16,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
         });
         assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
     }
-    db.set_file_text(pos.file_id, Arc::from(ra_fixture_change));
+    db.set_file_text(pos.file_id, ra_fixture_change);

     {
         let events = db.log_executed(|| {
@@ -267,7 +266,7 @@ fn quux() { 92 }
 m!(Y);
 m!(Z);
 "#;
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     {
         let events = db.log_executed(|| {
```

```diff
@@ -48,7 +48,7 @@ impl ChangeWithProcMacros {
         }
     }

-    pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
+    pub fn change_file(&mut self, file_id: FileId, new_text: Option<String>) {
         self.source_change.change_file(file_id, new_text)
     }
```

```diff
@@ -12,7 +12,7 @@ mod traits;
 use std::env;

-use base_db::{FileRange, SourceDatabaseExt};
+use base_db::{FileRange, SourceDatabaseExt2 as _};
 use expect_test::Expect;
 use hir_def::{
     body::{Body, BodySourceMap, SyntheticSyntax},
@@ -584,7 +584,7 @@ fn salsa_bug() {
     }
     ";
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     let module = db.module_for_file(pos.file_id);
     let crate_def_map = module.def_map(&db);
```

```diff
@@ -1,6 +1,5 @@
-use base_db::SourceDatabaseExt;
+use base_db::SourceDatabaseExt2 as _;
 use test_fixture::WithFixture;
-use triomphe::Arc;

 use crate::{db::HirDatabase, test_db::TestDB};
@@ -33,7 +32,7 @@ fn foo() -> i32 {
 1
 }";
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     {
         let events = db.log_executed(|| {
@@ -85,7 +84,7 @@ fn baz() -> i32 {
 }
 ";
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     {
         let events = db.log_executed(|| {
```

```diff
@@ -205,6 +205,7 @@ impl RootDatabase {
             // SourceDatabaseExt
             base_db::FileTextQuery
+            base_db::CompressedFileTextQuery
             base_db::FileSourceRootQuery
             base_db::SourceRootQuery
             base_db::SourceRootCratesQuery
```

```diff
@@ -51,6 +51,7 @@ use std::{fmt, mem::ManuallyDrop};
 use base_db::{
     salsa::{self, Durability},
     AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+    DEFAULT_FILE_TEXT_LRU_CAP,
 };
 use hir::db::{DefDatabase, ExpandDatabase, HirDatabase};
 use triomphe::Arc;
@@ -157,6 +158,7 @@ impl RootDatabase {
     pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<usize>) {
         let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
+        base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
         base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
         // macro expansions are usually rather small, so we can afford to keep more of them alive
         hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
@@ -166,6 +168,7 @@ impl RootDatabase {
     pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
         use hir::db as hir_db;

+        base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
         base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
             lru_capacities
                 .get(stringify!(ParseQuery))
@@ -199,7 +202,7 @@ impl RootDatabase {
             // base_db::ProcMacrosQuery

             // SourceDatabaseExt
-            // base_db::FileTextQuery
+            base_db::FileTextQuery
             // base_db::FileSourceRootQuery
             // base_db::SourceRootQuery
             base_db::SourceRootCratesQuery
```
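
Combined with the `base-db` changes above, the trade-off looks like this: the compressed bytes stay resident for every file (they are the salsa input), while at most `DEFAULT_FILE_TEXT_LRU_CAP` (16) decompressed texts are kept around at a time. Below is a toy model of that shape, not salsa's actual machinery, using `lz4_flex` as the PR does:

```rust
use std::collections::{HashMap, VecDeque};

// Illustrative only: a bounded cache of decompressed texts over always-resident
// compressed storage, mirroring the input / derived-query split above.
struct FileTexts {
    compressed: HashMap<u32, Vec<u8>>,  // always resident, like the salsa input
    decompressed: HashMap<u32, String>, // bounded, like the LRU'd derived query
    order: VecDeque<u32>,               // least recently used ids at the front
    cap: usize,                         // e.g. 16, like DEFAULT_FILE_TEXT_LRU_CAP
}

impl FileTexts {
    fn set(&mut self, file_id: u32, text: &str) {
        self.compressed.insert(file_id, lz4_flex::compress_prepend_size(text.as_bytes()));
        // Drop any stale decompressed copy.
        self.decompressed.remove(&file_id);
        self.order.retain(|&id| id != file_id);
    }

    fn text(&mut self, file_id: u32) -> &str {
        if self.decompressed.contains_key(&file_id) {
            // Cache hit: just mark the id as most recently used.
            self.order.retain(|&id| id != file_id);
        } else {
            if self.decompressed.len() >= self.cap {
                // Evict the least recently used decompressed text; the
                // compressed copy stays, so nothing is lost.
                if let Some(evicted) = self.order.pop_front() {
                    self.decompressed.remove(&evicted);
                }
            }
            let bytes = lz4_flex::decompress_size_prepended(&self.compressed[&file_id])
                .expect("lz4 decompression should not fail");
            let text = String::from_utf8(bytes).expect("file contents should be valid UTF-8");
            self.decompressed.insert(file_id, text);
        }
        self.order.push_back(file_id);
        &self.decompressed[&file_id]
    }
}
```
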

```diff
@@ -259,7 +259,7 @@ impl Analysis {
             false,
             CrateOrigin::Local { repo: None, name: None },
         );
-        change.change_file(file_id, Some(Arc::from(text)));
+        change.change_file(file_id, Some(text));
         change.set_crate_graph(crate_graph);
         change.set_target_data_layouts(vec![Err("fixture has no layout".into())]);
         change.set_toolchains(vec![None]);
```

```diff
@@ -361,8 +361,8 @@ fn load_crate_graph(
     let changes = vfs.take_changes();
     for file in changes {
         if let vfs::Change::Create(v) | vfs::Change::Modify(v) = file.change {
-            if let Ok(text) = std::str::from_utf8(&v) {
-                analysis_change.change_file(file.file_id, Some(text.into()))
+            if let Ok(text) = String::from_utf8(v) {
+                analysis_change.change_file(file.file_id, Some(text))
             }
         }
     }
```

```diff
@@ -134,7 +134,7 @@ impl Tester {
         let should_have_no_error = text.contains("// check-pass")
             || text.contains("// build-pass")
             || text.contains("// run-pass");
-        change.change_file(self.root_file, Some(Arc::from(text)));
+        change.change_file(self.root_file, Some(text));
         self.host.apply_change(change);
         let diagnostic_config = DiagnosticsConfig::test_sample();
```

```diff
@@ -330,7 +330,7 @@ impl GlobalState {
                 // FIXME: Consider doing normalization in the `vfs` instead? That allows
                 // getting rid of some locking
                 let (text, line_endings) = LineEndings::normalize(text);
-                (Arc::from(text), line_endings)
+                (text, line_endings)
             })
         } else {
             None
```

```diff
@@ -20,7 +20,6 @@ use ide_db::{
 };
 use project_model::CargoConfig;
 use test_utils::project_root;
-use triomphe::Arc;
 use vfs::{AbsPathBuf, VfsPath};

 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
@@ -70,7 +69,7 @@ fn integrated_highlighting_benchmark() {
         let mut text = host.analysis().file_text(file_id).unwrap().to_string();
         text.push_str("\npub fn _dummy() {}\n");
         let mut change = ChangeWithProcMacros::new();
-        change.change_file(file_id, Some(Arc::from(text)));
+        change.change_file(file_id, Some(text));
         host.apply_change(change);
     }
@@ -125,7 +124,7 @@ fn integrated_completion_benchmark() {
             patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
                 + "sel".len();
         let mut change = ChangeWithProcMacros::new();
-        change.change_file(file_id, Some(Arc::from(text)));
+        change.change_file(file_id, Some(text));
         host.apply_change(change);
         completion_offset
     };
@@ -168,7 +167,7 @@ fn integrated_completion_benchmark() {
             patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
                 + ";sel".len();
         let mut change = ChangeWithProcMacros::new();
-        change.change_file(file_id, Some(Arc::from(text)));
+        change.change_file(file_id, Some(text));
         host.apply_change(change);
         completion_offset
     };
@@ -210,7 +209,7 @@ fn integrated_completion_benchmark() {
             patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
                 + "self.".len();
         let mut change = ChangeWithProcMacros::new();
-        change.change_file(file_id, Some(Arc::from(text)));
+        change.change_file(file_id, Some(text));
         host.apply_change(change);
         completion_offset
     };
@@ -307,7 +306,7 @@ fn integrated_diagnostics_benchmark() {
     let mut text = host.analysis().file_text(file_id).unwrap().to_string();
     patch(&mut text, "db.struct_data(self.id)", "();\ndb.struct_data(self.id)");
     let mut change = ChangeWithProcMacros::new();
-    change.change_file(file_id, Some(Arc::from(text)));
+    change.change_file(file_id, Some(text));
     host.apply_change(change);
 };
```

```diff
@@ -149,12 +149,12 @@ impl ChangeFixture {
         for entry in fixture {
             let text = if entry.text.contains(CURSOR_MARKER) {
                 if entry.text.contains(ESCAPED_CURSOR_MARKER) {
-                    entry.text.replace(ESCAPED_CURSOR_MARKER, CURSOR_MARKER).into()
+                    entry.text.replace(ESCAPED_CURSOR_MARKER, CURSOR_MARKER)
                 } else {
                     let (range_or_offset, text) = extract_range_or_offset(&entry.text);
                     assert!(file_position.is_none());
                     file_position = Some((file_id, range_or_offset));
-                    text.into()
+                    text
                 }
             } else {
                 entry.text.as_str().into()
@@ -251,7 +251,7 @@ impl ChangeFixture {
             fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_owned()));
             roots.push(SourceRoot::new_library(fs));

-            source_change.change_file(core_file, Some(mini_core.source_code().into()));
+            source_change.change_file(core_file, Some(mini_core.source_code()));

             let all_crates = crate_graph.crates_in_topological_order();
@@ -287,7 +287,7 @@ impl ChangeFixture {
             );
             roots.push(SourceRoot::new_library(fs));

-            source_change.change_file(proc_lib_file, Some(source.into()));
+            source_change.change_file(proc_lib_file, Some(source));

             let all_crates = crate_graph.crates_in_topological_order();
```