internal: port rust-analyzer to new Salsa

Author: David Barsky
Date:   2024-11-05 12:24:41 -05:00
Commit: 74620e64ec
Parent: 394374e769

161 changed files with 3075 additions and 2331 deletions
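
For orientation: most hunks below apply a handful of mechanical changes. Queries that previously lived on `SourceDatabase`/`SourceRootDatabase` move behind `RootQueryDb` and are reached via `Upcast`; `file_text` now returns a Salsa input whose contents are read with `.text(db)`; and `parse` takes an interned `ide_db::base_db::EditionedFileId` built through `AsDynDatabase`. Below is a minimal sketch of the first two patterns, assuming the post-port `ide_db` API as it appears in this diff (`file_summary` itself is a hypothetical helper, not code from the commit):

    // Illustrative sketch (not part of the commit): condenses two call-site
    // patterns that repeat throughout the hunks below.
    use ide_db::base_db::{RootQueryDb, SourceDatabase, Upcast};
    use ide_db::{FileId, RootDatabase};

    fn file_summary(db: &RootDatabase, file_id: FileId) -> (usize, usize) {
        // `file_text` now returns a Salsa input struct; the contents are read
        // with `.text(db)` instead of arriving as an `Arc<str>` directly.
        let text_len = db.file_text(file_id).text(db).len();

        // Queries that moved from `SourceDatabase` onto `RootQueryDb` are
        // reached through an explicit upcast to the trait object.
        let crate_count =
            Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(file_id).iter().count();

        (text_len, crate_count)
    }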


@@ -12,7 +12,7 @@ use url::Url;
use hir::{db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
use ide_db::{
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
defs::{Definition, NameClass, NameRefClass},
documentation::{docs_with_rangemap, Documentation, HasDocs},
helpers::pick_best_token,


@@ -43,7 +43,7 @@ fn check_external_docs(
fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let sema = &Semantics::new(&*analysis.db);
let sema = &Semantics::new(&analysis.db);
let (cursor_def, docs) = def_under_cursor(sema, &position);
let res = rewrite_links(sema.db, docs.as_str(), cursor_def);
expect.assert_eq(&res)
@@ -54,7 +54,7 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
expected.sort_by_key(key_fn);
let sema = &Semantics::new(&*analysis.db);
let sema = &Semantics::new(&analysis.db);
let (cursor_def, docs) = def_under_cursor(sema, &position);
let defs = extract_definitions_from_docs(&docs);
let actual: Vec<_> = defs


@@ -234,7 +234,6 @@ fn _format(
file_id: FileId,
expansion: &str,
) -> Option<String> {
use ide_db::base_db::{FileLoader, SourceDatabase};
// hack until we get hygiene working (same character amount to preserve formatting as much as possible)
const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
const BUILTIN_REPLACE: &str = "builtin__POUND";
@@ -248,8 +247,9 @@ fn _format(
};
let expansion = format!("{prefix}{expansion}{suffix}");
let &crate_id = db.relevant_crates(file_id).iter().next()?;
let edition = db.crate_graph()[crate_id].edition;
let upcast_db = ide_db::base_db::Upcast::<dyn ide_db::base_db::RootQueryDb>::upcast(db);
let &crate_id = upcast_db.relevant_crates(file_id).iter().next()?;
let edition = upcast_db.crate_graph()[crate_id].edition;
#[allow(clippy::disallowed_methods)]
let mut cmd = std::process::Command::new(toolchain::Tool::Rustfmt.path());


@@ -1,5 +1,5 @@
use ide_db::{
base_db::{CrateOrigin, SourceDatabase},
base_db::{CrateOrigin, RootQueryDb},
FileId, FxIndexSet, RootDatabase,
};


@@ -10,7 +10,7 @@ use hir::{
ModuleDef, Semantics,
};
use ide_db::{
base_db::{AnchoredPath, FileLoader, SourceDatabase},
base_db::{AnchoredPath, RootQueryDb, SourceDatabase, Upcast},
defs::{Definition, IdentClass},
famous_defs::FamousDefs,
helpers::pick_best_token,
@@ -216,8 +216,9 @@ fn try_lookup_include_path(
}
let path = token.value().ok()?;
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).len().try_into().ok()?;
let file_id = Upcast::<dyn RootQueryDb>::upcast(sema.db)
.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).text(sema.db).len().try_into().ok()?;
Some(NavigationTarget {
file_id,
full_range: TextRange::new(0.into(), size),


@@ -2,6 +2,7 @@ use std::iter;
use hir::{db, FilePosition, FileRange, HirFileId, InFile, Semantics};
use ide_db::{
base_db::salsa::AsDynDatabase,
defs::{Definition, IdentClass},
helpers::pick_best_token,
search::{FileReference, ReferenceCategory, SearchScope},
@@ -60,7 +61,10 @@ pub(crate) fn highlight_related(
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let syntax = sema.parse(file_id).syntax().clone();
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let syntax = sema.parse(editioned_file_id_wrapper).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`


@@ -8,7 +8,7 @@ use hir::{
MethodViolationCode, Name, Semantics, Symbol, Trait, Type, TypeInfo, VariantDef,
};
use ide_db::{
base_db::SourceDatabase,
base_db::RootQueryDb,
defs::Definition,
documentation::HasDocs,
famous_defs::FamousDefs,


@@ -47,7 +47,7 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
.unwrap();
let content = analysis.db.file_text(position.file_id);
let hovered_element = &content[hover.range];
let hovered_element = &content.text(&analysis.db)[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
expect.assert_eq(&actual)
@@ -72,7 +72,7 @@ fn check_hover_fields_limit(
.unwrap()
.unwrap();
let content = analysis.db.file_text(position.file_id);
let content = analysis.db.file_text(position.file_id).text(&analysis.db);
let hovered_element = &content[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -98,7 +98,7 @@ fn check_hover_enum_variants_limit(
.unwrap()
.unwrap();
let content = analysis.db.file_text(position.file_id);
let content = analysis.db.file_text(position.file_id).text(&analysis.db);
let hovered_element = &content[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -124,7 +124,7 @@ fn check_assoc_count(
.unwrap()
.unwrap();
let content = analysis.db.file_text(position.file_id);
let content = analysis.db.file_text(position.file_id).text(&analysis.db);
let hovered_element = &content[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -141,7 +141,7 @@ fn check_hover_no_links(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect:
.unwrap()
.unwrap();
let content = analysis.db.file_text(position.file_id);
let content = analysis.db.file_text(position.file_id).text(&analysis.db);
let hovered_element = &content[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -158,7 +158,7 @@ fn check_hover_no_memory_layout(#[rust_analyzer::rust_fixture] ra_fixture: &str,
.unwrap()
.unwrap();
let content = analysis.db.file_text(position.file_id);
let content = analysis.db.file_text(position.file_id).text(&analysis.db);
let hovered_element = &content[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -179,7 +179,7 @@ fn check_hover_no_markdown(#[rust_analyzer::rust_fixture] ra_fixture: &str, expe
.unwrap()
.unwrap();
let content = analysis.db.file_text(position.file_id);
let content = analysis.db.file_text(position.file_id).text(&analysis.db);
let hovered_element = &content[hover.range];
let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);


@@ -8,7 +8,7 @@ use hir::{
sym, ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirWrite,
ModuleDef, ModuleDefId, Semantics,
};
use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase};
use ide_db::{base_db::salsa::AsDynDatabase, famous_defs::FamousDefs, FileRange, RootDatabase};
use ide_db::{text_edit::TextEdit, FxHashSet};
use itertools::Itertools;
use smallvec::{smallvec, SmallVec};
@@ -86,7 +86,9 @@ pub(crate) fn inlay_hints(
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let file = sema.parse(file_id);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
let file = file.syntax();
let mut acc = Vec::new();
@@ -137,7 +139,9 @@ pub(crate) fn inlay_hints_resolve(
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let file = sema.parse(file_id);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
let file = file.syntax();
let scope = sema.scope(file)?;


@@ -868,15 +868,15 @@ fn main() {
//- minicore: fn
fn main() {
let x = || 2;
//^ {closure#0}
//^ {closure#26624}
let y = |t: i32| x() + t;
//^ {closure#1}
//^ {closure#26625}
let mut t = 5;
//^ i32
let z = |k: i32| { t += k; };
//^ {closure#2}
//^ {closure#26626}
let p = (y, z);
//^ ({closure#1}, {closure#2})
//^ ({closure#26625}, {closure#26626})
}
"#,
);


@@ -1,5 +1,5 @@
use hir::{ConstEvalError, DefWithBody, DisplayTarget, Semantics};
use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
use ide_db::{base_db::SourceDatabase, FilePosition, LineIndexDatabase, RootDatabase};
use std::time::{Duration, Instant};
use stdx::format_to;
use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
@@ -35,10 +35,10 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura
_ => return None,
};
let span_formatter = |file_id, text_range: TextRange| {
let path = &db
.source_root(db.file_source_root(file_id))
.path_for_file(&file_id)
.map(|x| x.to_string());
let source_root = db.file_source_root(file_id).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
let path = path.as_deref().unwrap_or("<unknown file>");
match db.line_index(file_id).try_line_col(text_range.start()) {
Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
@@ -64,10 +64,9 @@ pub(crate) fn render_const_eval_error(
display_target: DisplayTarget,
) -> String {
let span_formatter = |file_id, text_range: TextRange| {
let path = &db
.source_root(db.file_source_root(file_id))
.path_for_file(&file_id)
.map(|x| x.to_string());
let source_root = db.file_source_root(file_id).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
let path = path.as_deref().unwrap_or("<unknown file>");
match db.line_index(file_id).try_line_col(text_range.start()) {
Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),


@@ -64,9 +64,9 @@ use fetch_crates::CrateInfo;
use hir::{sym, ChangeWithProcMacros};
use ide_db::{
base_db::{
ra_salsa::{self, ParallelDatabase},
CrateOrigin, CrateWorkspaceData, Env, FileLoader, FileSet, SourceDatabase,
SourceRootDatabase, VfsPath,
salsa::{AsDynDatabase, Cancelled},
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, Upcast,
VfsPath,
},
prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
};
@@ -125,7 +125,7 @@ pub use ide_completion::{
};
pub use ide_db::text_edit::{Indel, TextEdit};
pub use ide_db::{
base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},
base_db::{CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},
documentation::Documentation,
label::Label,
line_index::{LineCol, LineIndex},
@@ -217,7 +217,7 @@ impl Default for AnalysisHost {
/// `Analysis` are canceled (most method return `Err(Canceled)`).
#[derive(Debug)]
pub struct Analysis {
db: ra_salsa::Snapshot<RootDatabase>,
db: RootDatabase,
}
// As a general design guideline, `Analysis` API are intended to be independent
@@ -276,12 +276,12 @@ impl Analysis {
}
pub fn source_root_id(&self, file_id: FileId) -> Cancellable<SourceRootId> {
self.with_db(|db| db.file_source_root(file_id))
self.with_db(|db| db.file_source_root(file_id).source_root_id(db))
}
pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable<bool> {
self.with_db(|db| {
let sr = db.source_root(source_root_id);
let sr = db.source_root(source_root_id).source_root(db);
!sr.is_library
})
}
@@ -295,18 +295,28 @@ impl Analysis {
/// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
self.with_db(|db| SourceDatabase::file_text(db, file_id))
self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db))
}
/// Gets the syntax tree of the file.
pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
// FIXME edition
self.with_db(|db| db.parse(EditionedFileId::current_edition(file_id)).tree())
self.with_db(|db| {
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(file_id),
);
db.parse(editioned_file_id_wrapper).tree()
})
}
/// Returns true if this file belongs to an immutable library.
pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
self.with_db(|db| {
let source_root = db.file_source_root(file_id).source_root_id(db);
db.source_root(source_root).source_root(db).is_library
})
}
/// Gets the file's `LineIndex`: data structure to convert between absolute
@@ -324,7 +334,11 @@ impl Analysis {
/// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
let parse = db.parse(EditionedFileId::current_edition(position.file_id));
let file_id = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(position.file_id),
);
let parse = db.parse(file_id);
let file = parse.tree();
matching_brace::matching_brace(&file, position.offset)
})
@@ -383,7 +397,11 @@ impl Analysis {
/// stuff like trailing commas.
pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
self.with_db(|db| {
let parse = db.parse(EditionedFileId::current_edition(frange.file_id));
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(frange.file_id),
);
let parse = db.parse(editioned_file_id_wrapper);
join_lines::join_lines(config, &parse.tree(), frange.range)
})
}
@@ -419,9 +437,12 @@ impl Analysis {
pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
// FIXME: Edition
self.with_db(|db| {
file_structure::file_structure(
&db.parse(EditionedFileId::current_edition(file_id)).tree(),
)
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(file_id),
);
file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree())
})
}
@@ -450,9 +471,12 @@ impl Analysis {
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
self.with_db(|db| {
folding_ranges::folding_ranges(
&db.parse(EditionedFileId::current_edition(file_id)).tree(),
)
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(file_id),
);
folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
})
}
@@ -589,7 +613,10 @@ impl Analysis {
/// Returns crates that this file *might* belong to.
pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
self.with_db(|db| {
let db = Upcast::<dyn RootQueryDb>::upcast(db);
db.relevant_crates(file_id).iter().copied().collect()
})
}
/// Returns the edition of the given crate.
@@ -828,7 +855,8 @@ impl Analysis {
where
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
{
Cancelled::catch(|| f(&self.db))
let snap = self.db.snapshot();
Cancelled::catch(|| f(&snap))
}
}


@@ -1,6 +1,6 @@
use hir::{db::DefDatabase, Semantics};
use ide_db::{
base_db::{CrateId, FileLoader},
base_db::{CrateId, RootQueryDb, Upcast},
FileId, FilePosition, RootDatabase,
};
use itertools::Itertools;
@@ -54,7 +54,9 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
/// This returns `Vec` because a module may be included from several places.
pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
db.relevant_crates(file_id)
let root_db = Upcast::<dyn RootQueryDb>::upcast(db);
root_db
.relevant_crates(file_id)
.iter()
.copied()
.filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())


@@ -6,6 +6,7 @@
use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{
base_db::salsa::AsDynDatabase,
defs::{Definition, NameClass, NameRefClass},
rename::{bail, format_err, source_edit_from_references, IdentifierKind},
source_change::SourceChangeBuilder,
@@ -85,7 +86,9 @@ pub(crate) fn rename(
let file_id = sema
.attach_first_edition(position.file_id)
.ok_or_else(|| format_err!("No references found at position"))?;
let source_file = sema.parse(file_id);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
let source_file = sema.parse(editioned_file_id_wrapper);
let syntax = source_file.syntax();
let defs = find_definitions(&sema, syntax, position)?;


@@ -9,7 +9,7 @@ use hir::{
};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
base_db::SourceDatabase,
base_db::RootQueryDb,
defs::Definition,
documentation::docs_from_attrs,
helpers::visit_file_defs,


@@ -59,7 +59,7 @@ mod tests {
use expect_test::expect;
use ide_assists::{Assist, AssistResolveStrategy};
use ide_db::{
base_db::ra_salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet,
base_db::salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet,
RootDatabase,
};
use test_fixture::WithFixture;


@@ -3,7 +3,7 @@
use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics};
use ide_db::{
base_db::{SourceDatabase, SourceRootDatabase, VfsPath},
base_db::{RootQueryDb, SourceDatabase, VfsPath},
defs::Definition,
documentation::Documentation,
famous_defs::FamousDefs,
@@ -267,11 +267,11 @@ impl StaticIndex<'_> {
analysis: &'a Analysis,
vendored_libs_config: VendoredLibrariesConfig<'_>,
) -> StaticIndex<'a> {
let db = &*analysis.db;
let db = &analysis.db;
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id.into());
let source_root = db.source_root(source_root);
let source_root = db.file_source_root(file_id.into()).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
let is_vendored = match vendored_libs_config {
VendoredLibrariesConfig::Included { workspace_root } => source_root
.path_for_file(&file_id.into())


@@ -1,29 +1,8 @@
use std::{fmt, marker::PhantomData};
use hir::{
db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
Attr, Attrs, ExpandResult, MacroFileId, Module,
};
use ide_db::{
base_db::{
ra_salsa::{
debug::{DebugQueryTable, TableEntry},
Query, QueryTable,
},
CompressedFileTextQuery, CrateData, ParseQuery, SourceDatabase, SourceRootId,
},
symbol_index::ModuleSymbolsQuery,
};
use ide_db::{
symbol_index::{LibrarySymbolsQuery, SymbolIndex},
RootDatabase,
};
use ide_db::base_db::{CrateData, RootQueryDb, Upcast};
use ide_db::RootDatabase;
use itertools::Itertools;
use profile::{memory_usage, Bytes};
use span::{EditionedFileId, FileId};
use span::FileId;
use stdx::format_to;
use syntax::{ast, Parse, SyntaxNode};
use triomphe::Arc;
// Feature: Status
//
@@ -37,17 +16,17 @@ use triomphe::Arc;
pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
let mut buf = String::new();
format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db)));
format_to!(buf, "{} in total\n", memory_usage());
// format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
// format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
// format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
// format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
// format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db)));
// format_to!(buf, "{} in total\n", memory_usage());
format_to!(buf, "\nDebug info:\n");
format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db)));
format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db)));
format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
// format_to!(buf, "\nDebug info:\n");
// format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db)));
// format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db)));
// format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
if let Some(file_id) = file_id {
format_to!(buf, "\nCrates for file {}:\n", file_id.index());
@@ -55,7 +34,8 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
if crates.is_empty() {
format_to!(buf, "Does not belong to any crate");
}
let crate_graph = db.crate_graph();
let crate_graph = Upcast::<dyn RootQueryDb>::upcast(db).crate_graph();
for crate_id in crates {
let CrateData {
root_file_id,
@@ -97,190 +77,3 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
buf.trim().to_owned()
}
fn collect_query<'q, Q>(table: QueryTable<'q, Q>) -> <Q as QueryCollect>::Collector
where
QueryTable<'q, Q>: DebugQueryTable,
Q: QueryCollect,
<Q as Query>::Storage: 'q,
<Q as QueryCollect>::Collector: StatCollect<
<QueryTable<'q, Q> as DebugQueryTable>::Key,
<QueryTable<'q, Q> as DebugQueryTable>::Value,
>,
{
struct StatCollectorWrapper<C>(C);
impl<C: StatCollect<K, V>, K, V> FromIterator<TableEntry<K, V>> for StatCollectorWrapper<C> {
fn from_iter<T>(iter: T) -> StatCollectorWrapper<C>
where
T: IntoIterator<Item = TableEntry<K, V>>,
{
let mut res = C::default();
for entry in iter {
res.collect_entry(entry.key, entry.value);
}
StatCollectorWrapper(res)
}
}
table.entries::<StatCollectorWrapper<<Q as QueryCollect>::Collector>>().0
}
fn collect_query_count<'q, Q>(table: QueryTable<'q, Q>) -> usize
where
QueryTable<'q, Q>: DebugQueryTable,
Q: Query,
<Q as Query>::Storage: 'q,
{
struct EntryCounter(usize);
impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
fn from_iter<T>(iter: T) -> EntryCounter
where
T: IntoIterator<Item = TableEntry<K, V>>,
{
EntryCounter(iter.into_iter().count())
}
}
table.entries::<EntryCounter>().0
}
trait QueryCollect: Query {
type Collector;
}
impl QueryCollect for LibrarySymbolsQuery {
type Collector = SymbolsStats<SourceRootId>;
}
impl QueryCollect for ParseQuery {
type Collector = SyntaxTreeStats<false>;
}
impl QueryCollect for ParseMacroExpansionQuery {
type Collector = SyntaxTreeStats<true>;
}
impl QueryCollect for CompressedFileTextQuery {
type Collector = FilesStats;
}
impl QueryCollect for ModuleSymbolsQuery {
type Collector = SymbolsStats<Module>;
}
impl QueryCollect for AttrsQuery {
type Collector = AttrsStats;
}
trait StatCollect<K, V>: Default {
fn collect_entry(&mut self, key: K, value: Option<V>);
}
#[derive(Default)]
struct FilesStats {
total: usize,
size: Bytes,
}
impl fmt::Display for FilesStats {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(fmt, "{} of files", self.size)
}
}
impl StatCollect<FileId, Arc<[u8]>> for FilesStats {
fn collect_entry(&mut self, _: FileId, value: Option<Arc<[u8]>>) {
self.total += 1;
self.size += value.unwrap().len();
}
}
#[derive(Default)]
pub(crate) struct SyntaxTreeStats<const MACROS: bool> {
total: usize,
pub(crate) retained: usize,
}
impl<const MACROS: bool> fmt::Display for SyntaxTreeStats<MACROS> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
fmt,
"{} trees, {} preserved{}",
self.total,
self.retained,
if MACROS { " (macros)" } else { "" }
)
}
}
impl StatCollect<EditionedFileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
fn collect_entry(&mut self, _: EditionedFileId, value: Option<Parse<ast::SourceFile>>) {
self.total += 1;
self.retained += value.is_some() as usize;
}
}
impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
fn collect_entry(
&mut self,
_: MacroFileId,
value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
) {
self.total += 1;
self.retained += value.is_some() as usize;
}
}
struct SymbolsStats<Key> {
total: usize,
size: Bytes,
phantom: PhantomData<Key>,
}
impl<Key> Default for SymbolsStats<Key> {
fn default() -> Self {
Self { total: Default::default(), size: Default::default(), phantom: PhantomData }
}
}
impl fmt::Display for SymbolsStats<Module> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(fmt, "{} of module index symbols ({})", self.size, self.total)
}
}
impl fmt::Display for SymbolsStats<SourceRootId> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(fmt, "{} of library index symbols ({})", self.size, self.total)
}
}
impl<Key> StatCollect<Key, Arc<SymbolIndex>> for SymbolsStats<Key> {
fn collect_entry(&mut self, _: Key, value: Option<Arc<SymbolIndex>>) {
if let Some(symbols) = value {
self.total += symbols.len();
self.size += symbols.memory_size();
}
}
}
#[derive(Default)]
struct AttrsStats {
entries: usize,
total: usize,
}
impl fmt::Display for AttrsStats {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let size = self.entries * size_of::<Attrs>() + self.total * size_of::<Attr>();
let size = Bytes::new(size as _);
write!(
fmt,
"{} attribute query entries, {} total attributes ({} for storing entries)",
self.entries, self.total, size
)
}
}
impl<Key> StatCollect<Key, Attrs> for AttrsStats {
fn collect_entry(&mut self, _: Key, value: Option<Attrs>) {
self.entries += 1;
self.total += value.map_or(0, |it| it.len());
}
}


@@ -18,7 +18,9 @@ use either::Either;
use hir::{
DefWithBody, HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics,
};
use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind};
use ide_db::{
base_db::salsa::AsDynDatabase, FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind,
};
use span::EditionedFileId;
use syntax::{
ast::{self, IsString},
@@ -203,7 +205,9 @@ pub(crate) fn highlight(
// Determine the root based on the given range.
let (root, range_to_highlight) = {
let file = sema.parse(file_id);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
let source_file = file.syntax();
match range_to_highlight {
Some(range) => {


@@ -1,6 +1,7 @@
//! Renders a bit of code as HTML.
use hir::Semantics;
use ide_db::base_db::salsa::AsDynDatabase;
use oorandom::Rand32;
use span::EditionedFileId;
use stdx::format_to;
@@ -16,7 +17,9 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let file = sema.parse(file_id);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
let file = file.syntax();
fn rainbowify(seed: u64) -> String {
let mut rng = Rand32::new(seed);


@@ -41,14 +41,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="10753541418856619067" style="color: hsl(51,52%,47%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="9865812862466303869" style="color: hsl(329,86%,55%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="5697120079570210533" style="color: hsl(268,86%,80%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4222724691718692706" style="color: hsl(156,71%,51%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4002942168268782293" style="color: hsl(114,87%,67%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="16380625810977895757" style="color: hsl(262,75%,75%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre>


@@ -2,7 +2,7 @@
use hir::{Crate, Module, ModuleDef, Semantics};
use ide_db::{
base_db::{CrateGraph, CrateId, SourceDatabase},
base_db::{CrateGraph, CrateId, RootQueryDb},
FileId, RootDatabase,
};
use syntax::TextRange;


@@ -15,10 +15,13 @@
mod on_enter;
use ide_db::{
base_db::{salsa::AsDynDatabase, RootQueryDb},
FilePosition, RootDatabase,
};
use span::{Edition, EditionedFileId};
use std::iter;
use ide_db::{base_db::SourceDatabase, FilePosition, RootDatabase};
use span::{Edition, EditionedFileId};
use syntax::{
algo::{ancestors_at_offset, find_node_at_offset},
ast::{self, edit::IndentLevel, AstToken},
@@ -73,7 +76,11 @@ pub(crate) fn on_char_typed(
// FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
// causing the editor to feel sluggish!
let edition = Edition::CURRENT_FIXME;
let file = &db.parse(EditionedFileId::new(position.file_id, edition));
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
db.as_dyn_database(),
EditionedFileId::new(position.file_id, edition),
);
let file = &db.parse(editioned_file_id_wrapper);
let char_matches_position =
file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
if !stdx::always!(char_matches_position) {


@@ -1,8 +1,8 @@
//! Handles the `Enter` key press. At the momently, this only continues
//! comments, but should handle indent some time in the future as well.
use ide_db::RootDatabase;
use ide_db::{base_db::SourceDatabase, FilePosition};
use ide_db::base_db::RootQueryDb;
use ide_db::{base_db::salsa::AsDynDatabase, FilePosition, RootDatabase};
use span::EditionedFileId;
use syntax::{
algo::find_node_at_offset,
@@ -51,7 +51,11 @@ use ide_db::text_edit::TextEdit;
//
// ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let parse = db.parse(EditionedFileId::current_edition(position.file_id));
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
db.as_dyn_database(),
EditionedFileId::current_edition(position.file_id),
);
let parse = db.parse(editioned_file_id_wrapper);
let file = parse.tree();
let token = file.syntax().token_at_offset(position.offset).left_biased()?;


@@ -1,6 +1,6 @@
use dot::{Id, LabelText};
use ide_db::{
base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase},
base_db::{CrateGraph, CrateId, Dependency, RootQueryDb, SourceDatabase, Upcast},
FxHashSet, RootDatabase,
};
use triomphe::Arc;
@@ -16,7 +16,7 @@ use triomphe::Arc;
// |---------|-------------|
// | VS Code | **rust-analyzer: View Crate Graph** |
pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
let crate_graph = db.crate_graph();
let crate_graph = Upcast::<dyn RootQueryDb>::upcast(db).crate_graph();
let crates_to_render = crate_graph
.iter()
.filter(|krate| {
@@ -24,8 +24,9 @@ fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,
true
} else {
// Only render workspace crates
let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
!db.source_root(root_id).is_library
let root_id =
db.file_source_root(crate_graph[*krate].root_file_id).source_root_id(db);
!db.source_root(root_id).source_root(db).is_library
}
})
.collect();