Merge pull request #19617 from ChayimFriedman2/more-actual

internal: Make `HirFileId`, `EditionedFileId` and macro files Salsa structs
This commit is contained in:
Chayim Refael Friedman 2025-04-19 19:21:53 +00:00 committed by GitHub
commit 150bb4a00f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
195 changed files with 1473 additions and 1525 deletions

View file

@ -9,8 +9,7 @@ use std::{
use cfg::{CfgAtom, CfgDiff};
use hir::{
Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ImportPathConfig,
ModuleDef, Name,
Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ImportPathConfig, ModuleDef, Name,
db::{DefDatabase, ExpandDatabase, HirDatabase},
};
use hir_def::{
@ -142,7 +141,9 @@ impl flags::AnalysisStats {
if !source_root.is_library || self.with_deps {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
.file_item_tree(EditionedFileId::current_edition(file_id).into())
.file_item_tree(
EditionedFileId::current_edition(db, file_id).into(),
)
.item_tree_stats()
.into();
@ -152,7 +153,9 @@ impl flags::AnalysisStats {
} else {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
.file_item_tree(EditionedFileId::current_edition(file_id).into())
.file_item_tree(
EditionedFileId::current_edition(db, file_id).into(),
)
.item_tree_stats()
.into();
@ -203,7 +206,7 @@ impl flags::AnalysisStats {
let file_id = module.definition_source_file_id(db);
let file_id = file_id.original_file(db);
let source_root = db.file_source_root(file_id.into()).source_root_id(db);
let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
if !source_root.is_library || self.with_deps {
num_crates += 1;
@ -457,6 +460,7 @@ impl flags::AnalysisStats {
let mut sw = self.stop_watch();
for &file_id in &file_ids {
let file_id = file_id.editioned_file_id(db);
let sema = hir::Semantics::new(db);
let display_target = match sema.first_crate(file_id.file_id()) {
Some(krate) => krate.to_display_target(sema.db),
@ -756,7 +760,7 @@ impl flags::AnalysisStats {
};
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file.into());
let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
} else {
@ -1069,7 +1073,7 @@ impl flags::AnalysisStats {
};
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file.into());
let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
} else {
@ -1123,7 +1127,7 @@ impl flags::AnalysisStats {
term_search_borrowck: true,
},
ide::AssistResolveStrategy::All,
file_id.into(),
analysis.editioned_file_id_to_vfs(file_id),
);
}
for &file_id in &file_ids {
@ -1158,7 +1162,7 @@ impl flags::AnalysisStats {
fields_to_resolve: InlayFieldsToResolve::empty(),
range_exclusive_hints: true,
},
file_id.into(),
analysis.editioned_file_id_to_vfs(file_id),
None,
);
}
@ -1174,7 +1178,7 @@ impl flags::AnalysisStats {
annotate_enum_variant_references: false,
location: ide::AnnotationLocation::AboveName,
},
file_id.into(),
analysis.editioned_file_id_to_vfs(file_id),
)
.unwrap()
.into_iter()
@ -1199,8 +1203,8 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id:
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id.into());
let line_index = db.line_index(original_range.file_id.into());
let path = vfs.file_path(original_range.file_id.file_id(db));
let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@ -1215,8 +1219,8 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id.into());
let line_index = db.line_index(original_range.file_id.into());
let path = vfs.file_path(original_range.file_id.file_id(db));
let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@ -1234,8 +1238,8 @@ fn expr_syntax_range<'a>(
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id.into());
let line_index = db.line_index(original_range.file_id.into());
let path = vfs.file_path(original_range.file_id.file_id(db));
let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@ -1255,8 +1259,8 @@ fn pat_syntax_range<'a>(
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id.into());
let line_index = db.line_index(original_range.file_id.into());
let path = vfs.file_path(original_range.file_id.file_id(db));
let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));

View file

@ -4,7 +4,7 @@
use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
use hir::{Crate, HirFileIdExt, Module, db::HirDatabase, sym};
use hir::{Crate, Module, db::HirDatabase, sym};
use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
use ide_db::{LineIndexDatabase, base_db::SourceDatabase};
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
@ -51,7 +51,7 @@ impl flags::Diagnostics {
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id.into()).source_root_id(db);
let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
!source_root.is_library
});
@ -63,13 +63,13 @@ impl flags::Diagnostics {
module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
println!(
"processing crate: {crate_name}, module: {}",
_vfs.file_path(file_id.into())
_vfs.file_path(file_id.file_id(db))
);
for diagnostic in analysis
.full_diagnostics(
&DiagnosticsConfig::test_sample(),
AssistResolveStrategy::None,
file_id.into(),
file_id.file_id(db),
)
.unwrap()
{

View file

@ -514,12 +514,13 @@ mod test {
fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
host.raw_database_mut().apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ()");
let offset = range_or_offset.expect_offset();
(host, FilePosition { file_id: file_id.into(), offset })
let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
(host, position)
}
/// If expected == "", then assert that there are no symbols (this is basically local symbol)
@ -869,7 +870,7 @@ pub mod example_mod {
let s = "/// foo\nfn bar() {}";
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(s);
let change_fixture = ChangeFixture::parse(host.raw_database(), s);
host.raw_database_mut().apply_change(change_fixture.change);
let analysis = host.analysis();

View file

@ -73,7 +73,7 @@ impl flags::Search {
let sr = db.source_root(root).source_root(db);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(
EditionedFileId::current_edition(file_id),
EditionedFileId::current_edition(db, file_id),
debug_snippet,
) {
println!("{debug_info:#?}");

View file

@ -1,10 +1,8 @@
//! Reports references in code that the IDE layer cannot resolve.
use hir::{AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics, db::HirDatabase, sym};
use hir::{AnyDiagnostic, Crate, Module, Semantics, db::HirDatabase, sym};
use ide::{AnalysisHost, RootDatabase, TextRange};
use ide_db::{
EditionedFileId, FxHashSet, LineIndexDatabase as _,
base_db::{SourceDatabase, salsa::AsDynDatabase},
defs::NameRefClass,
EditionedFileId, FxHashSet, LineIndexDatabase as _, base_db::SourceDatabase, defs::NameRefClass,
};
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
use parser::SyntaxKind;
@ -57,23 +55,24 @@ impl flags::UnresolvedReferences {
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id.into()).source_root_id(db);
let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
!source_root.is_library
});
for module in work {
let file_id = module.definition_source_file_id(db).original_file(db);
let file_id = file_id.file_id(db);
if !visited_files.contains(&file_id) {
let crate_name =
module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
let file_path = vfs.file_path(file_id.into());
let file_path = vfs.file_path(file_id);
eprintln!("processing crate: {crate_name}, module: {file_path}",);
let line_index = db.line_index(file_id.into());
let file_text = db.file_text(file_id.into());
let line_index = db.line_index(file_id);
let file_text = db.file_text(file_id);
for range in find_unresolved_references(db, &sema, file_id.into(), &module) {
for range in find_unresolved_references(db, &sema, file_id, &module) {
let line_col = line_index.line_col(range.start());
let line = line_col.line + 1;
let col = line_col.col + 1;
@ -124,7 +123,7 @@ fn find_unresolved_references(
let node = inactive_code.node;
let range = node.map(|it| it.text_range()).original_node_file_range_rooted(db);
if range.file_id != file_id {
if range.file_id.file_id(db) != file_id {
continue;
}
@ -140,10 +139,8 @@ fn all_unresolved_references(
) -> Vec<TextRange> {
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
.unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
let file = sema.parse(file_id);
let root = file.syntax();
let mut unresolved_references = Vec::new();