Merge pull request #19617 from ChayimFriedman2/more-actual

internal: Make `HirFileId`, `EditionedFileId` and macro files Salsa structs
Chayim Refael Friedman, 2025-04-19 19:21:53 +00:00 (committed by GitHub)
commit 150bb4a00f
195 changed files with 1473 additions and 1525 deletions
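
The diff below is largely one mechanical pattern: `EditionedFileId` (and the macro-file ids) are now Salsa-interned structs, so code that used to read a field directly (`file_id.edition()`, `file_id.file_id()`) or convert with `.into()` now passes the database (`file_id.edition(db)`, `file_id.file_id(db)`). A minimal, self-contained sketch of that calling convention, using hypothetical stand-in types rather than rust-analyzer's real definitions:

```rust
// Hypothetical stand-ins for illustration only: a Salsa-interned struct is just
// a key, so reading its fields goes through the database that owns the payload.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileId(u32); // plain VFS file id
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Edition(u16);
#[derive(Clone, Copy)]
struct EditionedFileId(u32); // interned key; no longer carries its data inline

struct Db {
    editioned_files: Vec<(FileId, Edition)>, // payload behind each key
}

impl EditionedFileId {
    fn file_id(self, db: &Db) -> FileId {
        db.editioned_files[self.0 as usize].0
    }
    fn edition(self, db: &Db) -> Edition {
        db.editioned_files[self.0 as usize].1
    }
}

fn main() {
    let db = Db { editioned_files: vec![(FileId(0), Edition(2024))] };
    let id = EditionedFileId(0);
    // Before: `id.file_id()` / `id.edition()` with the data stored inline.
    // After: every read threads `&db` through, as in the hunks below.
    assert_eq!(id.file_id(&db), FileId(0));
    assert_eq!(id.edition(&db), Edition(2024));
}
```

Interning keeps the id itself `Copy` and cheap to compare while the payload lives in the database, which is presumably why every accessor in the hunks below grew a `db` parameter.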


@ -149,7 +149,7 @@ pub(crate) fn annotations(
source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> {
if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) {
if file_id == source_file_id {
if file_id.file_id(db) == source_file_id {
return Some((
value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()),


@ -76,9 +76,9 @@ pub(crate) fn incoming_calls(
}
let range = sema.original_range(name.syntax());
calls.add(nav.call_site, range.into());
calls.add(nav.call_site, range.into_file_id(db));
if let Some(other) = nav.def_site {
calls.add(other, range.into());
calls.add(other, range.into_file_id(db));
}
}
}
@ -143,7 +143,7 @@ pub(crate) fn outgoing_calls(
Some(nav_target.into_iter().zip(iter::repeat(range)))
})
.flatten()
.for_each(|(nav, range)| calls.add(nav, range.into()));
.for_each(|(nav, range)| calls.add(nav, range.into_file_id(db)));
Some(calls.into_items())
}
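
Several hunks here and below replace `range.into()` with `range.into_file_id(db)`, or spell the conversion out as `ide_db::FileRange { file_id: frange.file_id.file_id(db), range: frange.range }`. A blanket `Into` conversion no longer fits because mapping the interned id back to a VFS `FileId` needs the database. A sketch of what a helper like `into_file_id` plausibly does, again with simplified stand-in types (only the helper's name and role are taken from the diff):

```rust
// Illustration only; not rust-analyzer's real types.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileId(u32); // VFS-level id
#[derive(Clone, Copy)]
struct EditionedFileId(u32); // Salsa-interned key

struct Db {
    files: Vec<FileId>, // payload behind each interned key
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TextRange(u32, u32);

struct HirFileRange { file_id: EditionedFileId, range: TextRange } // editioned
struct IdeFileRange { file_id: FileId, range: TextRange }          // VFS-level

impl HirFileRange {
    // A `From`/`Into` impl cannot express this any more: it needs `&Db`.
    fn into_file_id(self, db: &Db) -> IdeFileRange {
        IdeFileRange { file_id: db.files[self.file_id.0 as usize], range: self.range }
    }
}

fn main() {
    let db = Db { files: vec![FileId(7)] };
    let hir = HirFileRange { file_id: EditionedFileId(0), range: TextRange(0, 4) };
    let ide = hir.into_file_id(&db);
    assert_eq!(ide.file_id, FileId(7));
    assert_eq!(ide.range, TextRange(0, 4));
}
```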


@ -1,5 +1,5 @@
use hir::db::ExpandDatabase;
use hir::{ExpandResult, InFile, MacroFileIdExt, Semantics};
use hir::{ExpandResult, InFile, Semantics};
use ide_db::{
FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
syntax_helpers::prettify_macro_expansion,
@ -99,7 +99,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
.display(
db,
sema.attach_first_edition(position.file_id)
.map(|it| it.edition())
.map(|it| it.edition(db))
.unwrap_or(Edition::CURRENT),
)
.to_string(),


@ -7,10 +7,10 @@ use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
/// Creates analysis for a single file.
pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
(host.analysis(), change_fixture.files[0].into())
(host.analysis(), change_fixture.files[0].file_id(&host.db))
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@ -18,23 +18,23 @@ pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FilePosition) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
(host.analysis(), FilePosition { file_id: file_id.into(), offset })
(host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset })
}
/// Creates analysis for a single file, returns range marked with a pair of $0.
pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let range = range_or_offset.expect_range();
(host.analysis(), FileRange { file_id: file_id.into(), range })
(host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range })
}
/// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@ -42,11 +42,11 @@ pub(crate) fn range_or_position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FileId, RangeOrOffset) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
(host.analysis(), file_id.into(), range_or_offset)
(host.analysis(), file_id.file_id(&host.db), range_or_offset)
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@ -54,24 +54,25 @@ pub(crate) fn annotations(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
let db = &host.db;
let annotations = change_fixture
.files
.iter()
.flat_map(|&file_id| {
let file_text = host.analysis().file_text(file_id.into()).unwrap();
let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap();
let annotations = extract_annotations(&file_text);
annotations
.into_iter()
.map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
.map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
})
.collect();
(host.analysis(), FilePosition { file_id: file_id.into(), offset }, annotations)
(host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations)
}
/// Creates analysis from a multi-file fixture with annotations without $0
@ -79,19 +80,20 @@ pub(crate) fn annotations_without_marker(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, Vec<(FileRange, String)>) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let db = &host.db;
let annotations = change_fixture
.files
.iter()
.flat_map(|&file_id| {
let file_text = host.analysis().file_text(file_id.into()).unwrap();
let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap();
let annotations = extract_annotations(&file_text);
annotations
.into_iter()
.map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
.map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
})
.collect();
(host.analysis(), annotations)


@ -6,8 +6,7 @@ use crate::{
navigation_target::{self, ToNav},
};
use hir::{
AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, MacroFileIdExt, ModuleDef,
Semantics, sym,
AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym,
};
use ide_db::{
RootDatabase, SymbolKind,
@ -44,7 +43,7 @@ pub(crate) fn goto_definition(
let sema = &Semantics::new(db);
let file = sema.parse_guess_edition(file_id).syntax().clone();
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT
| INT_NUMBER
@ -364,7 +363,7 @@ fn nav_for_exit_points(
if let Some(FileRange { file_id, range }) = focus_frange {
let contains_frange = |nav: &NavigationTarget| {
nav.file_id == file_id && nav.full_range.contains_range(range)
nav.file_id == file_id.file_id(db) && nav.full_range.contains_range(range)
};
if let Some(def_site) = nav.def_site.as_mut() {


@ -1,9 +1,8 @@
use std::iter;
use hir::{FilePosition, FileRange, HirFileId, InFile, Semantics, db};
use hir::{EditionedFileId, FilePosition, FileRange, HirFileId, InFile, Semantics, db};
use ide_db::{
FxHashMap, FxHashSet, RootDatabase,
base_db::salsa::AsDynDatabase,
defs::{Definition, IdentClass},
helpers::pick_best_token,
search::{FileReference, ReferenceCategory, SearchScope},
@ -12,7 +11,7 @@ use ide_db::{
preorder_expr_with_ctx_checker,
},
};
use span::EditionedFileId;
use span::FileId;
use syntax::{
AstNode,
SyntaxKind::{self, IDENT, INT_NUMBER},
@ -61,16 +60,14 @@ pub(crate) fn highlight_related(
let _p = tracing::info_span!("highlight_related").entered();
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let syntax = sema.parse(editioned_file_id_wrapper).syntax().clone();
.unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
let span_file_id = file_id.editioned_file_id(sema.db);
let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
T![->] => 4,
kind if kind.is_keyword(file_id.edition()) => 3,
kind if kind.is_keyword(span_file_id.edition()) => 3,
IDENT | INT_NUMBER => 2,
T![|] => 1,
_ => 0,
@ -92,11 +89,18 @@ pub(crate) fn highlight_related(
T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
highlight_break_points(sema, token).remove(&file_id)
}
T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
_ if config.references => {
highlight_references(sema, token, FilePosition { file_id, offset })
T![|] if config.closure_captures => {
highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
}
T![move] if config.closure_captures => {
highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
}
_ if config.references => highlight_references(
sema,
token,
FilePosition { file_id, offset },
span_file_id.file_id(),
),
_ => None,
}
}
@ -105,6 +109,7 @@ fn highlight_closure_captures(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
file_id: EditionedFileId,
vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?;
let search_range = closure.body()?.syntax().text_range();
@ -137,7 +142,7 @@ fn highlight_closure_captures(
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == file_id)
.filter(|decl| decl.file_id == vfs_file_id)
.filter_map(|decl| decl.focus_range)
.map(move |range| HighlightedRange { range, category })
.chain(usages)
@ -150,6 +155,7 @@ fn highlight_references(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let defs = if let Some((range, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
@ -261,7 +267,7 @@ fn highlight_references(
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == file_id)
.filter(|decl| decl.file_id == vfs_file_id)
.filter_map(|decl| decl.focus_range)
.map(|range| HighlightedRange { range, category })
.for_each(|x| {
@ -279,7 +285,7 @@ fn highlight_references(
},
};
for nav in navs {
if nav.file_id != file_id {
if nav.file_id != vfs_file_id {
continue;
}
let hl_range = nav.focus_range.map(|range| {


@ -133,7 +133,7 @@ pub(crate) fn hover(
let sema = &hir::Semantics::new(db);
let file = sema.parse_guess_edition(file_id).syntax().clone();
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let display_target = sema.first_crate(file_id)?.to_display_target(db);
let mut res = if range.is_empty() {
hover_offset(
@ -551,7 +551,7 @@ fn runnable_action(
Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
Definition::Function(func) => {
let src = func.source(sema.db)?;
if src.file_id != file_id {
if src.file_id.file_id().is_none_or(|f| f.file_id(sema.db) != file_id) {
cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
return None;


@ -5,14 +5,13 @@ use std::{
use either::Either;
use hir::{
ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef,
ModuleDefId, Semantics, sym,
ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
HirWrite, ModuleDef, ModuleDefId, Semantics, sym,
};
use ide_db::{FileRange, RootDatabase, base_db::salsa::AsDynDatabase, famous_defs::FamousDefs};
use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs};
use ide_db::{FxHashSet, text_edit::TextEdit};
use itertools::Itertools;
use smallvec::{SmallVec, smallvec};
use span::EditionedFileId;
use stdx::never;
use syntax::{
SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent,
@ -86,10 +85,8 @@ pub(crate) fn inlay_hints(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
.unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
let file = sema.parse(file_id);
let file = file.syntax();
let mut acc = Vec::new();
@ -139,10 +136,8 @@ pub(crate) fn inlay_hints_resolve(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
.unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
let file = sema.parse(file_id);
let file = file.syntax();
let scope = sema.scope(file)?;
@ -212,6 +207,7 @@ fn hints(
file_id: EditionedFileId,
node: SyntaxNode,
) {
let file_id = file_id.editioned_file_id(sema.db);
let Some(krate) = sema.first_crate(file_id.file_id()) else {
return;
};
@ -227,12 +223,12 @@ fn hints(
chaining::hints(hints, famous_defs, config, display_target, &expr);
adjustment::hints(hints, famous_defs, config, display_target, &expr);
match expr {
ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)),
ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, ast::Expr::from(it)),
ast::Expr::MethodCallExpr(it) => {
param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it))
param_name::hints(hints, famous_defs, config, ast::Expr::from(it))
}
ast::Expr::ClosureExpr(it) => {
closure_captures::hints(hints, famous_defs, config, file_id, it.clone());
closure_captures::hints(hints, famous_defs, config, it.clone());
closure_ret::hints(hints, famous_defs, config, display_target, it)
},
ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, file_id, it),


@ -3,7 +3,6 @@
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::{TextRange, TextSize};
use span::EditionedFileId;
use stdx::{TupleExt, never};
use syntax::ast::{self, AstNode};
@ -15,7 +14,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
closure: ast::ClosureExpr,
) -> Option<()> {
if !config.closure_capture_hints {
@ -75,10 +73,12 @@ pub(super) fn hints(
// force cache the source file, otherwise sema lookup will potentially panic
_ = sema.parse_or_expand(source.file());
source.name().and_then(|name| {
name.syntax()
.original_file_range_opt(sema.db)
.map(TupleExt::head)
.map(Into::into)
name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map(
|frange| ide_db::FileRange {
file_id: frange.file_id.file_id(sema.db),
range: frange.range,
},
)
})
}),
tooltip: None,


@ -102,7 +102,10 @@ pub(crate) fn hints(
}
};
let linked_location = source_syntax.and_then(|it| sema.original_range_opt(&it));
linked_location.map(Into::into)
linked_location.map(|frange| ide_db::FileRange {
file_id: frange.file_id.file_id(sema.db),
range: frange.range,
})
}),
);


@ -108,7 +108,7 @@ pub(super) fn hints(
.and_then(|d| source_map.pat_syntax(*d).ok())
.and_then(|d| {
Some(FileRange {
file_id: d.file_id.file_id()?.into(),
file_id: d.file_id.file_id()?.file_id(sema.db),
range: d.value.text_range(),
})
})


@ -10,7 +10,6 @@ use either::Either;
use hir::Semantics;
use ide_db::{RootDatabase, famous_defs::FamousDefs};
use span::EditionedFileId;
use stdx::to_lower_snake_case;
use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp};
@ -20,7 +19,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, krate): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
expr: ast::Expr,
) -> Option<()> {
if !config.parameter_hints {
@ -67,7 +65,10 @@ pub(super) fn hints(
_ => None,
},
}?;
sema.original_range_opt(name_syntax.syntax()).map(Into::into)
sema.original_range_opt(name_syntax.syntax()).map(|frange| ide_db::FileRange {
file_id: frange.file_id.file_id(sema.db),
range: frange.range,
})
}),
);
InlayHint {


@ -58,20 +58,19 @@ mod view_memory_layout;
mod view_mir;
mod view_syntax_tree;
use std::panic::UnwindSafe;
use std::panic::{AssertUnwindSafe, UnwindSafe};
use cfg::CfgOptions;
use fetch_crates::CrateInfo;
use hir::{ChangeWithProcMacros, sym};
use hir::{ChangeWithProcMacros, EditionedFileId, sym};
use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
salsa::{AsDynDatabase, Cancelled},
salsa::Cancelled,
},
prime_caches, symbol_index,
};
use span::EditionedFileId;
use syntax::SourceFile;
use triomphe::Arc;
use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout};
@ -306,10 +305,7 @@ impl Analysis {
pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
// FIXME edition
self.with_db(|db| {
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(file_id),
);
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
db.parse(editioned_file_id_wrapper).tree()
})
@ -338,10 +334,7 @@ impl Analysis {
/// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
let file_id = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(position.file_id),
);
let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
let parse = db.parse(file_id);
let file = parse.tree();
matching_brace::matching_brace(&file, position.offset)
@ -401,10 +394,8 @@ impl Analysis {
/// stuff like trailing commas.
pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
self.with_db(|db| {
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(frange.file_id),
);
let editioned_file_id_wrapper =
EditionedFileId::current_edition(&self.db, frange.file_id);
let parse = db.parse(editioned_file_id_wrapper);
join_lines::join_lines(config, &parse.tree(), frange.range)
})
@ -441,10 +432,7 @@ impl Analysis {
pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
// FIXME: Edition
self.with_db(|db| {
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(file_id),
);
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree())
})
@ -475,10 +463,7 @@ impl Analysis {
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
self.with_db(|db| {
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
self.db.as_dyn_database(),
EditionedFileId::current_edition(file_id),
);
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
})
@ -534,7 +519,11 @@ impl Analysis {
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
let search_scope = AssertUnwindSafe(search_scope);
self.with_db(|db| {
let _ = &search_scope;
references::find_all_refs(&Semantics::new(db), position, search_scope.0)
})
}
/// Returns a short text describing element at position.
@ -656,7 +645,11 @@ impl Analysis {
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Cancellable<Vec<Runnable>> {
self.with_db(|db| runnables::related_tests(db, position, search_scope))
let search_scope = AssertUnwindSafe(search_scope);
self.with_db(|db| {
let _ = &search_scope;
runnables::related_tests(db, position, search_scope.0)
})
}
/// Computes syntax highlighting for the given file
@ -849,6 +842,10 @@ impl Analysis {
self.with_db(|db| view_memory_layout(db, position))
}
pub fn editioned_file_id_to_vfs(&self, file_id: hir::EditionedFileId) -> FileId {
file_id.file_id(&self.db)
}
/// Performs an operation on the database that may be canceled.
///
/// rust-analyzer needs to be able to answer semantic questions about the
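
`find_all_refs` and `related_tests` now wrap their `search_scope` argument in `AssertUnwindSafe` and touch it with `let _ = &search_scope;` inside the closure handed to `with_db`. Presumably `with_db` runs the closure under a panic catcher (for cancellation), so the closure must be `UnwindSafe`, and the scope argument no longer is on its own. The extra `let _ = &search_scope;` is the usual workaround for Rust 2021 disjoint closure capture: without it the closure would capture only the inner `search_scope.0`, and the `AssertUnwindSafe` assertion would not apply. A self-contained illustration of that capture trick:

```rust
use std::cell::RefCell;
use std::panic::{AssertUnwindSafe, catch_unwind};

fn main() {
    let cell = RefCell::new(0);
    // `&RefCell<_>` is not unwind-safe; it stands in for whatever makes the
    // real argument fail the `UnwindSafe` bound.
    let wrapped = AssertUnwindSafe(&cell);
    let result = catch_unwind(|| {
        // Mention the whole wrapper so the closure captures *it*, not just the
        // inner field: with Rust 2021 disjoint capture, using only `wrapped.0`
        // below would capture the bare `&RefCell<_>` and this would not compile.
        let _ = &wrapped;
        *wrapped.0.borrow()
    });
    assert_eq!(result.ok(), Some(0));
}
```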


@ -817,14 +817,10 @@ pub(crate) fn orig_range_with_focus_r(
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
let call_kind =
|| db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind;
let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind;
let def_range = || {
db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
.def
.definition_range(db)
};
let def_range =
|| db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db);
// FIXME: Also make use of the syntax context to determine which site we are at?
let value_range = InFile::new(hir_file, value).original_node_file_range_opt(db);
@ -901,7 +897,7 @@ pub(crate) fn orig_range_with_focus_r(
UpmappingResult {
call_site: (
call_site_range.into(),
call_site_range.into_file_id(db),
call_site_focus.and_then(|hir::FileRange { file_id, range }| {
if call_site_range.file_id == file_id && call_site_range.range.contains_range(range)
{
@ -913,7 +909,7 @@ pub(crate) fn orig_range_with_focus_r(
),
def_site: def_site.map(|(def_site_range, def_site_focus)| {
(
def_site_range.into(),
def_site_range.into_file_id(db),
def_site_focus.and_then(|hir::FileRange { file_id, range }| {
if def_site_range.file_id == file_id
&& def_site_range.range.contains_range(range)
@ -934,7 +930,10 @@ fn orig_range(
value: &SyntaxNode,
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
UpmappingResult {
call_site: (InFile::new(hir_file, value).original_file_range_rooted(db).into(), None),
call_site: (
InFile::new(hir_file, value).original_file_range_rooted(db).into_file_id(db),
None,
),
def_site: None,
}
}
@ -945,7 +944,10 @@ fn orig_range_r(
value: TextRange,
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
UpmappingResult {
call_site: (InFile::new(hir_file, value).original_node_file_range(db).0.into(), None),
call_site: (
InFile::new(hir_file, value).original_node_file_range(db).0.into_file_id(db),
None,
),
def_site: None,
}
}


@ -57,7 +57,9 @@ pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<Crate> {
db.relevant_crates(file_id)
.iter()
.copied()
.filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
.filter(|&crate_id| {
db.crate_def_map(crate_id).modules_for_file(db, file_id).next().is_some()
})
.sorted()
.collect()
}


@ -68,7 +68,7 @@ pub(crate) fn find_all_refs(
.into_iter()
.map(|(file_id, refs)| {
(
file_id.into(),
file_id.file_id(sema.db),
refs.into_iter()
.map(|file_ref| (file_ref.range, file_ref.category))
.unique()
@ -307,8 +307,10 @@ fn handle_control_flow_keywords(
FilePosition { file_id, offset }: FilePosition,
) -> Option<ReferenceSearchResult> {
let file = sema.parse_guess_edition(file_id);
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
let edition = sema
.attach_first_edition(file_id)
.map(|it| it.edition(sema.db))
.unwrap_or(Edition::CURRENT);
let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?;
let references = match token.kind() {
@ -328,7 +330,7 @@ fn handle_control_flow_keywords(
.into_iter()
.map(|HighlightedRange { range, category }| (range, category))
.collect();
(file_id.into(), ranges)
(file_id.file_id(sema.db), ranges)
})
.collect();
@ -338,8 +340,8 @@ fn handle_control_flow_keywords(
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
use ide_db::FileId;
use span::EditionedFileId;
use hir::EditionedFileId;
use ide_db::{FileId, RootDatabase};
use stdx::format_to;
use crate::{SearchScope, fixture};
@ -1004,7 +1006,9 @@ pub(super) struct Foo$0 {
check_with_scope(
code,
Some(SearchScope::single_file(EditionedFileId::current_edition(FileId::from_raw(2)))),
Some(&mut |db| {
SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
}),
expect![[r#"
quux Function FileId(0) 19..35 26..30
@ -1260,11 +1264,12 @@ impl Foo {
fn check_with_scope(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
search_scope: Option<SearchScope>,
search_scope: Option<&mut dyn FnMut(&RootDatabase) -> SearchScope>,
expect: Expect,
) {
let (analysis, pos) = fixture::position(ra_fixture);
let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
let refs =
analysis.find_all_refs(pos, search_scope.map(|it| it(&analysis.db))).unwrap().unwrap();
let mut actual = String::new();
for mut refs in refs {


@ -4,10 +4,9 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method).
use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use hir::{AsAssocItem, InFile, Semantics};
use ide_db::{
FileId, FileRange, RootDatabase,
base_db::salsa::AsDynDatabase,
defs::{Definition, NameClass, NameRefClass},
rename::{IdentifierKind, bail, format_err, source_edit_from_references},
source_change::SourceChangeBuilder,
@ -86,9 +85,7 @@ pub(crate) fn rename(
let file_id = sema
.attach_first_edition(position.file_id)
.ok_or_else(|| format_err!("No references found at position"))?;
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
let source_file = sema.parse(editioned_file_id_wrapper);
let source_file = sema.parse(file_id);
let syntax = source_file.syntax();
let defs = find_definitions(&sema, syntax, position)?;
@ -123,7 +120,7 @@ pub(crate) fn rename(
source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| {
(
position.file_id,
source_edit_from_references(refs, def, new_name, file_id.edition()),
source_edit_from_references(refs, def, new_name, file_id.edition(db)),
)
}));
@ -300,7 +297,7 @@ fn find_definitions(
// remove duplicates, comparing `Definition`s
Ok(v.into_iter()
.unique_by(|&(.., def)| def)
.map(|(a, b, c)| (a.into(), b, c))
.map(|(a, b, c)| (a.into_file_id(sema.db), b, c))
.collect::<Vec<_>>()
.into_iter())
}
@ -371,10 +368,13 @@ fn rename_to_self(
let usages = def.usages(sema).all();
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(file_id, references)| {
(file_id.into(), source_edit_from_references(references, def, "self", file_id.edition()))
(
file_id.file_id(sema.db),
source_edit_from_references(references, def, "self", file_id.edition(sema.db)),
)
}));
source_change.insert_source_edit(
file_id.original_file(sema.db),
file_id.original_file(sema.db).file_id(sema.db),
TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)),
);
Ok(source_change)
@ -405,9 +405,12 @@ fn rename_self_to_param(
bail!("Cannot rename reference to `_` as it is being referenced multiple times");
}
let mut source_change = SourceChange::default();
source_change.insert_source_edit(file_id.original_file(sema.db), edit);
source_change.insert_source_edit(file_id.original_file(sema.db).file_id(sema.db), edit);
source_change.extend(usages.iter().map(|(file_id, references)| {
(file_id.into(), source_edit_from_references(references, def, new_name, file_id.edition()))
(
file_id.file_id(sema.db),
source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
)
}));
Ok(source_change)
}


@ -4,8 +4,8 @@ use arrayvec::ArrayVec;
use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
use hir::{
AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileIdExt, ModPath, Name,
PathKind, Semantics, Symbol, db::HirDatabase, sym, symbols::FxIndexSet,
AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics,
Symbol, db::HirDatabase, sym, symbols::FxIndexSet,
};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
@ -285,8 +285,10 @@ fn find_related_tests_in_module(
let file_id = mod_source.file_id.original_file(sema.db);
let mod_scope = SearchScope::file_range(hir::FileRange { file_id, range: mod_source.value });
let fn_pos =
FilePosition { file_id: file_id.into(), offset: fn_name.syntax().text_range().start() };
let fn_pos = FilePosition {
file_id: file_id.file_id(sema.db),
offset: fn_name.syntax().text_range().start(),
};
find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
}


@ -85,7 +85,7 @@ pub(crate) fn signature_help(
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
let token = sema.descend_into_macros_single_exact(token);
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let display_target = sema.first_crate(file_id)?.to_display_target(db);
for node in token.parent_ancestors() {
@ -744,13 +744,14 @@ mod tests {
pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let change_fixture = ChangeFixture::parse(ra_fixture);
let mut database = RootDatabase::default();
let change_fixture = ChangeFixture::parse(&database, ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
(database, FilePosition { file_id: file_id.into(), offset })
let position = FilePosition { file_id: file_id.file_id(&database), offset };
(database, position)
}
#[track_caller]


@ -78,7 +78,7 @@ mod tests {
ssr_assists(
&db,
&resolve,
FileRange { file_id: file_id.into(), range: range_or_offset.into() },
FileRange { file_id: file_id.file_id(&db), range: range_or_offset.into() },
)
}


@ -2,7 +2,7 @@
//! read-only code browsers and emitting LSIF
use arrayvec::ArrayVec;
use hir::{Crate, HirFileIdExt, Module, Semantics, db::HirDatabase};
use hir::{Crate, Module, Semantics, db::HirDatabase};
use ide_db::{
FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
base_db::{RootQueryDb, SourceDatabase, VfsPath},
@ -191,8 +191,10 @@ impl StaticIndex<'_> {
// hovers
let sema = hir::Semantics::new(self.db);
let root = sema.parse_guess_edition(file_id).syntax().clone();
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
let edition = sema
.attach_first_edition(file_id)
.map(|it| it.edition(self.db))
.unwrap_or(Edition::CURRENT);
let display_target = match sema.first_crate(file_id) {
Some(krate) => krate.to_display_target(sema.db),
None => return,
@ -292,11 +294,11 @@ impl StaticIndex<'_> {
let db = &analysis.db;
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id.into()).source_root_id(db);
let source_root = db.file_source_root(file_id.file_id(&analysis.db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
let is_vendored = match vendored_libs_config {
VendoredLibrariesConfig::Included { workspace_root } => source_root
.path_for_file(&file_id.into())
.path_for_file(&file_id.file_id(&analysis.db))
.is_some_and(|module_path| module_path.starts_with(workspace_root)),
VendoredLibrariesConfig::Excluded => false,
};
@ -316,7 +318,7 @@ impl StaticIndex<'_> {
if visited_files.contains(&file_id) {
continue;
}
this.add_file(file_id.into());
this.add_file(file_id.file_id(&analysis.db));
// mark the file
visited_files.insert(file_id);
}


@ -15,13 +15,8 @@ mod tests;
use std::ops::ControlFlow;
use either::Either;
use hir::{
DefWithBody, HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics,
};
use ide_db::{
FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind, base_db::salsa::AsDynDatabase,
};
use span::EditionedFileId;
use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics};
use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind};
use syntax::{
AstNode, AstToken, NodeOrToken,
SyntaxKind::*,
@ -201,13 +196,11 @@ pub(crate) fn highlight(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
.unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
// Determine the root based on the given range.
let (root, range_to_highlight) = {
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
let file = sema.parse(file_id);
let source_file = file.syntax();
match range_to_highlight {
Some(range) => {
@ -235,7 +228,7 @@ fn traverse(
krate: Option<hir::Crate>,
range_to_highlight: TextRange,
) {
let is_unlinked = sema.file_to_module_def(file_id).is_none();
let is_unlinked = sema.file_to_module_def(file_id.file_id(sema.db)).is_none();
enum AttrOrDerive {
Attr(ast::Item),
@ -509,7 +502,14 @@ fn string_injections(
{
return ControlFlow::Break(());
}
highlight_format_string(hl, sema, krate, &string, &descended_string, file_id.edition());
highlight_format_string(
hl,
sema,
krate,
&string,
&descended_string,
file_id.edition(sema.db),
);
if !string.is_raw() {
highlight_escape_string(hl, &string);


@ -3,7 +3,7 @@
use std::ops::ControlFlow;
use either::Either;
use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
use hir::{AsAssocItem, HasVisibility, Semantics};
use ide_db::{
FxHashMap, RootDatabase, SymbolKind,
defs::{Definition, IdentClass, NameClass, NameRefClass},


@ -1,9 +1,7 @@
//! Renders a bit of code as HTML.
use hir::Semantics;
use ide_db::base_db::salsa::AsDynDatabase;
use hir::{EditionedFileId, Semantics};
use oorandom::Rand32;
use span::EditionedFileId;
use stdx::format_to;
use syntax::AstNode;
@ -16,10 +14,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
let file = sema.parse(editioned_file_id_wrapper);
.unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
let file = sema.parse(file_id);
let file = file.syntax();
fn rainbowify(seed: u64) -> String {
let mut rng = Rand32::new(seed);
@ -43,7 +39,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
macro_bang: true,
syntactic_name_ref_highlighting: false,
},
file_id.into(),
file_id.file_id(db),
None,
);
let text = file.to_string();


@ -3,12 +3,11 @@
use std::mem;
use either::Either;
use hir::{HirFileId, InFile, Semantics, sym};
use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
use ide_db::{
SymbolKind, active_parameter::ActiveParameter, defs::Definition,
documentation::docs_with_rangemap, rust_doc::is_rust_fence,
};
use span::EditionedFileId;
use syntax::{
AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
ast::{self, AstNode, IsString, QuoteOffsets},


@ -15,11 +15,9 @@
mod on_enter;
use ide_db::{
FilePosition, RootDatabase,
base_db::{RootQueryDb, salsa::AsDynDatabase},
};
use span::{Edition, EditionedFileId};
use hir::EditionedFileId;
use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
use span::Edition;
use std::iter;
use syntax::{
@ -76,10 +74,7 @@ pub(crate) fn on_char_typed(
// FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
// causing the editor to feel sluggish!
let edition = Edition::CURRENT_FIXME;
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
db.as_dyn_database(),
EditionedFileId::new(position.file_id, edition),
);
let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
let file = &db.parse(editioned_file_id_wrapper);
let char_matches_position =
file.tree().syntax().text().char_at(position.offset) == Some(char_typed);


@ -2,8 +2,7 @@
//! comments, but should handle indent some time in the future as well.
use ide_db::base_db::RootQueryDb;
use ide_db::{FilePosition, RootDatabase, base_db::salsa::AsDynDatabase};
use span::EditionedFileId;
use ide_db::{FilePosition, RootDatabase};
use syntax::{
AstNode, SmolStr, SourceFile,
SyntaxKind::*,
@ -51,10 +50,8 @@ use ide_db::text_edit::TextEdit;
//
// ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
db.as_dyn_database(),
EditionedFileId::current_edition(position.file_id),
);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
let parse = db.parse(editioned_file_id_wrapper);
let file = parse.tree();
let token = file.syntax().token_at_offset(position.offset).left_biased()?;


@ -1,6 +1,5 @@
use hir::{Semantics, db::DefDatabase};
use hir::{EditionedFileId, Semantics, db::DefDatabase};
use ide_db::{FileId, RootDatabase};
use span::EditionedFileId;
// Feature: Debug ItemTree
//
@ -13,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition())
.unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
}