Merge pull request #19617 from ChayimFriedman2/more-actual

internal: Make `HirFileId`, `EditionedFileId` and macro files Salsa structs
Chayim Refael Friedman 2025-04-19 19:21:53 +00:00 committed by GitHub
commit 150bb4a00f
195 changed files with 1473 additions and 1525 deletions
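
The diff below is large but mostly mechanical. As a rough mental model of what making `EditionedFileId` (and macro files) Salsa structs means for callers, here is a minimal, self-contained sketch with toy types (the `Db`, the hand-rolled interner and the accessors are stand-ins, not the real rust-analyzer or Salsa API): an interned id is just an index, so reading its payload back, e.g. `file_id(db)` or `edition(db)`, requires the database. That is the signature change repeated through the hunks below, and `EditionedFileId::from_span(db, ..)` in the diagnostics code is the same idea applied to the raw ids stored in span anchors.

```rust
// Toy model of the new representation, NOT the real rust-analyzer/Salsa API:
// an interned `EditionedFileId` is just an index, so its payload (file +
// edition) can only be read back through the database.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Edition {
    Edition2021,
    Edition2024,
}

/// Interned handle: cheap to copy, hash and compare.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct EditionedFileId(u32);

#[derive(Default)]
struct Db {
    data: Vec<(FileId, Edition)>,
    map: HashMap<(FileId, Edition), EditionedFileId>,
}

impl Db {
    /// Stand-in for `EditionedFileId::new(db, file, edition)`.
    fn intern(&mut self, file: FileId, edition: Edition) -> EditionedFileId {
        if let Some(&id) = self.map.get(&(file, edition)) {
            return id;
        }
        let id = EditionedFileId(self.data.len() as u32);
        self.data.push((file, edition));
        self.map.insert((file, edition), id);
        id
    }
}

impl EditionedFileId {
    // Mirrors the accessors used throughout the diff: reading the payload
    // of an interned id requires the database.
    fn file_id(self, db: &Db) -> FileId {
        db.data[self.0 as usize].0
    }
    fn edition(self, db: &Db) -> Edition {
        db.data[self.0 as usize].1
    }
}

fn main() {
    let mut db = Db::default();
    let id = db.intern(FileId(7), Edition::Edition2024);
    let same = db.intern(FileId(7), Edition::Edition2024);
    let other = db.intern(FileId(7), Edition::Edition2021);
    assert_eq!(id, same); // interning deduplicates identical payloads
    assert_ne!(id, other); // same file, different edition -> different id
    assert_eq!(id.file_id(&db), FileId(7));
    assert_eq!(id.edition(&db), Edition::Edition2024);
    println!("{:?} -> {:?} / {:?}", id, id.file_id(&db), id.edition(&db));
}
```

In the real code the interner is Salsa rather than a hand-rolled map, so the ids presumably also participate in dependency tracking, but the consequence for call sites is the same.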

View file

@ -6,9 +6,8 @@ use hir_def::{
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _},
};
use hir_expand::{HirFileId, InFile};
use hir_expand::{EditionedFileId, HirFileId, InFile};
use hir_ty::db::InternedClosure;
use span::EditionedFileId;
use syntax::ast;
use tt::TextRange;

View file

@ -82,7 +82,7 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId};
use span::{Edition, FileId};
use stdx::{format_to, impl_from, never};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
@ -129,7 +129,7 @@ pub use {
{ModuleDefId, TraitId},
},
hir_expand::{
ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind,
EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
files::{
@ -954,10 +954,11 @@ fn macro_call_diagnostics(
let node =
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let precise_location = if err.span().anchor.file_id == file_id {
let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == file_id {
Some(
err.span().range
+ db.ast_id_map(err.span().anchor.file_id.into())
+ db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
@ -1926,7 +1927,7 @@ impl DefWithBody {
source_map
.macro_calls()
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id.macro_call_id, acc));
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
expr_store_diagnostics(db, acc, &source_map);
@ -2145,10 +2146,11 @@ fn expr_store_diagnostics(
ExpressionStoreDiagnostics::MacroError { node, err } => {
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let precise_location = if err.span().anchor.file_id == node.file_id {
let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == node.file_id {
Some(
err.span().range
+ db.ast_id_map(err.span().anchor.file_id.into())
+ db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
@ -4475,7 +4477,7 @@ impl Impl {
let src = self.source(db)?;
let macro_file = src.file_id.macro_file()?;
let loc = macro_file.macro_call_id.lookup(db);
let loc = macro_file.lookup(db);
let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
let module_id = self.id.lookup(db).container;
@ -4488,9 +4490,8 @@ impl Impl {
}
_ => return None,
};
let file_id = MacroFileId { macro_call_id: derive_attr };
let path = db
.parse_macro_expansion(file_id)
.parse_macro_expansion(derive_attr)
.value
.0
.syntax_node()
@ -4498,7 +4499,7 @@ impl Impl {
.nth(derive_index as usize)
.and_then(<ast::Attr as AstNode>::cast)
.and_then(|it| it.path())?;
Some(InMacroFile { file_id, value: path })
Some(InMacroFile { file_id: derive_attr, value: path })
}
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
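
The hunk above (in `impl Impl`) also shows a simplification that recurs in later files: since a macro file is now identified by its `MacroCallId`, the one-field `MacroFileId { macro_call_id }` wrapper is gone and APIs such as `parse_macro_expansion` and `InMacroFile` take the call id directly. A stand-in sketch (shape only, not the real rust-analyzer signatures):

```rust
// Stand-in sketch of the wrapper removal above; only the shape matters,
// the types and body are not the real rust-analyzer signatures.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct MacroCallId(u32);

// Old shape (removed by this PR):
//   struct MacroFileId { macro_call_id: MacroCallId }
//   fn parse_macro_expansion(file: MacroFileId) -> Expansion { .. }
// which forced callers to write
//   parse_macro_expansion(MacroFileId { macro_call_id: derive_attr })

// New shape: the macro call id itself identifies the expansion file.
fn parse_macro_expansion(call: MacroCallId) -> String {
    format!("expansion of macro call {}", call.0)
}

fn main() {
    let derive_attr = MacroCallId(7);
    println!("{}", parse_macro_expansion(derive_attr));
}
```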

View file

@ -20,11 +20,11 @@ use hir_def::{
type_ref::Mutability,
};
use hir_expand::{
ExpandResult, FileRange, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::InRealFile,
files::{FileRangeWrapper, InRealFile},
hygiene::SyntaxContextExt as _,
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
@ -35,7 +35,7 @@ use intern::{Interned, Symbol, sym};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContext};
use span::{Edition, FileId, SyntaxContext};
use stdx::TupleExt;
use syntax::{
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@ -46,10 +46,10 @@ use syntax::{
use crate::{
Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource,
HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro,
Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait,
TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField,
Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
@ -136,8 +136,8 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
/// MacroCall to its expansion's MacroCallId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@ -305,8 +305,8 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
}
pub fn parse(&self, file_id: base_db::EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.editioned_file_id(self.db).into();
pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.into();
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), hir_file_id);
tree
@ -322,27 +322,26 @@ impl<'db> SemanticsImpl<'db> {
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
Some(EditionedFileId::new(
self.db,
file,
self.file_to_module_defs(file).next()?.krate().edition(self.db),
))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let editioned_file_id = self
let file_id = self
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let file_id = base_db::EditionedFileId::new(self.db, editioned_file_id);
.unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), editioned_file_id.into());
self.cache(tree.syntax().clone(), file_id.into());
tree
}
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id()).next()?;
match file_id {
HirFileId::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
let def_map = self.db.crate_def_map(module.krate().id);
match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None,
@ -357,9 +356,8 @@ impl<'db> SemanticsImpl<'db> {
_ => unreachable!("FileId can only belong to a file module"),
}
}
HirFileIdRepr::MacroFile(macro_file) => {
let node =
self.db.lookup_intern_macro_call(macro_file.macro_call_id).to_node(self.db);
HirFileId::MacroFile(macro_file) => {
let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
let root = find_root(&node.value);
self.cache(root, node.file_id);
Some(node)
@ -384,7 +382,7 @@ impl<'db> SemanticsImpl<'db> {
node
}
pub fn expand(&self, file_id: MacroFileId) -> ExpandResult<SyntaxNode> {
pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
self.cache(res.value.clone(), file_id.into());
res
@ -402,13 +400,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
let file_id = self.find_file(attr.syntax()).file_id;
let krate = match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id()).next()?.krate().id
}
HirFileIdRepr::MacroFile(macro_file) => {
self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
let krate = match file_id {
HirFileId::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id
}
HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
};
hir_expand::check_cfg_attr_value(self.db, attr, krate)
}
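
Another pattern that repeats throughout: `HirFileId` is now matched as a plain enum with `FileId` and `MacroFile` variants (the latter carrying the `MacroCallId`), so callers dispatch on it directly instead of going through `file_id.repr()` and `HirFileIdRepr`. A self-contained stand-in of that dispatch (the types here are simplified, not the real definitions):

```rust
// Stand-in types sketching the new dispatch; the real `HirFileId` lives in
// `hir_expand` and its variants wrap the real id types.
#[derive(Clone, Copy, Debug)]
struct EditionedFileId(u32);
#[derive(Clone, Copy, Debug)]
struct MacroCallId(u32);

#[derive(Clone, Copy, Debug)]
enum HirFileId {
    FileId(EditionedFileId),
    MacroFile(MacroCallId),
}

fn describe(file_id: HirFileId) -> String {
    // Before: `match file_id.repr() { HirFileIdRepr::FileId(..) => .., .. }`
    // After:  callers match the enum directly.
    match file_id {
        HirFileId::FileId(file) => format!("source file {}", file.0),
        HirFileId::MacroFile(call) => format!("expansion of macro call {}", call.0),
    }
}

fn main() {
    println!("{}", describe(HirFileId::FileId(EditionedFileId(1))));
    println!("{}", describe(HirFileId::MacroFile(MacroCallId(42))));
}
```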
@ -423,7 +419,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = sa.expand(self.db, macro_call)?;
let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id);
let macro_call = self.db.lookup_intern_macro_call(file_id);
let skip = matches!(
macro_call.def.kind,
@ -456,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.expand(macro_call_id.as_macro_file()))
Some(self.expand(macro_call_id))
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@ -465,7 +461,7 @@ impl<'db> SemanticsImpl<'db> {
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?;
Some(self.parse_or_expand(call_id.as_file()))
Some(self.parse_or_expand(call_id.into()))
}
pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@ -485,7 +481,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_macro_calls(attr)?
.into_iter()
.flat_map(|call| {
let file_id = call?.as_macro_file();
let file_id = call?;
let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
let root_node = value.0.syntax_node();
self.cache(root_node.clone(), file_id.into());
@ -526,7 +522,7 @@ impl<'db> SemanticsImpl<'db> {
Some(result)
}
pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroFileId)>> {
pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
@ -542,7 +538,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter()
.filter(|&(name, _, _)| *name == attr_name)
.map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file()))
.map(|&(_, macro_, call)| (macro_.into(), call))
.collect();
res.is_empty().not().then_some(res)
}
@ -564,7 +560,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_file = analyzer.expansion(macro_call)?;
hir_expand::db::expand_speculative(
self.db,
macro_file.macro_call_id,
macro_file,
speculative_args.syntax(),
token_to_map,
)
@ -572,16 +568,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn speculative_expand_raw(
&self,
macro_file: MacroFileId,
macro_file: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
hir_expand::db::expand_speculative(
self.db,
macro_file.macro_call_id,
speculative_args,
token_to_map,
)
hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
}
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
@ -1025,7 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
None => {
stack.push((
file_id.into(),
smallvec![(token, SyntaxContext::root(file_id.edition()))],
smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
));
}
}
@ -1054,7 +1045,6 @@ impl<'db> SemanticsImpl<'db> {
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file();
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index()
@ -1083,7 +1073,7 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
return process_expansion_for_token(&mut stack, file_id);
return process_expansion_for_token(&mut stack, call_id);
}
// Then check for token trees, that means we are either in a function-like macro or
@ -1127,7 +1117,7 @@ impl<'db> SemanticsImpl<'db> {
.eager_arg(self.db)
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(&mut stack, arg.as_macro_file())
process_expansion_for_token(&mut stack, arg)
}))
}
// derive or derive helper
@ -1151,7 +1141,6 @@ impl<'db> SemanticsImpl<'db> {
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
@ -1159,7 +1148,7 @@ impl<'db> SemanticsImpl<'db> {
!text_range.contains_range(t.text_range())
});
return process_expansion_for_token(
&mut stack, file_id,
&mut stack, call_id,
);
}
None => Some(adt),
@ -1207,10 +1196,7 @@ impl<'db> SemanticsImpl<'db> {
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
));
res = res.or(process_expansion_for_token(&mut stack, *derive));
}
res
}
@ -1288,10 +1274,14 @@ impl<'db> SemanticsImpl<'db> {
)
}
pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
pub fn diagnostics_display_range(
&self,
src: InFile<SyntaxNodePtr>,
) -> FileRangeWrapper<FileId> {
let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range_rooted(self.db)
let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}
fn token_ancestors_with_macros(
@ -1890,20 +1880,16 @@ fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
use span::HirFileIdRepr;
let db: &dyn ExpandDatabase = ctx.db;
let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.ast_id() {
Either::Left(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let node = match it.file_id {
HirFileId::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
@ -1911,13 +1897,11 @@ fn macro_call_to_macro_id(
ctx.macro_to_def(InFile::new(it.file_id, &node))
}
Either::Right(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let node = match it.file_id {
HirFileId::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
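
One more boundary worth noting from this file: `diagnostics_display_range` now returns `FileRangeWrapper<FileId>`, unwrapping the interned `EditionedFileId` back into a plain `FileId` via `file_id.file_id(self.db)` before the range leaves the `hir` layer. A simplified, self-contained sketch of that unwrapping (toy `Db` and types, not the real ones):

```rust
// Stand-in sketch of the `diagnostics_display_range` change above: ranges
// computed against the interned `EditionedFileId` are converted back to a
// plain `FileId` for IDE-facing consumers. `FileRangeWrapper` here is a
// simplified stand-in for the generic wrapper in `hir_expand`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileId(u32);
#[derive(Clone, Copy, Debug)]
struct TextRange { start: u32, end: u32 }

#[derive(Clone, Copy, Debug)]
struct EditionedFileId(u32);

struct Db {
    files: Vec<FileId>, // payload of each interned id, indexed by the id
}

impl EditionedFileId {
    fn file_id(self, db: &Db) -> FileId {
        db.files[self.0 as usize]
    }
}

#[derive(Debug)]
struct FileRangeWrapper<F> {
    file_id: F,
    range: TextRange,
}

fn to_display_range(
    db: &Db,
    file_id: EditionedFileId,
    range: TextRange,
) -> FileRangeWrapper<FileId> {
    // Mirrors `FileRangeWrapper { file_id: file_id.file_id(self.db), range }`
    // from the hunk above.
    FileRangeWrapper { file_id: file_id.file_id(db), range }
}

fn main() {
    let db = Db { files: vec![FileId(10)] };
    let wrapped = to_display_range(&db, EditionedFileId(0), TextRange { start: 4, end: 9 });
    println!("{:?}", wrapped);
}
```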

View file

@ -197,7 +197,7 @@ impl ChildBySource for DefWithBodyId {
}
sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
res[keys::MACRO_CALL].insert(ast.value, exp_id);
});
for (block, def_map) in body.blocks(db) {

View file

@ -98,12 +98,12 @@ use hir_def::{
hir::{BindingId, Expr, LabelId},
};
use hir_expand::{
ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
attrs::AttrId, name::AsName,
EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId,
name::AsName,
};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{EditionedFileId, FileId};
use span::FileId;
use stdx::impl_from;
use syntax::{
AstNode, AstPtr, SyntaxNode,
@ -116,9 +116,9 @@ use crate::{InFile, InlineAsmOperand, db::HirDatabase, semantics::child_by_sourc
#[derive(Default)]
pub(super) struct SourceToDefCache {
pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
expansion_info_cache: FxHashMap<MacroCallId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroCallId>>,
/// Rootnode to HirFileId cache
pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>,
}
@ -138,14 +138,14 @@ impl SourceToDefCache {
&mut self,
db: &dyn HirDatabase,
file: EditionedFileId,
) -> Option<MacroFileId> {
) -> Option<MacroCallId> {
if let Some(&m) = self.included_file_cache.get(&file) {
return m;
}
self.included_file_cache.insert(file, None);
for &crate_id in db.relevant_crates(file.into()).iter() {
for &crate_id in db.relevant_crates(file.file_id(db)).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
self.included_file_cache.insert(file_id, Some(macro_call_id));
});
}
self.included_file_cache.get(&file).copied().flatten()
@ -154,7 +154,7 @@ impl SourceToDefCache {
pub(super) fn get_or_insert_expansion(
&mut self,
db: &dyn HirDatabase,
macro_file: MacroFileId,
macro_file: MacroCallId,
) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(db);
@ -184,7 +184,7 @@ impl SourceToDefCtx<'_, '_> {
let n_mods = mods.len();
let modules = |file| {
crate_def_map
.modules_for_file(file)
.modules_for_file(self.db, file)
.map(|local_id| crate_def_map.module_id(local_id))
};
mods.extend(modules(file));
@ -193,18 +193,16 @@ impl SourceToDefCtx<'_, '_> {
self.db
.include_macro_invoc(crate_id)
.iter()
.filter(|&&(_, file_id)| file_id == file)
.filter(|&&(_, file_id)| file_id.file_id(self.db) == file)
.flat_map(|&(macro_call_id, file_id)| {
self.cache
.included_file_cache
.insert(file_id, Some(MacroFileId { macro_call_id }));
self.cache.included_file_cache.insert(file_id, Some(macro_call_id));
modules(
macro_call_id
.lookup(self.db)
.kind
.file_id()
.original_file(self.db)
.file_id(),
.file_id(self.db),
)
}),
);
@ -234,7 +232,7 @@ impl SourceToDefCtx<'_, '_> {
}
None => {
let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied()
self.file_to_def(file_id.file_id(self.db)).first().copied()
}
}?;
@ -247,7 +245,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied()
self.file_to_def(file_id.file_id(self.db)).first().copied()
}
pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
@ -526,8 +524,10 @@ impl SourceToDefCtx<'_, '_> {
return Some(def);
}
let def =
self.file_to_def(src.file_id.original_file(self.db).file_id()).first().copied()?;
let def = self
.file_to_def(src.file_id.original_file(self.db).file_id(self.db))
.first()
.copied()?;
Some(def.into())
}

View file

@ -29,7 +29,7 @@ use hir_def::{
type_ref::{Mutability, TypeRef, TypeRefId},
};
use hir_expand::{
HirFileId, InFile, MacroFileId, MacroFileIdExt,
HirFileId, InFile, MacroCallId,
mod_path::{ModPath, PathKind, path},
name::{AsName, Name},
};
@ -47,10 +47,9 @@ use hir_ty::{
use intern::sym;
use itertools::Itertools;
use smallvec::SmallVec;
use syntax::ast::{RangeItem, RangeOp};
use syntax::{
SyntaxKind, SyntaxNode, TextRange, TextSize,
ast::{self, AstNode},
ast::{self, AstNode, RangeItem, RangeOp},
};
use triomphe::Arc;
@ -216,7 +215,7 @@ impl SourceAnalyzer {
})
}
pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.store_sm()?.expansion(node)
}
@ -750,7 +749,7 @@ impl SourceAnalyzer {
let bs = self.store_sm()?;
bs.expansion(macro_call).and_then(|it| {
// FIXME: Block def maps
let def = it.macro_call_id.lookup(db).def;
let def = it.lookup(db).def;
db.crate_def_map(def.krate)
.macro_def_to_macro_id
.get(&def.kind.erased_ast_id())
@ -1197,15 +1196,11 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroFileId> {
) -> Option<MacroCallId> {
self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
self.resolver
.item_scope()
.macro_invoc(
macro_call
.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
)
.map(|it| it.as_macro_file())
self.resolver.item_scope().macro_invoc(
macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
)
})
}