Merge pull request #19617 from ChayimFriedman2/more-actual

internal: Make `HirFileId`, `EditionedFileId` and macro files Salsa structs
Chayim Refael Friedman authored 2025-04-19 19:21:53 +00:00, committed by GitHub
commit 150bb4a00f
195 changed files with 1473 additions and 1525 deletions
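The gist of the change, visible throughout the hunks below: `EditionedFileId` and `MacroCallId` become Salsa interned structs (and `HirFileId` a plain enum over them), so accessors that used to be pure field reads now thread the database through. A minimal before/after sketch of the call-site pattern (names taken from this diff; the surrounding function is hypothetical):

```rust
// Hypothetical call site, to illustrate the API shift in this PR.
fn root_file(db: &dyn salsa::Database, data: &BuiltCrateData) -> (vfs::FileId, span::Edition) {
    // Before: `data.root_file_id()` returned a packed span::EditionedFileId,
    // and `file_id()` / `edition()` were plain field reads.
    // After: the id is interned in the database, so reads take `db`.
    let id: EditionedFileId = data.root_file_id(db);
    id.unpack(db)
}
```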


@ -16,11 +16,11 @@ use intern::Symbol;
use la_arena::{Arena, Idx, RawIdx};
use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use salsa::{Durability, Setter};
use span::{Edition, EditionedFileId};
use span::Edition;
use triomphe::Arc;
use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
use crate::{CrateWorkspaceData, RootQueryDb};
use crate::{CrateWorkspaceData, EditionedFileId, RootQueryDb};
pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
@ -773,8 +773,8 @@ pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet<
}
impl BuiltCrateData {
pub fn root_file_id(&self) -> EditionedFileId {
EditionedFileId::new(self.root_file_id, self.edition)
pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
EditionedFileId::new(db, self.root_file_id, self.edition)
}
}


@ -20,6 +20,7 @@ use rustc_hash::{FxHashSet, FxHasher};
pub use salsa::{self};
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use span::Edition;
use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@ -151,21 +152,39 @@ impl Files {
}
}
#[salsa::interned(no_lifetime)]
#[salsa::interned(no_lifetime, constructor=from_span)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
impl EditionedFileId {
pub fn file_id(&self, db: &dyn salsa::Database) -> vfs::FileId {
// Salsa already uses the name `new`...
#[inline]
pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
}
#[inline]
pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
EditionedFileId::new(db, file_id, Edition::CURRENT)
}
#[inline]
pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
let id = self.editioned_file_id(db);
id.file_id()
}
fn unpack(&self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
#[inline]
pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
let id = self.editioned_file_id(db);
(id.file_id(), id.edition())
}
#[inline]
pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
self.editioned_file_id(db).edition()
}
}
#[salsa::input]
@ -189,7 +208,7 @@ pub struct SourceRootInput {
#[query_group::query_group]
pub trait RootQueryDb: SourceDatabase + salsa::Database {
/// Parses the file into the syntax tree.
#[salsa::invoke_actual(parse)]
#[salsa::invoke(parse)]
#[salsa::lru(128)]
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
@ -201,6 +220,7 @@ pub trait RootQueryDb: SourceDatabase + salsa::Database {
fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;
/// Crates whose root file is in `id`.
#[salsa::invoke_interned(source_root_crates)]
fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;
#[salsa::transparent]
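For context, a minimal usage sketch of the constructors added above; interning the same `(FileId, Edition)` pair twice yields the same id, and reads go back through the database (the function itself is made up for illustration):

```rust
fn example(db: &dyn salsa::Database, file_id: FileId) {
    let a = EditionedFileId::new(db, file_id, Edition::CURRENT);
    let b = EditionedFileId::current_edition(db, file_id);
    // Interned structs compare by identity, and interning is idempotent.
    assert_eq!(a, b);
    // Accessors now take `db` instead of unpacking bits locally.
    let (file, edition) = a.unpack(db);
    assert_eq!((file, edition), (file_id, Edition::CURRENT));
}
```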


@ -1,10 +1,9 @@
//! Defines database & queries for name resolution.
use base_db::{Crate, RootQueryDb, SourceDatabase};
use either::Either;
use hir_expand::{HirFileId, MacroDefId, db::ExpandDatabase};
use hir_expand::{EditionedFileId, HirFileId, MacroCallId, MacroDefId, db::ExpandDatabase};
use intern::sym;
use la_arena::ArenaMap;
use span::{EditionedFileId, MacroCallId};
use syntax::{AstPtr, ast};
use thin_vec::ThinVec;
use triomphe::Arc;
@ -109,26 +108,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(ItemTree::file_item_tree_query)]
fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
#[salsa::invoke_actual(ItemTree::block_item_tree_query)]
#[salsa::invoke(ItemTree::block_item_tree_query)]
fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
#[salsa::invoke_actual(DefMap::crate_local_def_map_query)]
#[salsa::invoke(DefMap::crate_local_def_map_query)]
fn crate_local_def_map(&self, krate: Crate) -> (Arc<DefMap>, Arc<LocalDefMap>);
#[salsa::invoke_actual(DefMap::crate_def_map_query)]
#[salsa::invoke(DefMap::crate_def_map_query)]
fn crate_def_map(&self, krate: Crate) -> Arc<DefMap>;
/// Computes the block-level `DefMap`.
#[salsa::invoke_actual(DefMap::block_def_map_query)]
#[salsa::invoke(DefMap::block_def_map_query)]
fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
/// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
#[salsa::invoke_actual(macro_def)]
#[salsa::invoke(macro_def)]
fn macro_def(&self, m: MacroId) -> MacroDefId;
// region:data
#[salsa::invoke_actual(VariantFields::query)]
#[salsa::invoke(VariantFields::query)]
fn variant_fields_with_source_map(
&self,
id: VariantId,
@ -139,24 +138,24 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
self.enum_variants_with_diagnostics(id).0
}
#[salsa::invoke_actual(EnumVariants::enum_variants_query)]
#[salsa::invoke(EnumVariants::enum_variants_query)]
fn enum_variants_with_diagnostics(
&self,
id: EnumId,
) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>);
#[salsa::transparent]
#[salsa::invoke_actual(ImplItems::impl_items_query)]
#[salsa::invoke(ImplItems::impl_items_query)]
fn impl_items(&self, e: ImplId) -> Arc<ImplItems>;
#[salsa::invoke_actual(ImplItems::impl_items_with_diagnostics_query)]
#[salsa::invoke(ImplItems::impl_items_with_diagnostics_query)]
fn impl_items_with_diagnostics(&self, e: ImplId) -> (Arc<ImplItems>, DefDiagnostics);
#[salsa::transparent]
#[salsa::invoke_actual(TraitItems::trait_items_query)]
#[salsa::invoke(TraitItems::trait_items_query)]
fn trait_items(&self, e: TraitId) -> Arc<TraitItems>;
#[salsa::invoke_actual(TraitItems::trait_items_with_diagnostics_query)]
#[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)]
fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitItems>, DefDiagnostics);
#[salsa::tracked]
@ -214,61 +213,61 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
self.type_alias_signature_with_source_map(e).0
}
#[salsa::invoke_actual(TraitSignature::query)]
#[salsa::invoke(TraitSignature::query)]
fn trait_signature_with_source_map(
&self,
trait_: TraitId,
) -> (Arc<TraitSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(ImplSignature::query)]
#[salsa::invoke(ImplSignature::query)]
fn impl_signature_with_source_map(
&self,
impl_: ImplId,
) -> (Arc<ImplSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(StructSignature::query)]
#[salsa::invoke(StructSignature::query)]
fn struct_signature_with_source_map(
&self,
struct_: StructId,
) -> (Arc<StructSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(UnionSignature::query)]
#[salsa::invoke(UnionSignature::query)]
fn union_signature_with_source_map(
&self,
union_: UnionId,
) -> (Arc<UnionSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(EnumSignature::query)]
#[salsa::invoke(EnumSignature::query)]
fn enum_signature_with_source_map(
&self,
e: EnumId,
) -> (Arc<EnumSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(ConstSignature::query)]
#[salsa::invoke(ConstSignature::query)]
fn const_signature_with_source_map(
&self,
e: ConstId,
) -> (Arc<ConstSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(StaticSignature::query)]
#[salsa::invoke(StaticSignature::query)]
fn static_signature_with_source_map(
&self,
e: StaticId,
) -> (Arc<StaticSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(FunctionSignature::query)]
#[salsa::invoke(FunctionSignature::query)]
fn function_signature_with_source_map(
&self,
e: FunctionId,
) -> (Arc<FunctionSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(TraitAliasSignature::query)]
#[salsa::invoke(TraitAliasSignature::query)]
fn trait_alias_signature_with_source_map(
&self,
e: TraitAliasId,
) -> (Arc<TraitAliasSignature>, Arc<ExpressionStoreSourceMap>);
#[salsa::invoke_actual(TypeAliasSignature::query)]
#[salsa::invoke(TypeAliasSignature::query)]
fn type_alias_signature_with_source_map(
&self,
e: TypeAliasId,
@ -283,7 +282,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(Body::body_query)]
fn body(&self, def: DefWithBodyId) -> Arc<Body>;
#[salsa::invoke_actual(ExprScopes::expr_scopes_query)]
#[salsa::invoke(ExprScopes::expr_scopes_query)]
fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
#[salsa::transparent]
@ -306,17 +305,18 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
// region:attrs
#[salsa::invoke_actual(Attrs::fields_attrs_query)]
#[salsa::invoke(Attrs::fields_attrs_query)]
fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
// should this really be a query?
#[salsa::invoke_actual(crate::attr::fields_attrs_source_map)]
#[salsa::invoke(crate::attr::fields_attrs_source_map)]
fn fields_attrs_source_map(
&self,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
#[salsa::invoke(AttrsWithOwner::attrs_query)]
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
#[salsa::transparent]
@ -328,39 +328,39 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(LangItems::lang_item_query)]
fn lang_item(&self, start_crate: Crate, item: LangItem) -> Option<LangItemTarget>;
#[salsa::invoke_actual(ImportMap::import_map_query)]
#[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
// region:visibilities
#[salsa::invoke_actual(visibility::field_visibilities_query)]
#[salsa::invoke(visibility::field_visibilities_query)]
fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
// FIXME: unify function_visibility and const_visibility?
#[salsa::invoke_actual(visibility::function_visibility_query)]
#[salsa::invoke(visibility::function_visibility_query)]
fn function_visibility(&self, def: FunctionId) -> Visibility;
#[salsa::invoke_actual(visibility::const_visibility_query)]
#[salsa::invoke(visibility::const_visibility_query)]
fn const_visibility(&self, def: ConstId) -> Visibility;
#[salsa::invoke_actual(visibility::type_alias_visibility_query)]
#[salsa::invoke(visibility::type_alias_visibility_query)]
fn type_alias_visibility(&self, def: TypeAliasId) -> Visibility;
// endregion:visibilities
#[salsa::invoke_actual(LangItems::crate_lang_items_query)]
#[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: Crate) -> Option<Arc<LangItems>>;
#[salsa::invoke_actual(crate::lang_item::notable_traits_in_deps)]
#[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>;
#[salsa::invoke_actual(crate::lang_item::crate_notable_traits)]
#[salsa::invoke(crate::lang_item::crate_notable_traits)]
fn crate_notable_traits(&self, krate: Crate) -> Option<Arc<[TraitId]>>;
#[salsa::invoke_actual(crate_supports_no_std)]
#[salsa::invoke(crate_supports_no_std)]
fn crate_supports_no_std(&self, crate_id: Crate) -> bool;
#[salsa::invoke_actual(include_macro_invoc)]
#[salsa::invoke(include_macro_invoc)]
fn include_macro_invoc(&self, crate_id: Crate) -> Arc<[(MacroCallId, EditionedFileId)]>;
}
@ -382,7 +382,7 @@ fn include_macro_invoc(
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
let file = crate_id.data(db).root_file_id();
let file = crate_id.data(db).root_file_id(db);
let item_tree = db.file_item_tree(file.into());
let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
for attr in &**attrs {


@ -13,11 +13,11 @@ use std::ops::{Deref, Index};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{ExpandError, InFile, mod_path::ModPath, name::Name};
use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, MacroFileId, SyntaxContext};
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
use triomphe::Arc;
use tt::TextRange;
@ -138,7 +138,7 @@ pub struct ExpressionStoreSourceMap {
template_map: Option<Box<FormatTemplate>>,
pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroFileId>,
pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
/// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
@ -645,12 +645,12 @@ impl ExpressionStoreSourceMap {
self.expr_map.get(&src).cloned()
}
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
let src = node.map(AstPtr::new);
self.expansions.get(&src).cloned()
}
pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroFileId)> + '_ {
pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
self.expansions.iter().map(|(&a, &b)| (a, b))
}
@ -696,11 +696,11 @@ impl ExpressionStoreSourceMap {
self.expr_map.get(&src).copied()
}
pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroFileId)> {
pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
self.expansions.iter()
}
pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.expansions.get(&node.map(AstPtr::new)).copied()
}


@ -4,11 +4,9 @@ use std::mem;
use base_db::Crate;
use drop_bomb::DropBomb;
use hir_expand::attrs::RawAttrs;
use hir_expand::eager::EagerCallBackFn;
use hir_expand::{
ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
mod_path::ModPath, span_map::SpanMap,
attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
};
use span::{AstIdMap, Edition, SyntaxContext};
use syntax::ast::HasAttrs;
@ -183,8 +181,7 @@ impl Expander {
));
}
let macro_file = call_id.as_macro_file();
let res = db.parse_macro_expansion(macro_file);
let res = db.parse_macro_expansion(call_id);
let err = err.or(res.err);
ExpandResult {
@ -192,7 +189,7 @@ impl Expander {
let parse = res.value.0.cast::<T>();
self.recursion_depth += 1;
let old_file_id = std::mem::replace(&mut self.current_file_id, macro_file.into());
let old_file_id = std::mem::replace(&mut self.current_file_id, call_id.into());
let old_span_map =
std::mem::replace(&mut self.span_map, db.span_map(self.current_file_id));
let prev_ast_id_map =


@ -9,13 +9,12 @@ use std::mem;
use either::Either;
use hir_expand::{
InFile, Lookup, MacroDefId,
HirFileId, InFile, Lookup, MacroDefId,
mod_path::tool_path,
name::{AsName, Name},
};
use intern::{Symbol, sym};
use rustc_hash::FxHashMap;
use span::HirFileId;
use stdx::never;
use syntax::{
AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
@ -1887,10 +1886,7 @@ impl ExprCollector<'_> {
self.module.krate(),
resolver,
&mut |ptr, call| {
_ = self
.source_map
.expansions
.insert(ptr.map(|(it, _)| it), call.as_macro_file());
_ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
},
)
}
@ -2516,7 +2512,7 @@ impl ExprCollector<'_> {
None
} else {
hygiene_id.lookup().outer_expn(self.db).map(|expansion| {
let expansion = self.db.lookup_intern_macro_call(expansion);
let expansion = self.db.lookup_intern_macro_call(expansion.into());
(hygiene_id.lookup().parent(self.db), expansion.def)
})
};
@ -2546,7 +2542,7 @@ impl ExprCollector<'_> {
hygiene_id =
HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
let expansion = self.db.lookup_intern_macro_call(expansion);
let expansion = self.db.lookup_intern_macro_call(expansion.into());
(parent_ctx.parent(self.db), expansion.def)
});
}


@ -215,7 +215,7 @@ pub(super) fn lower_path(
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) {
if collector.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
kind = match resolve_crate_root(collector.db, syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,


@ -319,7 +319,6 @@ fn compute_expr_scopes(
mod tests {
use base_db::RootQueryDb;
use hir_expand::{InFile, name::AsName};
use salsa::AsDynDatabase;
use span::FileId;
use syntax::{AstNode, algo::find_node_at_offset, ast};
use test_fixture::WithFixture;
@ -331,7 +330,7 @@ mod tests {
let krate = db.test_crate();
let crate_def_map = db.crate_def_map(krate);
let module = crate_def_map.modules_for_file(file_id).next().unwrap();
let module = crate_def_map.modules_for_file(db, file_id).next().unwrap();
let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
match def.take_values().unwrap() {
ModuleDefId::FunctionId(it) => it,
@ -354,11 +353,9 @@ mod tests {
let editioned_file_id = position.file_id;
let offset = position.offset;
let (file_id, _) = editioned_file_id.unpack();
let editioned_file_id_wrapper =
base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let (file_id, _) = editioned_file_id.unpack(&db);
let file_syntax = db.parse(editioned_file_id_wrapper).syntax_node();
let file_syntax = db.parse(editioned_file_id).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
let function = find_function(&db, file_id);
@ -512,11 +509,9 @@ fn foo() {
let editioned_file_id = position.file_id;
let offset = position.offset;
let (file_id, _) = editioned_file_id.unpack();
let file_id_wrapper =
base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let (file_id, _) = editioned_file_id.unpack(&db);
let file = db.parse(file_id_wrapper).ok().unwrap();
let file = db.parse(editioned_file_id).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();


@ -189,8 +189,8 @@ fn f() {
}
"#,
expect![[r#"
BlockId(4801) in BlockRelativeModuleId { block: Some(BlockId(4800)), local_id: Idx::<ModuleData>(1) }
BlockId(4800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);


@ -35,9 +35,9 @@ macro_rules! f {
};
}
struct#0:1@58..64#19456# MyTraitMap2#0:2@31..42#ROOT2024# {#0:1@72..73#19456#
map#0:1@86..89#19456#:#0:1@89..90#19456# #0:1@89..90#19456#::#0:1@91..93#19456#std#0:1@93..96#19456#::#0:1@96..98#19456#collections#0:1@98..109#19456#::#0:1@109..111#19456#HashSet#0:1@111..118#19456#<#0:1@118..119#19456#(#0:1@119..120#19456#)#0:1@120..121#19456#>#0:1@121..122#19456#,#0:1@122..123#19456#
}#0:1@132..133#19456#
struct#0:1@58..64#14336# MyTraitMap2#0:2@31..42#ROOT2024# {#0:1@72..73#14336#
map#0:1@86..89#14336#:#0:1@89..90#14336# #0:1@89..90#14336#::#0:1@91..93#14336#std#0:1@93..96#14336#::#0:1@96..98#14336#collections#0:1@98..109#14336#::#0:1@109..111#14336#HashSet#0:1@111..118#14336#<#0:1@118..119#14336#(#0:1@119..120#14336#)#0:1@120..121#14336#>#0:1@121..122#14336#,#0:1@122..123#14336#
}#0:1@132..133#14336#
"#]],
);
}
@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
struct#1:1@59..65#19456# Foo#0:2@32..35#ROOT2024#(#1:1@70..71#19456#u32#0:2@41..44#ROOT2024#)#1:1@74..75#19456#;#1:1@75..76#19456#
struct#1:1@59..65#14336# Foo#0:2@32..35#ROOT2024#(#1:1@70..71#14336#u32#0:2@41..44#ROOT2024#)#1:1@74..75#14336#;#1:1@75..76#14336#
"#]],
);
}
@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
impl#\19456# Bar#\19456# {#\19456#
fn#\19456# foo#\ROOT2024#(#\19456#)#\19456# {#\19456#}#\19456#
fn#\19456# bar#\ROOT2024#(#\19456#)#\19456# {#\19456#}#\19456#
}#\19456#
impl#\14336# Bar#\14336# {#\14336#
fn#\14336# foo#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
fn#\14336# bar#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
}#\14336#
"#]],
);
}


@ -19,14 +19,13 @@ use std::{iter, ops::Range, sync};
use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind,
InFile, MacroCallKind, MacroKind,
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
};
use intern::Symbol;
use itertools::Itertools;
use salsa::AsDynDatabase;
use span::{Edition, Span};
use stdx::{format_to, format_to_acc};
use syntax::{
@ -67,8 +66,6 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
let editioned_file_id =
ast_id.file_id.file_id().expect("macros inside macros are not supported");
let editioned_file_id =
base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let ast = db.parse(editioned_file_id).syntax_node();
let ast_id_map = db.ast_id_map(ast_id.file_id);
@ -143,8 +140,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
)
.unwrap();
let macro_call_id = res.value.unwrap();
let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file);
let mut expansion_result = db.parse_macro_expansion(macro_call_id);
expansion_result.err = expansion_result.err.or(res.err);
expansions.push((macro_call.value.clone(), expansion_result));
}
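This hunk reflects the removal of the `MacroFileId` wrapper: a macro file is now identified by its `MacroCallId` directly, so queries like `parse_macro_expansion` take the call id without the intermediate struct. A sketch (the helper name is hypothetical):

```rust
// Hypothetical helper showing the simplified query call.
fn expansion_syntax(db: &dyn ExpandDatabase, macro_call_id: MacroCallId) -> SyntaxNode {
    // Before: db.parse_macro_expansion(MacroFileId { macro_call_id })
    // After: the call id *is* the macro file id.
    db.parse_macro_expansion(macro_call_id).value.0.syntax_node()
}
```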


@ -62,14 +62,14 @@ use std::ops::Deref;
use base_db::Crate;
use hir_expand::{
ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath, name::Name,
proc_macro::ProcMacroKind,
EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath,
name::Name, proc_macro::ProcMacroKind,
};
use intern::Symbol;
use itertools::Itertools;
use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use span::{Edition, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{AstNode, SmolStr, SyntaxNode, ToSmolStr, ast};
use triomphe::Arc;
@ -328,9 +328,7 @@ impl ModuleOrigin {
match self {
&ModuleOrigin::File { definition: editioned_file_id, .. }
| &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
let definition = base_db::EditionedFileId::new(db, editioned_file_id);
let sf = db.parse(definition).tree();
let sf = db.parse(editioned_file_id).tree();
InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
}
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
@ -389,7 +387,7 @@ impl DefMap {
.entered();
let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id() },
ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
Visibility::Public,
);
@ -402,7 +400,7 @@ impl DefMap {
let (def_map, local_def_map) = collector::collect_defs(
db,
def_map,
TreeId::new(krate.root_file_id().into(), None),
TreeId::new(krate.root_file_id(db).into(), None),
None,
);
@ -488,11 +486,15 @@ impl DefMap {
}
impl DefMap {
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
pub fn modules_for_file<'a>(
&'a self,
db: &'a dyn DefDatabase,
file_id: FileId,
) -> impl Iterator<Item = LocalModuleId> + 'a {
self.modules
.iter()
.filter(move |(_id, data)| {
data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id)
data.origin.file_id().map(|file_id| file_id.file_id(db)) == Some(file_id)
})
.map(|(id, _data)| id)
}


@ -1,13 +1,12 @@
//! Expansion of associated items
use hir_expand::{AstId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, name::Name};
use span::MacroCallId;
use syntax::ast;
use triomphe::Arc;
use crate::{
AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
ItemLoc, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
db::DefDatabase,
item_tree::{AssocItem, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id,
@ -296,9 +295,8 @@ impl<'a> AssocItemCollector<'a> {
tracing::warn!("macro expansion is too deep");
return;
}
let file_id = macro_call_id.as_file();
let tree_id = TreeId::new(file_id, None);
let item_tree = self.db.file_item_tree(file_id);
let tree_id = TreeId::new(macro_call_id.into(), None);
let item_tree = self.db.file_item_tree(macro_call_id.into());
self.depth += 1;
for item in item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item) {


@ -9,8 +9,8 @@ use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
MacroFileIdExt,
EditionedFileId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
MacroDefKind,
attrs::{Attr, AttrId},
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
@ -21,7 +21,7 @@ use intern::{Interned, sym};
use itertools::{Itertools, izip};
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, SyntaxContext};
use span::{Edition, FileAstId, SyntaxContext};
use syntax::ast;
use triomphe::Arc;
@ -250,7 +250,7 @@ impl DefCollector<'_> {
fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
let file_id = self.def_map.krate.data(self.db).root_file_id();
let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
let item_tree = self.db.file_item_tree(file_id.into());
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
@ -1512,11 +1512,11 @@ impl DefCollector<'_> {
tracing::warn!("macro expansion is too deep");
return;
}
let file_id = macro_call_id.as_file();
let file_id = macro_call_id.into();
let item_tree = self.db.file_item_tree(file_id);
let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db) {
let mod_dir = if macro_call_id.is_include_macro(self.db) {
ModDir::root()
} else {
self.mod_dirs[&module_id].clone()


@ -1,8 +1,7 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use hir_expand::{HirFileIdExt, name::Name};
use span::EditionedFileId;
use hir_expand::{EditionedFileId, name::Name};
use crate::{HirFileId, db::DefDatabase};
@ -79,7 +78,7 @@ impl ModDir {
let orig_file_id = file_id.original_file_respecting_includes(db);
for candidate in candidate_files.iter() {
let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
let path = AnchoredPath { anchor: orig_file_id.file_id(db), path: candidate.as_str() };
if let Some(file_id) = db.resolve_path(path) {
let is_mod_rs = candidate.ends_with("/mod.rs");
@ -92,7 +91,7 @@ impl ModDir {
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((
// FIXME: Edition, is this right?
EditionedFileId::new(file_id, orig_file_id.edition()),
EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
is_mod_rs,
mod_dir,
));


@ -18,7 +18,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id.file_id(), ra_fixture_change);
db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change);
{
let events = db.log_executed(|| {
@ -55,7 +55,7 @@ pub const BAZ: u32 = 0;
let mut add_crate = |crate_name, root_file_idx: usize| {
new_crate_graph.add_crate_root(
files[root_file_idx].file_id(),
files[root_file_idx].file_id(&db),
Edition::CURRENT,
Some(CrateDisplayName::from_canonical_name(crate_name)),
None,
@ -348,7 +348,7 @@ fn quux() { 92 }
m!(Y);
m!(Z);
"#;
db.set_file_text(pos.file_id.file_id(), new_text);
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let events = db.log_executed(|| {


@ -917,7 +917,7 @@ fn handle_macro_def_scope(
// and use its parent expansion.
*hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
*hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion);
let expansion = db.lookup_intern_macro_call(expansion.into());
(parent_ctx.parent(db), expansion.def)
});
}
@ -932,7 +932,7 @@ fn hygiene_info(
if !hygiene_id.is_root() {
let ctx = hygiene_id.lookup();
ctx.outer_expn(db).map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion);
let expansion = db.lookup_intern_macro_call(expansion.into());
(ctx.parent(db), expansion.def)
})
} else {


@ -8,7 +8,7 @@ use base_db::{
};
use hir_expand::{InFile, files::FilePosition};
use salsa::{AsDynDatabase, Durability};
use span::{EditionedFileId, FileId};
use span::FileId;
use syntax::{AstNode, algo, ast};
use triomphe::Arc;
@ -135,7 +135,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return crate_def_map.module_id(local_id);
}
}
@ -144,7 +144,7 @@ impl TestDB {
}
pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
let file_module = self.module_for_file(position.file_id.file_id());
let file_module = self.module_for_file(position.file_id.file_id(self));
let mut def_map = file_module.def_map(self);
let module = self.mod_at_position(&def_map, position);
@ -246,10 +246,7 @@ impl TestDB {
let source_map = self.body_with_source_map(def_with_body).1;
let scopes = self.expr_scopes(def_with_body);
let editioned_file_id_wrapper =
base_db::EditionedFileId::new(self.as_dyn_database(), position.file_id);
let root_syntax_node = self.parse(editioned_file_id_wrapper).syntax_node();
let root_syntax_node = self.parse(position.file_id).syntax_node();
let scope_iter =
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;


@ -1,8 +1,8 @@
//! Builtin attributes.
use intern::sym;
use span::{MacroCallId, Span};
use span::Span;
use crate::{ExpandResult, MacroCallKind, db::ExpandDatabase, name, tt};
use crate::{ExpandResult, MacroCallId, MacroCallKind, db::ExpandDatabase, name, tt};
use super::quote;


@ -4,13 +4,13 @@ use intern::sym;
use itertools::{Itertools, izip};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
use span::{Edition, MacroCallId, Span, SyntaxContext};
use span::{Edition, Span, SyntaxContext};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
use crate::{
ExpandError, ExpandResult,
ExpandError, ExpandResult, MacroCallId,
builtin::quote::{dollar_crate, quote},
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
@ -59,7 +59,7 @@ impl BuiltinDeriveExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, span, tt)
}
}


@ -8,7 +8,7 @@ use intern::{
sym::{self},
};
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, EditionedFileId, FileId, Span};
use span::{Edition, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
@ -17,7 +17,7 @@ use syntax::{
use syntax_bridge::syntax_node_to_token_tree;
use crate::{
ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
builtin::quote::{WithDelimiter, dollar_crate, quote},
db::ExpandDatabase,
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
@ -72,7 +72,7 @@ impl BuiltinFnLikeExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -89,7 +89,7 @@ impl EagerExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -224,7 +224,7 @@ fn assert_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mut iter = tt.iter();
@ -345,7 +345,7 @@ fn panic_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::panic_2021.clone()
@ -376,7 +376,7 @@ fn unreachable_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::unreachable_2021.clone()
@ -407,7 +407,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
let Some(expn) = span.ctx.outer_expn(db) else {
break false;
};
let expn = db.lookup_intern_macro_call(expn);
let expn = db.lookup_intern_macro_call(expn.into());
// FIXME: Record allow_internal_unstable in the macro def (not been done yet because it
// would consume quite a bit extra memory for all call locs...)
// if let Some(features) = expn.def.allow_internal_unstable {
@ -663,7 +663,7 @@ fn relative_file(
err_span: Span,
) -> Result<EditionedFileId, ExpandError> {
let lookup = db.lookup_intern_macro_call(call_id);
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(db);
let path = AnchoredPath { anchor: call_site, path: path_str };
let res: FileId = db
.resolve_path(path)
@ -672,7 +672,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
Ok(EditionedFileId::new(res, lookup.krate.data(db).edition))
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
}
}
@ -731,10 +731,8 @@ fn include_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let (file_id_wrapper, editioned_file_id) = match include_input_to_file_id(db, arg_id, tt) {
Ok(editioned_file_id) => {
(base_db::EditionedFileId::new(db, editioned_file_id), editioned_file_id)
}
let editioned_file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(editioned_file_id) => editioned_file_id,
Err(e) => {
return ExpandResult::new(
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
@ -745,7 +743,7 @@ fn include_expand(
let span_map = db.real_span_map(editioned_file_id);
// FIXME: Parse errors
ExpandResult::ok(syntax_node_to_token_tree(
&db.parse(file_id_wrapper).syntax_node(),
&db.parse(editioned_file_id).syntax_node(),
SpanMap::RealSpanMap(span_map),
span,
syntax_bridge::DocCommentDesugarMode::ProcMacro,
@ -807,7 +805,7 @@ fn include_str_expand(
}
};
let text = db.file_text(file_id.file_id());
let text = db.file_text(file_id.file_id(db));
let text = &*text.text(db);
ExpandResult::ok(quote!(call_site =>#text))


@ -4,19 +4,15 @@ use base_db::{Crate, RootQueryDb};
use either::Either;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use salsa::plumbing::AsId;
use span::{
AstIdMap, Edition, EditionedFileId, HirFileId, HirFileIdRepr, MacroCallId, MacroFileId, Span,
SyntaxContext,
};
use span::{AstIdMap, Edition, Span, SyntaxContext};
use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
use triomphe::Arc;
use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, ExpandError, ExpandResult, ExpandTo, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::{AttrId, collect_attrs},
builtin::pseudo_derive_attr_expansion,
cfg_process,
@ -63,9 +59,10 @@ pub trait ExpandDatabase: RootQueryDb {
fn proc_macros(&self) -> Arc<ProcMacros>;
/// Incrementality query to prevent queries from directly depending on `ExpandDatabase::proc_macros`.
#[salsa::invoke_actual(crate::proc_macro::proc_macros_for_crate)]
#[salsa::invoke(crate::proc_macro::proc_macros_for_crate)]
fn proc_macros_for_crate(&self, krate: Crate) -> Option<Arc<CrateProcMacros>>;
#[salsa::invoke(ast_id_map)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
@ -75,7 +72,7 @@ pub trait ExpandDatabase: RootQueryDb {
#[salsa::lru(512)]
fn parse_macro_expansion(
&self,
macro_file: span::MacroFileId,
macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
@ -84,7 +81,7 @@ pub trait ExpandDatabase: RootQueryDb {
#[salsa::transparent]
#[salsa::invoke(crate::span_map::expansion_span_map)]
fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
fn expansion_span_map(&self, file_id: MacroCallId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
@ -136,6 +133,7 @@ pub trait ExpandDatabase: RootQueryDb {
/// directly depend on as that would cause too frequent invalidations, mainly because of the
/// parse queries being LRU cached. If they weren't, the invalidations would only happen if the
/// user wrote in the file that defines the proc-macro.
#[salsa::invoke_interned(proc_macro_span)]
fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
@ -149,30 +147,17 @@ pub trait ExpandDatabase: RootQueryDb {
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
#[salsa::interned(no_lifetime, id = span::MacroCallId)]
pub struct MacroCallWrapper {
pub loc: MacroCallLoc,
}
fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
MacroCallWrapper::new(db, macro_call).0
}
fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
MacroCallWrapper::ingredient(db).data(db.as_dyn_database(), macro_call.as_id()).0.clone()
}
#[salsa::interned(no_lifetime, id = span::SyntaxContext)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContext {
match file.repr() {
HirFileIdRepr::FileId(_) => SyntaxContext::root(edition),
HirFileIdRepr::MacroFile(m) => {
let kind = db.lookup_intern_macro_call(m.macro_call_id).kind;
db.macro_arg_considering_derives(m.macro_call_id, &kind).2.ctx
match file {
HirFileId::FileId(_) => SyntaxContext::root(edition),
HirFileId::MacroFile(m) => {
let kind = db.lookup_intern_macro_call(m).kind;
db.macro_arg_considering_derives(m, &kind).2.ctx
}
}
}
@ -300,9 +285,9 @@ pub fn expand_speculative(
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_def_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@ -346,20 +331,16 @@ pub fn expand_speculative(
Some((node.syntax_node(), token))
}
fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc<AstIdMap> {
fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> triomphe::Arc<AstIdMap> {
triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
db.parse(file_id).syntax_node()
}
HirFileIdRepr::MacroFile(macro_file) => {
match file_id {
HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileId::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
}
@ -369,14 +350,13 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFileId,
macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::info_span!("parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
let def_edition = loc.def.edition;
let expand_to = loc.expand_to();
let mbe::ValueResult { value: (tt, matched_arm), err } =
macro_expand(db, macro_file.macro_call_id, loc);
let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
let (parse, mut rev_token_map) = token_tree_to_syntax_node(
db,
@ -397,7 +377,7 @@ fn parse_macro_expansion_error(
macro_call_id: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>> {
let e: ExpandResult<Arc<[SyntaxError]>> =
db.parse_macro_expansion(MacroFileId { macro_call_id }).map(|it| Arc::from(it.0.errors()));
db.parse_macro_expansion(macro_call_id).map(|it| Arc::from(it.0.errors()));
if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) }
}
@ -405,16 +385,11 @@ pub(crate) fn parse_with_map(
db: &dyn ExpandDatabase,
file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
(
db.parse(file_id).to_syntax(),
SpanMap::RealSpanMap(db.real_span_map(editioned_file_id)),
)
match file_id {
HirFileId::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
HirFileId::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
@ -734,9 +709,9 @@ fn expand_proc_macro(
loc.krate,
&macro_arg,
attr_arg,
span_with_def_site_ctxt(db, span, id, loc.def.edition),
span_with_call_site_ctxt(db, span, id, loc.def.edition),
span_with_mixed_site_ctxt(db, span, id, loc.def.edition),
span_with_def_site_ctxt(db, span, id.into(), loc.def.edition),
span_with_call_site_ctxt(db, span, id.into(), loc.def.edition),
span_with_mixed_site_ctxt(db, span, id.into(), loc.def.edition),
)
};
@ -784,3 +759,11 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
})
}
}
fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
MacroCallId::new(db, macro_call)
}
fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
macro_call.loc(db)
}
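With `MacroCallId` itself now the Salsa interned struct, the old `MacroCallWrapper` indirection removed above collapses into the generated constructor plus the `loc` field accessor. A sketch of the assumed round-trip (relying on the interning guarantees this diff depends on):

```rust
fn intern_roundtrip(db: &dyn ExpandDatabase, loc: MacroCallLoc) {
    // `MacroCallId::new` interns the loc; interning is idempotent...
    let id = MacroCallId::new(db, loc.clone());
    assert_eq!(id, MacroCallId::new(db, loc.clone()));
    // ...and the generated `loc` accessor reads the data back out.
    assert_eq!(id.loc(db), loc);
}
```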


@ -2,14 +2,14 @@
use base_db::Crate;
use intern::sym;
use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContext};
use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, ast};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
attrs::RawAttrs,
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
@ -42,7 +42,10 @@ impl DeclarativeMacroExpander {
.mac
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition),
|s| {
s.ctx =
apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition)
},
span,
loc.def.edition,
)
@ -106,7 +109,8 @@ impl DeclarativeMacroExpander {
def_crate.data(db).edition
} else {
// UNWRAP-SAFETY: Only the root context has no outer expansion
let krate = db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap()).def.krate;
let krate =
db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap().into()).def.krate;
krate.data(db).edition
}
};
@ -160,9 +164,9 @@ impl DeclarativeMacroExpander {
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
let edition = ctx_edition(match id.file_id.repr() {
HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt,
HirFileIdRepr::FileId(file) => SyntaxContext::root(file.edition()),
let edition = ctx_edition(match id.file_id {
HirFileId::MacroFile(macro_file) => macro_file.lookup(db).ctxt,
HirFileId::FileId(file) => SyntaxContext::root(file.edition(db)),
});
Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
}


@ -63,7 +63,7 @@ pub fn expand_eager_macro_input(
#[allow(deprecated)] // builtin eager macros are never derives
let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
db.parse_macro_expansion(arg_id);
let mut arg_map = ExpansionSpanMap::empty();
@ -73,7 +73,7 @@ pub fn expand_eager_macro_input(
&arg_exp_map,
&mut arg_map,
TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
InFile::new(arg_id.into(), arg_exp.syntax_node()),
krate,
call_site,
resolver,
@ -134,10 +134,8 @@ fn lazy_expand(
call_site,
);
eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
.map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
db.parse_macro_expansion(id).map(|parse| (InFile::new(id.into(), parse.0), parse.1))
}
fn eager_macro_recur(
@ -224,7 +222,7 @@ fn eager_macro_recur(
call_id,
);
let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
db.parse_macro_expansion(call_id);
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));


@ -2,14 +2,11 @@
use std::borrow::Borrow;
use either::Either;
use span::{
AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId,
SyntaxContext,
};
use span::{AstIdNode, ErasedFileAstId, FileAstId, FileId, SyntaxContext};
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize};
use crate::{
MacroFileIdExt, MacroKind,
EditionedFileId, HirFileId, MacroCallId, MacroKind,
db::{self, ExpandDatabase},
map_node_range_up, map_node_range_up_rooted, span_for_offset,
};
@ -27,7 +24,7 @@ pub struct InFileWrapper<FileKind, T> {
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroCallId, T>;
pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@ -36,12 +33,13 @@ pub struct FilePositionWrapper<FileKind> {
pub offset: TextSize,
}
pub type HirFilePosition = FilePositionWrapper<HirFileId>;
pub type MacroFilePosition = FilePositionWrapper<MacroFileId>;
pub type MacroFilePosition = FilePositionWrapper<MacroCallId>;
pub type FilePosition = FilePositionWrapper<EditionedFileId>;
impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> {
fn from(value: FilePositionWrapper<EditionedFileId>) -> Self {
FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset }
impl FilePosition {
#[inline]
pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FilePositionWrapper<FileId> {
FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@ -50,12 +48,13 @@ pub struct FileRangeWrapper<FileKind> {
pub range: TextRange,
}
pub type HirFileRange = FileRangeWrapper<HirFileId>;
pub type MacroFileRange = FileRangeWrapper<MacroFileId>;
pub type MacroFileRange = FileRangeWrapper<MacroCallId>;
pub type FileRange = FileRangeWrapper<EditionedFileId>;
impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> {
fn from(value: FileRangeWrapper<EditionedFileId>) -> Self {
FileRangeWrapper { file_id: value.file_id.into(), range: value.range }
impl FileRange {
#[inline]
pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper<FileId> {
FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range }
}
}
@ -163,12 +162,10 @@ trait FileIdToSyntax: Copy {
impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
let file_id = base_db::EditionedFileId::new(db, self);
db.parse(file_id).syntax_node()
db.parse(self).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
impl FileIdToSyntax for MacroCallId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
@ -221,7 +218,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db)
.syntax()
.cloned()
@ -238,7 +235,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db)
.syntax()
.cloned()
@ -278,11 +275,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let file_id = match self.file_id {
HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.borrow().clone() });
}
HirFileIdRepr::MacroFile(m)
HirFileId::MacroFile(m)
if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
{
m
@ -296,11 +293,9 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
self.value.borrow().text_range(),
)?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let kind = self.kind();
let value = db
.parse(file_id)
.parse(editioned_file_id)
.syntax_node()
.covering_element(range)
.ancestors()
@ -332,9 +327,9 @@ impl InMacroFile<SyntaxToken> {
impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
match self.file_id {
HirFileId::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset(
db,
&db.expansion_span_map(mac_file),
@ -348,7 +343,7 @@ impl InFile<SyntaxToken> {
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
}
}
@ -356,11 +351,11 @@ impl InFile<SyntaxToken> {
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
match self.file_id {
HirFileId::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset(
db,
&db.expansion_span_map(mac_file),
@ -386,15 +381,15 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> (FileRange, SyntaxContext) {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition()))
match self.file_id {
HirFileId::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition(db)))
}
HirFileIdRepr::MacroFile(mac_file) => {
HirFileId::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
(loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
}
}
@ -403,13 +398,13 @@ impl InFile<TextRange> {
}
pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match self.file_id {
HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
}
}
@ -421,13 +416,13 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match self.file_id {
HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range_with_body(db)
}
}
@ -439,12 +434,12 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContext)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => Some((
match self.file_id {
HirFileId::FileId(file_id) => Some((
FileRange { file_id, range: self.value },
SyntaxContext::root(file_id.edition()),
SyntaxContext::root(file_id.edition(db)),
)),
HirFileIdRepr::MacroFile(mac_file) => {
HirFileId::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}
}
@ -455,11 +450,11 @@ impl<N: AstNode> InFile<N> {
pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let file_id = match self.file_id {
HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value });
}
HirFileIdRepr::MacroFile(m) => m,
HirFileId::MacroFile(m) => m,
};
if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
return None;
@ -471,10 +466,8 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().text_range(),
)?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let anc = db.parse(editioned_file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(editioned_file_id, value))
}
@ -482,9 +475,9 @@ impl<N: AstNode> InFile<N> {
impl<T> InFile<T> {
pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileIdRepr::MacroFile(_) => Err(self),
match self.file_id {
HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileId::MacroFile(_) => Err(self),
}
}
}
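The pattern repeated throughout this file: `HirFileId` is matched on directly instead of through the removed `repr()`/`HirFileIdRepr` pair, and a real file's edition is now read through the database. A minimal sketch of the edition half, mirroring the `SyntaxContext::root(file_id.edition(db))` calls above (type paths assumed to be in scope as in this file):

    // Sketch only: real files report their edition via a database lookup now.
    fn root_context(db: &dyn db::ExpandDatabase, file_id: base_db::EditionedFileId) -> SyntaxContext {
        SyntaxContext::root(file_id.edition(db))
    }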


@ -81,7 +81,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {


@ -27,6 +27,7 @@ mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt;
use triomphe::Arc;
@ -35,10 +36,7 @@ use std::hash::Hash;
use base_db::Crate;
use either::Either;
use span::{
Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
SyntaxContext,
};
use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@ -62,8 +60,8 @@ pub use crate::{
prettify_macro_expansion_::prettify_macro_expansion,
};
pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, ValueResult};
pub use span::{HirFileId, MacroCallId, MacroFileId};
pub mod tt {
pub use span::Span;
@ -343,51 +341,34 @@ pub enum MacroCallKind {
},
}
pub trait HirFileIdExt {
fn edition(self, db: &dyn ExpandDatabase) -> Edition;
/// Returns the original file of this macro call hierarchy.
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// Returns the original file of this macro call hierarchy while going into the included file if
/// one of the calls comes from an `include!`.
fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
}
impl HirFileIdExt for HirFileId {
fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self.repr() {
HirFileIdRepr::FileId(file_id) => file_id.edition(),
HirFileIdRepr::MacroFile(m) => db.lookup_intern_macro_call(m.macro_call_id).def.edition,
impl HirFileId {
pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self {
HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(),
HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition,
}
}
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
pub fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
let mut file_id = self;
loop {
match file_id.repr() {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
match file_id {
HirFileId::FileId(id) => break id,
HirFileId::MacroFile(macro_call_id) => {
file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
}
}
}
}
fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
pub fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
loop {
match self.repr() {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file.macro_call_id);
match self {
HirFileId::FileId(id) => break id,
HirFileId::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file);
if loc.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
if let Ok(it) =
include_input_to_file_id(db, file.macro_call_id, &eager.arg)
{
if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) {
break it;
}
}
@ -398,23 +379,26 @@ impl HirFileIdExt for HirFileId {
}
}
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
pub fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?).to_node(db);
loop {
match call.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
match call.file_id {
HirFileId::FileId(file_id) => {
break Some(InRealFile { file_id, value: call.value });
}
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
HirFileId::MacroFile(macro_call_id) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
}
}
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
pub fn as_builtin_derive_attr_node(
&self,
db: &dyn ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
let attr = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
_ => return None,
@ -441,57 +425,34 @@ pub enum MacroKind {
ProcMacro,
}
pub trait MacroFileIdExt {
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId;
fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;
/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool;
}
impl MacroFileIdExt for MacroFileId {
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
impl MacroCallId {
pub fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self).to_node(db)
}
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
pub fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
let mut level = 0;
let mut macro_file = self;
loop {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
level += 1;
macro_file = match loc.kind.file_id().repr() {
HirFileIdRepr::FileId(_) => break level,
HirFileIdRepr::MacroFile(it) => it,
macro_file = match loc.kind.file_id() {
HirFileId::FileId(_) => break level,
HirFileId::MacroFile(it) => it,
};
}
}
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
db.lookup_intern_macro_call(self.macro_call_id).kind.file_id()
pub fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
db.lookup_intern_macro_call(self).kind.file_id()
}
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
pub fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
ExpansionInfo::new(db, self)
}
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind {
match db.lookup_intern_macro_call(self.macro_call_id).def.kind {
pub fn kind(self, db: &dyn ExpandDatabase) -> MacroKind {
match db.lookup_intern_macro_call(self).def.kind {
MacroDefKind::Declarative(..) => MacroKind::Declarative,
MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
MacroKind::DeclarativeBuiltIn
@ -504,33 +465,33 @@ impl MacroFileIdExt for MacroFileId {
}
}
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
pub fn is_include_macro(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self).def.is_include()
}
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
pub fn is_include_like_macro(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self).def.is_include_like()
}
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
pub fn is_env_or_option_env(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self).def.is_env_or_option_env()
}
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
pub fn is_eager(self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
pub fn eager_arg(self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
let loc = db.lookup_intern_macro_call(self);
match &loc.kind {
MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
_ => None,
}
}
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
pub fn is_derive_attr_pseudo_expansion(self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self);
loc.def.is_attribute_derive()
}
}
@ -728,11 +689,11 @@ impl MacroCallKind {
pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
HirFileIdRepr::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
match kind.file_id() {
HirFileId::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file).kind;
}
HirFileIdRepr::FileId(file_id) => break file_id,
HirFileId::FileId(file_id) => break file_id,
}
};
@ -753,11 +714,11 @@ impl MacroCallKind {
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
HirFileIdRepr::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
match kind.file_id() {
HirFileId::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file).kind;
}
HirFileIdRepr::FileId(file_id) => break file_id,
HirFileId::FileId(file_id) => break file_id,
}
};
@ -898,7 +859,7 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
let file_id = span.anchor.file_id.into();
let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@ -921,9 +882,9 @@ impl ExpansionInfo {
}
}
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroCallId) -> ExpansionInfo {
let _p = tracing::info_span!("ExpansionInfo::new").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id);
@ -955,9 +916,10 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
}
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
@ -980,12 +942,10 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((
FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
ctx,
))
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
}
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
@ -1004,8 +964,9 @@ pub fn map_node_range_up_aggregated(
);
}
for ((anchor, _), range) in &mut map {
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
@ -1018,12 +979,10 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
let anchor_offset = db
.ast_id_map(span.anchor.file_id.into())
.get_erased(span.anchor.ast_id)
.text_range()
.start();
(FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
(FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
}
/// In Rust, macros expand token trees to token trees. When we want to turn a
@ -1091,3 +1050,77 @@ impl ExpandTo {
}
intern::impl_internable!(ModPath, attrs::AttrInput);
#[salsa::interned(no_lifetime)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
}
impl From<span::MacroCallId> for MacroCallId {
#[inline]
fn from(value: span::MacroCallId) -> Self {
MacroCallId::from_id(value.0)
}
}
impl From<MacroCallId> for span::MacroCallId {
#[inline]
fn from(value: MacroCallId) -> span::MacroCallId {
span::MacroCallId(value.as_id())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum HirFileId {
FileId(EditionedFileId),
MacroFile(MacroCallId),
}
impl From<EditionedFileId> for HirFileId {
#[inline]
fn from(file_id: EditionedFileId) -> Self {
HirFileId::FileId(file_id)
}
}
impl From<MacroCallId> for HirFileId {
#[inline]
fn from(file_id: MacroCallId) -> Self {
HirFileId::MacroFile(file_id)
}
}
impl HirFileId {
#[inline]
pub fn macro_file(self) -> Option<MacroCallId> {
match self {
HirFileId::FileId(_) => None,
HirFileId::MacroFile(it) => Some(it),
}
}
#[inline]
pub fn is_macro(self) -> bool {
matches!(self, HirFileId::MacroFile(_))
}
#[inline]
pub fn file_id(self) -> Option<EditionedFileId> {
match self {
HirFileId::FileId(it) => Some(it),
HirFileId::MacroFile(_) => None,
}
}
}
impl PartialEq<EditionedFileId> for HirFileId {
fn eq(&self, &other: &EditionedFileId) -> bool {
*self == HirFileId::from(other)
}
}
impl PartialEq<HirFileId> for EditionedFileId {
fn eq(&self, &other: &HirFileId) -> bool {
other == HirFileId::from(*self)
}
}
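These definitions are the core of the commit: `MacroCallId` becomes a Salsa interned struct wrapping its `MacroCallLoc`, and `HirFileId` becomes a `salsa::Supertype` enum over the two id kinds, which is what lets every `repr()` call site above turn into a plain `match`. A hedged sketch of the resulting calling convention, using only items defined or changed in this commit:

    // Sketch only: dispatching on the new HirFileId enum.
    fn describe(db: &dyn ExpandDatabase, file_id: HirFileId) -> String {
        match file_id {
            HirFileId::FileId(file) => {
                // Both parts of the interned EditionedFileId live behind the db.
                format!("real file {:?} ({:?})", file.file_id(db), file.edition(db))
            }
            HirFileId::MacroFile(call) => {
                // The MacroCallId is the macro file; no `.macro_call_id` projection.
                format!("macro expansion, {} level(s) deep", call.expansion_level(db))
            }
        }
    }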


@ -278,7 +278,7 @@ fn convert_path(
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctx = span_for_range(segment.syntax().text_range());
if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
@ -353,7 +353,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> O
result_mark = Some(mark);
}
result_mark.map(|call| db.lookup_intern_macro_call(call).def.krate)
result_mark.map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
}
pub use crate::name as __name;
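The `.into()` calls above bridge the raw `span::MacroCallId` recorded in hygiene data to the interned `MacroCallId` expected by `lookup_intern_macro_call`. A round-trip sketch relying only on the two `From` impls added in `lib.rs` above:

    // Sketch only: converting between the raw span id and the Salsa struct.
    fn roundtrip(raw: span::MacroCallId) -> span::MacroCallId {
        let interned: MacroCallId = raw.into(); // span::MacroCallId -> MacroCallId
        interned.into() // and back, via the reverse impl
    }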


@ -26,7 +26,7 @@ pub fn prettify_macro_expansion(
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let macro_call_id =
ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call = db.lookup_intern_macro_call(macro_call_id);
let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`.
// If not, if the target trait has the macro's crate as a dependency, using the dependency name


@ -1,13 +1,13 @@
//! Span maps for real files and macro expansions.
use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContext};
use span::{Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
pub use span::RealSpanMap;
use crate::{attrs::collect_attrs, db::ExpandDatabase};
use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@ -61,9 +61,9 @@ impl SpanMap {
#[inline]
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
match file_id {
HirFileId::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileId::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
@ -81,15 +81,13 @@ impl SpanMapRef<'_> {
pub(crate) fn real_span_map(
db: &dyn ExpandDatabase,
editioned_file_id: EditionedFileId,
editioned_file_id: base_db::EditionedFileId,
) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(editioned_file_id.into());
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let tree = db.parse(file_id).tree();
let tree = db.parse(editioned_file_id).tree();
// This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// would mean we'd invalidate everything whenever we type. So instead we make the text ranges
// relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
@ -140,7 +138,7 @@ pub(crate) fn real_span_map(
});
Arc::new(RealSpanMap::from_file(
editioned_file_id,
editioned_file_id.editioned_file_id(db),
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))
@ -148,7 +146,7 @@ pub(crate) fn real_span_map(
pub(crate) fn expansion_span_map(
db: &dyn ExpandDatabase,
file_id: MacroFileId,
file_id: MacroCallId,
) -> Arc<ExpansionSpanMap> {
db.parse_macro_expansion(file_id).value.1
}
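With `EditionedFileId` itself a Salsa key, `real_span_map` now feeds it straight into `db.parse`, and only the final `RealSpanMap::from_file` call needs the raw span-side id, recovered through the struct's field getter. A sketch of that last conversion, assuming the same scope as this file:

    // Sketch only: unwrapping the Salsa struct back to the raw span type.
    fn raw_id(db: &dyn ExpandDatabase, id: base_db::EditionedFileId) -> span::EditionedFileId {
        id.editioned_file_id(db)
    }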


@ -16,13 +16,8 @@ use triomphe::Arc;
use crate::{
Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
TraitEnvironment, Ty, TyBuilder,
db::{HirDatabase, HirDatabaseData},
display::DisplayTarget,
generics::Generics,
infer::InferenceContext,
lower::ParamLoweringMode,
to_placeholder_idx,
TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics,
infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
};
use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16};
@ -228,7 +223,6 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: HirDatabaseData,
_: GeneralConstId,
_: Substitution,
_: Option<Arc<TraitEnvironment>>,
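A second mechanical theme starts here and recurs in the layout, lowering, and monomorphization files below: cycle-recovery functions lose their `HirDatabaseData` parameter, so a recover function now takes only the database, the `Cycle`, and the query's own inputs. A hedged sketch of the new shape, with an invented query input for illustration:

    // Sketch only, invented query: the post-change recover signature.
    pub(crate) fn example_recover(
        _db: &dyn HirDatabase,
        _cycle: &Cycle,
        _input: u32, // query inputs follow directly; no HirDatabaseData slot
    ) -> u32 {
        0
    }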


@ -1,11 +1,11 @@
use base_db::RootQueryDb;
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
use hir_expand::EditionedFileId;
use rustc_apfloat::{
Float,
ieee::{Half as f16, Quad as f128},
};
use span::EditionedFileId;
use test_fixture::WithFixture;
use test_utils::skip_slow_tests;
@ -116,14 +116,14 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
}
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> {
let module_id = db.module_for_file(file_id.file_id());
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let const_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::ConstId(x) => {
if db.const_signature(x).name.as_ref()?.display(db, file_id.edition()).to_string()
if db.const_signature(x).name.as_ref()?.display(db, file_id.edition(db)).to_string()
== "GOAL"
{
Some(x)
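Every test helper that used to call `file_id.file_id()` or `file_id.edition()` without arguments now passes the database, since both values are fields of the interned struct. The access pattern, collected in one place (`base_db::FileId` and `span::Edition` paths are assumptions; the test's actual imports may differ):

    // Sketch only: unpacking an interned EditionedFileId in a test.
    fn parts(db: &TestDB, file_id: EditionedFileId) -> (base_db::FileId, span::Edition) {
        (file_id.file_id(db), file_id.edition(db))
    }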


@ -30,16 +30,16 @@ use crate::{
#[query_group::query_group]
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke_actual(crate::infer::infer_query)]
#[salsa::invoke(crate::infer::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
// region:mir
#[salsa::invoke_actual(crate::mir::mir_body_query)]
#[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(crate::mir::mir_body_recover)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke_actual(crate::mir::mir_body_for_closure_query)]
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
@ -59,7 +59,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke_actual(crate::mir::borrowck_query)]
#[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
@ -72,11 +72,11 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>;
#[salsa::invoke_actual(crate::consteval::const_eval_static_query)]
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
#[salsa::invoke_actual(crate::consteval::const_eval_discriminant_variant)]
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
@ -103,10 +103,10 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke_actual(crate::layout::target_data_layout_query)]
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: Crate) -> Result<Arc<TargetDataLayout>, Arc<str>>;
#[salsa::invoke_actual(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
#[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[salsa::invoke(crate::lower::ty_query)]
@ -114,53 +114,54 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn ty(&self, def: TyDefId) -> Binders<Ty>;
#[salsa::cycle(crate::lower::type_for_type_alias_with_diagnostics_query_recover)]
#[salsa::invoke_actual(crate::lower::type_for_type_alias_with_diagnostics_query)]
#[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
#[salsa::invoke_actual(crate::lower::value_ty_query)]
#[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
#[salsa::invoke_actual(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
#[salsa::invoke_actual(crate::lower::impl_self_ty_query)]
#[salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::transparent]
fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[salsa::invoke(crate::lower::const_param_ty_query)]
#[salsa::transparent]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
#[salsa::invoke_actual(crate::lower::impl_trait_with_diagnostics_query)]
#[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
#[salsa::invoke_actual(crate::lower::impl_trait_query)]
#[salsa::invoke(crate::lower::impl_trait_query)]
#[salsa::transparent]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
#[salsa::invoke_actual(crate::lower::field_types_with_diagnostics_query)]
#[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics(
&self,
var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
#[salsa::invoke_actual(crate::lower::field_types_query)]
#[salsa::invoke(crate::lower::field_types_query)]
#[salsa::transparent]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
#[salsa::invoke_actual(crate::lower::callable_item_signature_query)]
#[salsa::invoke(crate::lower::callable_item_signature_query)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
#[salsa::invoke_actual(crate::lower::return_type_impl_traits)]
#[salsa::invoke(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
#[salsa::invoke_actual(crate::lower::type_alias_impl_traits)]
#[salsa::invoke(crate::lower::type_alias_impl_traits)]
fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
@ -172,41 +173,41 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
assoc_name: Option<Name>,
) -> GenericPredicates;
#[salsa::invoke_actual(crate::lower::generic_predicates_query)]
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
#[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
fn generic_predicates_without_parent_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericPredicates, Diagnostics);
#[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_query)]
#[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
#[salsa::transparent]
fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke_actual(crate::lower::trait_environment_for_body_query)]
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[salsa::invoke_actual(crate::lower::trait_environment_query)]
#[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[salsa::invoke_actual(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
fn generic_defaults_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericDefaults, Diagnostics);
#[salsa::invoke_actual(crate::lower::generic_defaults_query)]
#[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::transparent]
fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
#[salsa::invoke_actual(InherentImpls::inherent_impls_in_crate_query)]
#[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>;
#[salsa::invoke_actual(InherentImpls::inherent_impls_in_block_query)]
#[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
/// Collects all crates in the dependency graph that have impls for the
@ -220,13 +221,13 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fp: TyFingerprint,
) -> SmallVec<[Crate; 2]>;
#[salsa::invoke_actual(TraitImpls::trait_impls_in_crate_query)]
#[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>;
#[salsa::invoke_actual(TraitImpls::trait_impls_in_block_query)]
#[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
#[salsa::invoke_actual(TraitImpls::trait_impls_in_deps_query)]
#[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>;
// Interned IDs for Chalk integration
@ -251,7 +252,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::interned]
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
#[salsa::invoke_actual(chalk_db::associated_ty_data_query)]
#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[salsa::invoke(chalk_db::trait_datum_query)]
@ -268,16 +269,16 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId)
-> sync::Arc<chalk_db::ImplDatum>;
#[salsa::invoke_actual(chalk_db::fn_def_datum_query)]
#[salsa::invoke(chalk_db::fn_def_datum_query)]
fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>;
#[salsa::invoke_actual(chalk_db::fn_def_variance_query)]
#[salsa::invoke(chalk_db::fn_def_variance_query)]
fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances;
#[salsa::invoke_actual(chalk_db::adt_variance_query)]
#[salsa::invoke(chalk_db::adt_variance_query)]
fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances;
#[salsa::invoke_actual(crate::variance::variances_of)]
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(crate::variance::variances_of_cycle)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
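The change running through this whole trait is attribute renaming: `#[salsa::invoke_actual(..)]` becomes plain `#[salsa::invoke(..)]`, and `#[salsa::invoke_interned(..)]` now marks queries whose arguments must stay interned (see `const_param_ty_with_diagnostics` above). A hedged, self-contained sketch of the resulting shape; the trait, query, and function names here are invented for illustration:

    // Sketch only, invented names: the post-migration attribute style.
    #[query_group::query_group]
    pub trait ExampleDatabase: salsa::Database {
        // Plain `invoke` now covers what previously needed `invoke_actual`.
        #[salsa::invoke(example_len_query)]
        fn example_len(&self, def: u32) -> usize;
    }

    fn example_len_query(_db: &dyn ExampleDatabase, def: u32) -> usize {
        def as usize
    }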


@ -21,7 +21,7 @@ use hir_def::{
item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
};
use hir_expand::{
HirFileId, HirFileIdExt,
HirFileId,
name::{AsName, Name},
};
use intern::sym;
@ -644,7 +644,7 @@ impl<'a> DeclValidator<'a> {
return;
};
let edition = file_id.original_file(self.db).edition();
let edition = file_id.original_file(self.db).edition(self.db);
let diagnostic = IncorrectCase {
file: file_id,
ident_type,


@ -7,12 +7,11 @@ use hir_def::signatures::StructFlags;
use stdx::never;
use triomphe::Arc;
use crate::db::HirDatabaseData;
use crate::{
AliasTy, Canonical, CanonicalVarKinds, InEnvironment, Interner, ProjectionTy, TraitEnvironment,
Ty, TyBuilder, TyKind, db::HirDatabase, method_resolution::TyFingerprint,
AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment,
Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase,
method_resolution::TyFingerprint,
};
use crate::{ConcreteConst, ConstScalar, ConstValue};
fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
let module = match adt {
@ -197,7 +196,6 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
pub(crate) fn has_drop_glue_recover(
_db: &dyn HirDatabase,
_cycle: &salsa::Cycle,
_: HirDatabaseData,
_ty: Ty,
_env: Arc<TraitEnvironment>,
) -> DropGlue {


@ -33,7 +33,7 @@ fn check_dyn_compatibility<'a>(
expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| {
let module_id = db.module_for_file(file_id);
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
scope
@ -43,7 +43,7 @@ fn check_dyn_compatibility<'a>(
let name = db
.trait_signature(trait_id)
.name
.display_no_db(file_id.edition())
.display_no_db(file_id.edition(&db))
.to_smolstr();
Some((trait_id, name))
} else {


@ -20,7 +20,7 @@ use triomphe::Arc;
use crate::{
Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
consteval::try_const_usize,
db::{HirDatabase, HirDatabaseData, InternedClosure},
db::{HirDatabase, InternedClosure},
infer::normalize,
utils::ClosureSubst,
};
@ -368,7 +368,6 @@ pub fn layout_of_ty_query(
pub(crate) fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: HirDatabaseData,
_: Ty,
_: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {


@ -20,7 +20,7 @@ use crate::{
layout::{Layout, LayoutError, field_ty},
};
use super::{HirDatabaseData, LayoutCx};
use super::LayoutCx;
pub fn layout_of_adt_query(
db: &dyn HirDatabase,
@ -134,7 +134,6 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
pub(crate) fn layout_of_adt_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: HirDatabaseData,
_: AdtId,
_: Substitution,
_: Arc<TraitEnvironment>,


@ -38,7 +38,7 @@ fn eval_goal(
let adt_or_type_alias_id = file_ids
.into_iter()
.find_map(|file_id| {
let module_id = db.module_for_file(file_id.file_id());
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let adt_or_type_alias_id = scope.declarations().find_map(|x| match x {
@ -47,14 +47,18 @@ fn eval_goal(
hir_def::AdtId::StructId(x) => db
.struct_signature(x)
.name
.display_no_db(file_id.edition())
.display_no_db(file_id.edition(&db))
.to_smolstr(),
hir_def::AdtId::UnionId(x) => db
.union_signature(x)
.name
.display_no_db(file_id.edition(&db))
.to_smolstr(),
hir_def::AdtId::EnumId(x) => db
.enum_signature(x)
.name
.display_no_db(file_id.edition(&db))
.to_smolstr(),
hir_def::AdtId::UnionId(x) => {
db.union_signature(x).name.display_no_db(file_id.edition()).to_smolstr()
}
hir_def::AdtId::EnumId(x) => {
db.enum_signature(x).name.display_no_db(file_id.edition()).to_smolstr()
}
};
(name == "Goal").then_some(Either::Left(x))
}
@ -62,7 +66,7 @@ fn eval_goal(
let name = db
.type_alias_signature(x)
.name
.display_no_db(file_id.edition())
.display_no_db(file_id.edition(&db))
.to_smolstr();
(name == "Goal").then_some(Either::Right(x))
}
@ -99,7 +103,7 @@ fn eval_expr(
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let module_id = db.module_for_file(file_id.file_id());
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope
@ -107,7 +111,7 @@ fn eval_expr(
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name =
db.function_signature(x).name.display_no_db(file_id.edition()).to_smolstr();
db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr();
(name == "main").then_some(x)
}
_ => None,
@ -117,7 +121,7 @@ fn eval_expr(
let b = hir_body
.bindings
.iter()
.find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal")
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());


@ -59,7 +59,7 @@ use crate::{
QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
TyKind, WhereClause, all_super_traits,
consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
db::{HirDatabase, HirDatabaseData},
db::HirDatabase,
error_lifetime,
generics::{Generics, generics, trait_self_param_idx},
lower::{
@ -957,7 +957,6 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase,
_cycle: &salsa::Cycle,
_: HirDatabaseData,
_def: GenericDefId,
_param_id: TypeOrConstParamId,
_assoc_name: Option<Name>,


@ -16,7 +16,7 @@ use hir_def::{
resolver::{HasResolver, TypeNs, ValueNs},
signatures::{StaticFlags, StructFlags},
};
use hir_expand::{HirFileIdExt, InFile, mod_path::path, name::Name};
use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout;
@ -409,7 +409,7 @@ impl MirEvalError {
};
let file_id = span.file_id.original_file(db);
let text_range = span.value.text_range();
writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?;
writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;
}
}
match err {


@ -1,5 +1,6 @@
use hir_def::db::DefDatabase;
use span::{Edition, EditionedFileId};
use hir_expand::EditionedFileId;
use span::Edition;
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
@ -9,7 +10,7 @@ use crate::{Interner, Substitution, db::HirDatabase, mir::MirLowerError, test_db
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
let module_id = db.module_for_file(file_id);
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let func_id = scope
@ -69,7 +70,7 @@ fn check_pass_and_stdio(
let span_formatter = |file, range: TextRange| {
format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
};
let krate = db.module_for_file(file_id).krate();
let krate = db.module_for_file(file_id.file_id(&db)).krate();
e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate))
.unwrap();
panic!("Error in interpreting: {err}");


@ -19,7 +19,7 @@ use triomphe::Arc;
use crate::{
Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
consteval::{intern_const_scalar, unknown_const},
db::{HirDatabase, HirDatabaseData, InternedClosure, InternedClosureId},
db::{HirDatabase, InternedClosure, InternedClosureId},
from_placeholder_idx,
generics::{Generics, generics},
infer::normalize,
@ -316,7 +316,6 @@ pub fn monomorphized_mir_body_query(
pub(crate) fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
_: &salsa::Cycle,
_: HirDatabaseData,
_: DefWithBodyId,
_: Substitution,
_: Arc<crate::TraitEnvironment>,


@ -8,9 +8,10 @@ use base_db::{
};
use hir_def::{ModuleId, db::DefDatabase};
use hir_expand::EditionedFileId;
use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability};
use span::{EditionedFileId, FileId};
use span::FileId;
use syntax::TextRange;
use test_utils::extract_annotations;
use triomphe::Arc;
@ -119,7 +120,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return Some(crate_def_map.module_id(local_id));
}
}
@ -145,7 +146,7 @@ impl TestDB {
files
.into_iter()
.filter_map(|file_id| {
let text = self.file_text(file_id.file_id());
let text = self.file_text(file_id.file_id(self));
let annotations = extract_annotations(&text.text(self));
if annotations.is_empty() {
return None;


@ -126,7 +126,7 @@ fn check_impl(
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
for file_id in files {
let module = db.module_for_file_opt(file_id);
let module = db.module_for_file_opt(file_id.file_id(&db));
let module = match module {
Some(m) => m,
None => continue,
@ -387,7 +387,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
};
let module = db.module_for_file(file_id);
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
@ -568,7 +568,7 @@ fn salsa_bug() {
",
);
let module = db.module_for_file(pos.file_id);
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
@ -605,9 +605,9 @@ fn salsa_bug() {
}
";
db.set_file_text(pos.file_id.file_id(), new_text);
db.set_file_text(pos.file_id.file_id(&db), new_text);
let module = db.module_for_file(pos.file_id);
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(match def {


@ -1,9 +1,9 @@
use expect_test::{Expect, expect};
use hir_def::db::DefDatabase;
use hir_expand::files::InFileWrapper;
use hir_expand::{HirFileId, files::InFileWrapper};
use itertools::Itertools;
use salsa::plumbing::FromId;
use span::{HirFileId, TextRange};
use span::TextRange;
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;
@ -16,7 +16,7 @@ use super::visit_module;
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module = db.module_for_file(file_id);
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs = Vec::new();


@ -17,7 +17,7 @@ fn foo() -> i32 {
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id());
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
@ -35,11 +35,11 @@ fn foo() -> i32 {
1
}";
db.set_file_text(pos.file_id.file_id(), new_text);
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id());
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
@ -68,7 +68,7 @@ fn baz() -> i32 {
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id());
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
@ -91,11 +91,11 @@ fn baz() -> i32 {
}
";
db.set_file_text(pos.file_id.file_id(), new_text);
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id());
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {


@ -979,7 +979,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id).unwrap();
let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| {
defs.push(match it {


@ -6,9 +6,8 @@ use hir_def::{
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _},
};
use hir_expand::{HirFileId, InFile};
use hir_expand::{EditionedFileId, HirFileId, InFile};
use hir_ty::db::InternedClosure;
use span::EditionedFileId;
use syntax::ast;
use tt::TextRange;


@ -82,7 +82,7 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId};
use span::{Edition, FileId};
use stdx::{format_to, impl_from, never};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
@ -129,7 +129,7 @@ pub use {
{ModuleDefId, TraitId},
},
hir_expand::{
ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind,
EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
files::{
@ -954,10 +954,11 @@ fn macro_call_diagnostics(
let node =
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let precise_location = if err.span().anchor.file_id == file_id {
let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == file_id {
Some(
err.span().range
+ db.ast_id_map(err.span().anchor.file_id.into())
+ db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
@ -1926,7 +1927,7 @@ impl DefWithBody {
source_map
.macro_calls()
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id.macro_call_id, acc));
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
expr_store_diagnostics(db, acc, &source_map);
@ -2145,10 +2146,11 @@ fn expr_store_diagnostics(
ExpressionStoreDiagnostics::MacroError { node, err } => {
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let precise_location = if err.span().anchor.file_id == node.file_id {
let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == node.file_id {
Some(
err.span().range
+ db.ast_id_map(err.span().anchor.file_id.into())
+ db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
@ -4475,7 +4477,7 @@ impl Impl {
let src = self.source(db)?;
let macro_file = src.file_id.macro_file()?;
let loc = macro_file.macro_call_id.lookup(db);
let loc = macro_file.lookup(db);
let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
let module_id = self.id.lookup(db).container;
@ -4488,9 +4490,8 @@ impl Impl {
}
_ => return None,
};
let file_id = MacroFileId { macro_call_id: derive_attr };
let path = db
.parse_macro_expansion(file_id)
.parse_macro_expansion(derive_attr)
.value
.0
.syntax_node()
@ -4498,7 +4499,7 @@ impl Impl {
.nth(derive_index as usize)
.and_then(<ast::Attr as AstNode>::cast)
.and_then(|it| it.path())?;
Some(InMacroFile { file_id, value: path })
Some(InMacroFile { file_id: derive_attr, value: path })
}
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {

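Both diagnostic paths above now intern the error span's anchor before comparing it with the node's file, since `span::EditionedFileId` and the Salsa-interned `EditionedFileId` are distinct types. A sketch of that comparison, leaning on the `from_span` constructor and the `PartialEq` impls added in this commit:

    // Sketch only: interning a raw span anchor before comparing file ids.
    fn same_file(db: &dyn HirDatabase, anchor: span::EditionedFileId, node_file: HirFileId) -> bool {
        let interned = EditionedFileId::from_span(db, anchor);
        node_file == interned
    }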

@ -20,11 +20,11 @@ use hir_def::{
type_ref::Mutability,
};
use hir_expand::{
ExpandResult, FileRange, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::InRealFile,
files::{FileRangeWrapper, InRealFile},
hygiene::SyntaxContextExt as _,
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
@ -35,7 +35,7 @@ use intern::{Interned, Symbol, sym};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContext};
use span::{Edition, FileId, SyntaxContext};
use stdx::TupleExt;
use syntax::{
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@ -46,10 +46,10 @@ use syntax::{
use crate::{
Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource,
HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro,
Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait,
TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField,
Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
@ -136,8 +136,8 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
/// MacroCall to its expansion's MacroCallId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@ -305,8 +305,8 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
}
pub fn parse(&self, file_id: base_db::EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.editioned_file_id(self.db).into();
pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.into();
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), hir_file_id);
tree
@ -322,27 +322,26 @@ impl<'db> SemanticsImpl<'db> {
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
Some(EditionedFileId::new(
self.db,
file,
self.file_to_module_defs(file).next()?.krate().edition(self.db),
))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let editioned_file_id = self
let file_id = self
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let file_id = base_db::EditionedFileId::new(self.db, editioned_file_id);
.unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), editioned_file_id.into());
self.cache(tree.syntax().clone(), file_id.into());
tree
}
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id()).next()?;
match file_id {
HirFileId::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
let def_map = self.db.crate_def_map(module.krate().id);
match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None,
@ -357,9 +356,8 @@ impl<'db> SemanticsImpl<'db> {
_ => unreachable!("FileId can only belong to a file module"),
}
}
HirFileIdRepr::MacroFile(macro_file) => {
let node =
self.db.lookup_intern_macro_call(macro_file.macro_call_id).to_node(self.db);
HirFileId::MacroFile(macro_file) => {
let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
let root = find_root(&node.value);
self.cache(root, node.file_id);
Some(node)
@ -384,7 +382,7 @@ impl<'db> SemanticsImpl<'db> {
node
}
pub fn expand(&self, file_id: MacroFileId) -> ExpandResult<SyntaxNode> {
pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
self.cache(res.value.clone(), file_id.into());
res
@ -402,13 +400,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
let file_id = self.find_file(attr.syntax()).file_id;
let krate = match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id()).next()?.krate().id
}
HirFileIdRepr::MacroFile(macro_file) => {
self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
let krate = match file_id {
HirFileId::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id
}
HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
};
hir_expand::check_cfg_attr_value(self.db, attr, krate)
}
@ -423,7 +419,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = sa.expand(self.db, macro_call)?;
let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id);
let macro_call = self.db.lookup_intern_macro_call(file_id);
let skip = matches!(
macro_call.def.kind,
@ -456,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.expand(macro_call_id.as_macro_file()))
Some(self.expand(macro_call_id))
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@ -465,7 +461,7 @@ impl<'db> SemanticsImpl<'db> {
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?;
Some(self.parse_or_expand(call_id.as_file()))
Some(self.parse_or_expand(call_id.into()))
}
pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@ -485,7 +481,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_macro_calls(attr)?
.into_iter()
.flat_map(|call| {
let file_id = call?.as_macro_file();
let file_id = call?;
let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
let root_node = value.0.syntax_node();
self.cache(root_node.clone(), file_id.into());
@ -526,7 +522,7 @@ impl<'db> SemanticsImpl<'db> {
Some(result)
}
pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroFileId)>> {
pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
@ -542,7 +538,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter()
.filter(|&(name, _, _)| *name == attr_name)
.map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file()))
.map(|&(_, macro_, call)| (macro_.into(), call))
.collect();
res.is_empty().not().then_some(res)
}
@ -564,7 +560,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_file = analyzer.expansion(macro_call)?;
hir_expand::db::expand_speculative(
self.db,
macro_file.macro_call_id,
macro_file,
speculative_args.syntax(),
token_to_map,
)
@ -572,16 +568,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn speculative_expand_raw(
&self,
macro_file: MacroFileId,
macro_file: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
hir_expand::db::expand_speculative(
self.db,
macro_file.macro_call_id,
speculative_args,
token_to_map,
)
hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
}
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
@ -1025,7 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
None => {
stack.push((
file_id.into(),
smallvec![(token, SyntaxContext::root(file_id.edition()))],
smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
));
}
}
@ -1054,7 +1045,6 @@ impl<'db> SemanticsImpl<'db> {
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file();
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index()
@ -1083,7 +1073,7 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
return process_expansion_for_token(&mut stack, file_id);
return process_expansion_for_token(&mut stack, call_id);
}
// Then check for token trees; that means we are either in a function-like macro or
@ -1127,7 +1117,7 @@ impl<'db> SemanticsImpl<'db> {
.eager_arg(self.db)
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(&mut stack, arg.as_macro_file())
process_expansion_for_token(&mut stack, arg)
}))
}
// derive or derive helper
@ -1151,7 +1141,6 @@ impl<'db> SemanticsImpl<'db> {
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other token in this macro input; all their mappings are the
// same as this
@ -1159,7 +1148,7 @@ impl<'db> SemanticsImpl<'db> {
!text_range.contains_range(t.text_range())
});
return process_expansion_for_token(
&mut stack, file_id,
&mut stack, call_id,
);
}
None => Some(adt),
@ -1207,10 +1196,7 @@ impl<'db> SemanticsImpl<'db> {
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
));
res = res.or(process_expansion_for_token(&mut stack, *derive));
}
res
}
@ -1288,10 +1274,14 @@ impl<'db> SemanticsImpl<'db> {
)
}
pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
pub fn diagnostics_display_range(
&self,
src: InFile<SyntaxNodePtr>,
) -> FileRangeWrapper<FileId> {
let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range_rooted(self.db)
let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}
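`diagnostics_display_range` now returns a `FileRangeWrapper<FileId>`, resolving the editioned file id through the database before handing the range to callers. A sketch of the consuming side, assuming some diagnostic pointer `src`:
// sketch only: `src: InFile<SyntaxNodePtr>` is assumed
let FileRangeWrapper { file_id, range } = sema.diagnostics_display_range(src);
// `file_id` is a plain vfs FileId here; no further db lookup is needed downstream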
fn token_ancestors_with_macros(
@ -1890,20 +1880,16 @@ fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
use span::HirFileIdRepr;
let db: &dyn ExpandDatabase = ctx.db;
let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.ast_id() {
Either::Left(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let node = match it.file_id {
HirFileId::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
@ -1911,13 +1897,11 @@ fn macro_call_to_macro_id(
ctx.macro_to_def(InFile::new(it.file_id, &node))
}
Either::Right(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let node = match it.file_id {
HirFileId::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
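The rewrite above drops `HirFileIdRepr`: `HirFileId` is now matched on directly. A minimal sketch of the new dispatch, with `ast_id`, `db`, and `ctx` assumed to be in scope as in the hunk:
// sketch only, mirroring the match in this hunk
let node = match ast_id.file_id {
    HirFileId::FileId(file_id) => ast_id.to_ptr(db).to_node(&db.parse(file_id).syntax_node()),
    HirFileId::MacroFile(macro_file) => {
        let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
        ast_id.to_ptr(db).to_node(&expansion_info.expanded().value)
    }
};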

View file

@ -197,7 +197,7 @@ impl ChildBySource for DefWithBodyId {
}
sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
res[keys::MACRO_CALL].insert(ast.value, exp_id);
});
for (block, def_map) in body.blocks(db) {

View file

@ -98,12 +98,12 @@ use hir_def::{
hir::{BindingId, Expr, LabelId},
};
use hir_expand::{
ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
attrs::AttrId, name::AsName,
EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId,
name::AsName,
};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{EditionedFileId, FileId};
use span::FileId;
use stdx::impl_from;
use syntax::{
AstNode, AstPtr, SyntaxNode,
@ -116,9 +116,9 @@ use crate::{InFile, InlineAsmOperand, db::HirDatabase, semantics::child_by_sourc
#[derive(Default)]
pub(super) struct SourceToDefCache {
pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
expansion_info_cache: FxHashMap<MacroCallId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroCallId>>,
/// Root node to HirFileId cache
pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>,
}
@ -138,14 +138,14 @@ impl SourceToDefCache {
&mut self,
db: &dyn HirDatabase,
file: EditionedFileId,
) -> Option<MacroFileId> {
) -> Option<MacroCallId> {
if let Some(&m) = self.included_file_cache.get(&file) {
return m;
}
self.included_file_cache.insert(file, None);
for &crate_id in db.relevant_crates(file.into()).iter() {
for &crate_id in db.relevant_crates(file.file_id(db)).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
self.included_file_cache.insert(file_id, Some(macro_call_id));
});
}
self.included_file_cache.get(&file).copied().flatten()
@ -154,7 +154,7 @@ impl SourceToDefCache {
pub(super) fn get_or_insert_expansion(
&mut self,
db: &dyn HirDatabase,
macro_file: MacroFileId,
macro_file: MacroCallId,
) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(db);
@ -184,7 +184,7 @@ impl SourceToDefCtx<'_, '_> {
let n_mods = mods.len();
let modules = |file| {
crate_def_map
.modules_for_file(file)
.modules_for_file(self.db, file)
.map(|local_id| crate_def_map.module_id(local_id))
};
mods.extend(modules(file));
@ -193,18 +193,16 @@ impl SourceToDefCtx<'_, '_> {
self.db
.include_macro_invoc(crate_id)
.iter()
.filter(|&&(_, file_id)| file_id == file)
.filter(|&&(_, file_id)| file_id.file_id(self.db) == file)
.flat_map(|&(macro_call_id, file_id)| {
self.cache
.included_file_cache
.insert(file_id, Some(MacroFileId { macro_call_id }));
self.cache.included_file_cache.insert(file_id, Some(macro_call_id));
modules(
macro_call_id
.lookup(self.db)
.kind
.file_id()
.original_file(self.db)
.file_id(),
.file_id(self.db),
)
}),
);
@ -234,7 +232,7 @@ impl SourceToDefCtx<'_, '_> {
}
None => {
let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied()
self.file_to_def(file_id.file_id(self.db)).first().copied()
}
}?;
@ -247,7 +245,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied()
self.file_to_def(file_id.file_id(self.db)).first().copied()
}
pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
@ -526,8 +524,10 @@ impl SourceToDefCtx<'_, '_> {
return Some(def);
}
let def =
self.file_to_def(src.file_id.original_file(self.db).file_id()).first().copied()?;
let def = self
.file_to_def(src.file_id.original_file(self.db).file_id(self.db))
.first()
.copied()?;
Some(def.into())
}
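`EditionedFileId` is now Salsa-interned, so both constructing it and getting the raw parts back go through the database. A minimal sketch, assuming `db: &dyn salsa::Database` and a vfs `file_id`:
// sketch only: pack and unpack an interned EditionedFileId
let file = EditionedFileId::new(db, file_id, Edition::CURRENT);
let (raw_file_id, edition) = file.unpack(db); // every accessor takes the db now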

View file

@ -29,7 +29,7 @@ use hir_def::{
type_ref::{Mutability, TypeRef, TypeRefId},
};
use hir_expand::{
HirFileId, InFile, MacroFileId, MacroFileIdExt,
HirFileId, InFile, MacroCallId,
mod_path::{ModPath, PathKind, path},
name::{AsName, Name},
};
@ -47,10 +47,9 @@ use hir_ty::{
use intern::sym;
use itertools::Itertools;
use smallvec::SmallVec;
use syntax::ast::{RangeItem, RangeOp};
use syntax::{
SyntaxKind, SyntaxNode, TextRange, TextSize,
ast::{self, AstNode},
ast::{self, AstNode, RangeItem, RangeOp},
};
use triomphe::Arc;
@ -216,7 +215,7 @@ impl SourceAnalyzer {
})
}
pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.store_sm()?.expansion(node)
}
@ -750,7 +749,7 @@ impl SourceAnalyzer {
let bs = self.store_sm()?;
bs.expansion(macro_call).and_then(|it| {
// FIXME: Block def maps
let def = it.macro_call_id.lookup(db).def;
let def = it.lookup(db).def;
db.crate_def_map(def.krate)
.macro_def_to_macro_id
.get(&def.kind.erased_ast_id())
@ -1197,15 +1196,11 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroFileId> {
) -> Option<MacroCallId> {
self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
self.resolver
.item_scope()
.macro_invoc(
macro_call
.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
)
.map(|it| it.as_macro_file())
self.resolver.item_scope().macro_invoc(
macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
)
})
}
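Because `expansion()` now yields a bare `MacroCallId`, the macro's definition is fetched by calling `lookup` on it directly; a one-line sketch of the change in the hunk above:
// sketch only: `call_id` is the MacroCallId returned by `expansion(macro_call)`
let def = call_id.lookup(db).def; // previously: call_id.macro_call_id.lookup(db).def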

View file

@ -1,8 +1,6 @@
//! See [`AssistContext`].
use hir::{FileRange, Semantics};
use ide_db::EditionedFileId;
use ide_db::base_db::salsa::AsDynDatabase;
use hir::{EditionedFileId, FileRange, Semantics};
use ide_db::{FileId, RootDatabase, label::Label};
use syntax::Edition;
use syntax::{
@ -65,10 +63,7 @@ impl<'a> AssistContext<'a> {
config: &'a AssistConfig,
frange: FileRange,
) -> AssistContext<'a> {
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), frange.file_id);
let source_file = sema.parse(editioned_file_id);
let source_file = sema.parse(frange.file_id);
let start = frange.range.start();
let end = frange.range.end();
@ -109,12 +104,16 @@ impl<'a> AssistContext<'a> {
self.frange.range.start()
}
pub(crate) fn vfs_file_id(&self) -> FileId {
self.frange.file_id.file_id(self.db())
}
pub(crate) fn file_id(&self) -> EditionedFileId {
self.frange.file_id
}
pub(crate) fn edition(&self) -> Edition {
self.frange.file_id.edition()
self.frange.file_id.edition(self.db())
}
pub(crate) fn has_empty_selection(&self) -> bool {
@ -169,7 +168,7 @@ impl Assists {
pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
Assists {
resolve,
file: ctx.frange.file_id.file_id(),
file: ctx.frange.file_id.file_id(ctx.db()),
buf: Vec::new(),
allowed: ctx.config.allowed.clone(),
}
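`AssistContext` now exposes two ids: the interned `EditionedFileId` for anything edition-aware (parsing, lexing) and the plain vfs `FileId` for edits. A minimal sketch of the split inside an assist body, with `builder` assumed to be the usual source-change builder:
// sketch only
let source_file = ctx.sema.parse(ctx.file_id()); // EditionedFileId: parsing needs the edition
let edition = ctx.edition();                     // resolved through ctx.db()
builder.edit_file(ctx.vfs_file_id());            // FileId: edits address the raw vfs file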

View file

@ -48,7 +48,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
editor.replace(expr.syntax(), block_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -3,7 +3,6 @@ use std::iter::{self, Peekable};
use either::Either;
use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
use ide_db::RootDatabase;
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::syntax_helpers::suggest_name;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
@ -257,12 +256,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
// Just replace the element that the original range came from
let old_place = {
// Find the original element
let editioned_file_id = ide_db::base_db::EditionedFileId::new(
ctx.sema.db.as_dyn_database(),
arm_list_range.file_id,
);
let file = ctx.sema.parse(editioned_file_id);
let file = ctx.sema.parse(arm_list_range.file_id);
let old_place = file.syntax().covering_element(arm_list_range.range);
match old_place {
@ -300,7 +294,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
}
editor.add_mappings(make.take());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -119,7 +119,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)?
} else {
@ -181,7 +181,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -147,7 +147,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -230,7 +230,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -42,7 +42,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
format!("Bind as `let _ = {ident_pat};`"),
param.syntax().text_range(),
|builder| {
let line_index = ctx.db().line_index(ctx.file_id().into());
let line_index = ctx.db().line_index(ctx.vfs_file_id());
let indent = func.indent_level();
let text_indent = indent + 1;

View file

@ -135,7 +135,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(expr.syntax(), mcall.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -233,7 +233,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -209,7 +209,7 @@ fn replace_usages(
delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
) {
for (file_id, references) in usages {
edit.edit_file(file_id.file_id());
edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports = augment_references_with_imports(ctx, references, target_module);
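Usage maps are still keyed by `EditionedFileId`, so each loop now resolves the key through the database before editing; the sketch below restates the loop shape shared by these assists:
// sketch only: `usages` maps EditionedFileId -> Vec<FileReference>
for (file_id, references) in usages {
    edit.edit_file(file_id.file_id(ctx.db())); // EditionedFileId -> vfs FileId via the db
    // ... rewrite each reference ...
}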

View file

@ -249,7 +249,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
);
fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap()));
builder.edit_file(ctx.file_id());
builder.edit_file(ctx.vfs_file_id());
match &closure_name {
Some((closure_decl, _, _)) => {
fn_ = fn_.indent(closure_decl.indent_level());
@ -506,9 +506,8 @@ fn wrap_capture_in_deref_if_needed(
}
fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr {
let place =
parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.file_id().edition())
.expect("`display_place_source_code()` produced an invalid expr");
let place = parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.edition())
.expect("`display_place_source_code()` produced an invalid expr");
let needs_mut = match capture.kind() {
CaptureKind::SharedRef => false,
CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true,
@ -587,7 +586,7 @@ fn handle_call(
let indent =
if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() };
// FIXME: This text manipulation seems risky.
let text = ctx.db().file_text(file_id.file_id()).text(ctx.db());
let text = ctx.db().file_text(file_id.file_id(ctx.db())).text(ctx.db());
let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end();
if !text.ends_with(')') {
return None;
@ -630,7 +629,7 @@ fn handle_call(
to_insert.push(',');
}
builder.edit_file(file_id);
builder.edit_file(file_id.file_id(ctx.db()));
builder.insert(offset, to_insert);
Some(())

View file

@ -101,7 +101,7 @@ pub(crate) fn convert_for_loop_to_while_let(
editor.replace(for_loop.syntax(), while_loop.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -99,7 +99,7 @@ fn edit_struct_def(
let tuple_fields = ast::make::tuple_field_list(tuple_fields);
let record_fields_text_range = record_fields.syntax().text_range();
edit.edit_file(ctx.file_id());
edit.edit_file(ctx.vfs_file_id());
edit.replace(record_fields_text_range, tuple_fields.syntax().text());
if let Either::Left(strukt) = strukt {
@ -149,7 +149,7 @@ fn edit_struct_references(
let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
for (file_id, refs) in usages {
edit.edit_file(file_id.file_id());
edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
process_struct_name_reference(ctx, r, edit);
}
@ -227,7 +227,7 @@ fn edit_field_references(
let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages {
edit.edit_file(file_id.file_id());
edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
// Only edit the field reference if it's part of a `.field` access

View file

@ -106,7 +106,7 @@ fn replace_usages(
target_module: &hir::Module,
) {
for (file_id, references) in usages.iter() {
edit.edit_file(file_id.file_id());
edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports =
augment_references_with_imports(edit, ctx, references, struct_name, target_module);

View file

@ -95,7 +95,7 @@ fn edit_struct_def(
let record_fields = ast::make::record_field_list(record_fields);
let tuple_fields_text_range = tuple_fields.syntax().text_range();
edit.edit_file(ctx.file_id());
edit.edit_file(ctx.vfs_file_id());
if let Either::Left(strukt) = strukt {
if let Some(w) = strukt.where_clause() {
@ -142,7 +142,7 @@ fn edit_struct_references(
match node {
ast::TupleStructPat(tuple_struct_pat) => {
let file_range = ctx.sema.original_range_opt(&node)?;
edit.edit_file(file_range.file_id);
edit.edit_file(file_range.file_id.file_id(ctx.db()));
edit.replace(
file_range.range,
ast::make::record_pat_with_fields(
@ -197,7 +197,7 @@ fn edit_struct_references(
};
for (file_id, refs) in usages {
edit.edit_file(file_id.file_id());
edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
for node in r.name.syntax().ancestors() {
if edit_node(edit, node).is_some() {
@ -222,7 +222,7 @@ fn edit_field_references(
let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages {
edit.edit_file(file_id.file_id());
edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());

View file

@ -66,7 +66,7 @@ fn destructure_struct_binding_impl(
let mut editor = builder.make_editor(data.ident_pat.syntax());
destructure_pat(ctx, &mut editor, data, &field_names);
update_usages(ctx, &mut editor, data, &field_names.into_iter().collect());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
}
struct StructEditData {

View file

@ -64,7 +64,7 @@ fn expand_record_rest_pattern(
editor.replace(old_field_list.syntax(), new_field_list.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -154,7 +154,7 @@ fn expand_tuple_struct_rest_pattern(
editor.replace(pat.syntax(), new_pat.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -1,8 +1,7 @@
use std::iter;
use either::Either;
use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::base_db::salsa::AsDynDatabase;
use hir::{HasSource, ModuleSource};
use ide_db::{
FileId, FxHashMap, FxHashSet,
assists::AssistId,
@ -114,7 +113,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) =
module.get_usages_and_record_fields(ctx);
builder.edit_file(ctx.file_id());
builder.edit_file(ctx.vfs_file_id());
use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| {
builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}"));
});
@ -126,7 +125,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let mut usages_to_be_processed_for_cur_file = vec![];
for (file_id, usages) in usages_to_be_processed {
if file_id == ctx.file_id() {
if file_id == ctx.vfs_file_id() {
usages_to_be_processed_for_cur_file = usages;
continue;
}
@ -136,7 +135,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
builder.edit_file(ctx.file_id());
builder.edit_file(ctx.vfs_file_id());
for (text_range, usage) in usages_to_be_processed_for_cur_file {
builder.replace(text_range, usage);
}
@ -333,10 +332,7 @@ impl Module {
let mut use_stmts_set = FxHashSet::default();
for (file_id, refs) in node_def.usages(&ctx.sema).all() {
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let source_file = ctx.sema.parse(editioned_file_id);
let source_file = ctx.sema.parse(file_id);
let usages = refs.into_iter().filter_map(|FileReference { range, .. }| {
// handle normal usages
let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?;
@ -368,7 +364,7 @@ impl Module {
None
});
refs_in_files.entry(file_id.file_id()).or_default().extend(usages);
refs_in_files.entry(file_id.file_id(ctx.db())).or_default().extend(usages);
}
}
@ -463,10 +459,7 @@ impl Module {
let file_id = ctx.file_id();
let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let file = ctx.sema.parse(editioned_file_id);
let file = ctx.sema.parse(file_id);
// track uses that do not exist in `Use`
let mut uses_exist_in_sel = false;
@ -492,7 +485,7 @@ impl Module {
ctx,
curr_parent_module,
selection_range,
file_id.file_id(),
file_id.file_id(ctx.db()),
);
// Find use stmts that use def in the current file
@ -679,7 +672,7 @@ fn check_def_in_mod_and_out_sel(
let have_same_parent = if let Some(ast_module) = &curr_parent_module {
ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db()))
} else {
source.file_id.original_file(ctx.db()) == curr_file_id
source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id
};
let in_sel = !selection_range.contains_range(source.value.syntax().text_range());
@ -695,7 +688,7 @@ fn check_def_in_mod_and_out_sel(
(Some(ast_module), Some(hir_module)) => {
ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module)
}
_ => source.file_id.original_file(ctx.db()) == curr_file_id,
_ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id,
};
if have_same_parent {
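With the interned id flowing through `Semantics`, `parse` accepts the `EditionedFileId` as-is and the old `AsDynDatabase` re-wrapping step disappears. A minimal sketch, assuming a `file_id` taken from a usages map:
// sketch only: no ide_db::base_db::EditionedFileId::new(...) wrapper anymore
let file = ctx.sema.parse(file_id);
let name_ref = find_node_at_range::<ast::NameRef>(file.syntax(), range);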

View file

@ -74,7 +74,7 @@ pub(crate) fn extract_struct_from_enum_variant(
def_file_references = Some(references);
continue;
}
builder.edit_file(file_id.file_id());
builder.edit_file(file_id.file_id(ctx.db()));
let processed = process_references(
ctx,
builder,
@ -87,7 +87,7 @@ pub(crate) fn extract_struct_from_enum_variant(
apply_references(ctx.config.insert_use, path, node, import, edition)
});
}
builder.edit_file(ctx.file_id());
builder.edit_file(ctx.vfs_file_id());
let variant = builder.make_mut(variant.clone());
if let Some(references) = def_file_references {

View file

@ -87,7 +87,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
],
);
builder.add_file_edits(ctx.file_id(), edit);
builder.add_file_edits(ctx.vfs_file_id(), edit);
},
)
}

View file

@ -263,7 +263,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
}
editor.add_mappings(make.finish_with_mappings());
edit.add_file_edits(ctx.file_id(), editor);
edit.add_file_edits(ctx.vfs_file_id(), editor);
edit.rename();
},
);
@ -378,7 +378,7 @@ fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String>
return None;
}
match LexedStr::single_token(ctx.file_id().edition(), &inner) {
match LexedStr::single_token(ctx.edition(), &inner) {
Some((SyntaxKind::IDENT, None)) => Some(inner),
_ => None,
}

View file

@ -1,6 +1,4 @@
use hir::{
HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, db::HirDatabase,
};
use hir::{HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef, db::HirDatabase};
use ide_db::FileId;
use syntax::{
AstNode, TextRange,
@ -132,7 +130,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
);
acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
edit.edit_file(target_file.file_id());
edit.edit_file(target_file.file_id(ctx.db()));
let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
@ -159,7 +157,11 @@ fn target_data_for_def(
let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id;
let range = in_file_syntax.value.text_range();
Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db).file_id()))
Some((
ast::AnyHasVisibility::new(source.value),
range,
file_id.original_file(db).file_id(db),
))
}
let target_name;
@ -201,7 +203,7 @@ fn target_data_for_def(
let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db);
let range = in_file_source.value.syntax().text_range();
(ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id())
(ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id(db))
}
// FIXME
hir::ModuleDef::Macro(_) => return None,

View file

@ -55,7 +55,7 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
editor.replace(lhs.syntax(), rhs.syntax());
editor.replace(rhs.syntax(), lhs.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -55,7 +55,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
editor.replace(next.clone(), prev.clone());
}
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}

View file

@ -35,7 +35,7 @@ pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone());
editor.replace(after, before);
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}

View file

@ -36,7 +36,7 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone());
editor.replace(after, before);
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists};
use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use hir::{HasVisibility, HirDisplay, Module};
use ide_db::{
FileId,
assists::AssistId,
@ -134,9 +134,9 @@ fn target_data_for_generate_constant(
.any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'));
let post_string =
if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
Some((offset, indent + 1, Some(file_id.file_id()), post_string))
Some((offset, indent + 1, Some(file_id.file_id(ctx.db())), post_string))
}
_ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())),
_ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id(ctx.db())), "\n".into())),
}
}

View file

@ -124,7 +124,7 @@ impl Field {
) -> Option<Field> {
let db = ctx.sema.db;
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
let edition = module.krate().edition(ctx.db());
let (name, range, ty) = match f {

View file

@ -66,7 +66,7 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>)
if let Some(it) = enum_node.variant_list() {
it.add_variant(&mut editor, &variant);
}
builder.add_file_edits(file_id, editor);
builder.add_file_edits(file_id.file_id(ctx.db()), editor);
})
}

View file

@ -117,7 +117,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}
builder.add_file_edits(ctx.file_id(), edit);
builder.add_file_edits(ctx.vfs_file_id(), edit);
},
);
}

View file

@ -1,8 +1,7 @@
use hir::{
Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics,
StructKind, Type, TypeInfo,
Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, StructKind, Type,
TypeInfo,
};
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{
FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
defs::{Definition, NameRefClass},
@ -207,14 +206,11 @@ fn get_adt_source(
) -> Option<(Option<ast::Impl>, FileId)> {
let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), range.file_id);
let file = ctx.sema.parse(editioned_file_id);
let file = ctx.sema.parse(range.file_id);
let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])
.map(|impl_| (impl_, range.file_id.file_id()))
.map(|impl_| (impl_, range.file_id.file_id(ctx.db())))
}
struct FunctionBuilder {
@ -501,7 +497,7 @@ fn get_fn_target(
target_module: Option<Module>,
call: CallExpr,
) -> Option<(GeneratedFunctionTarget, FileId)> {
let mut file = ctx.file_id().into();
let mut file = ctx.vfs_file_id();
let target = match target_module {
Some(target_module) => {
let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
@ -1191,7 +1187,7 @@ fn next_space_for_fn_in_module(
}
};
(file.file_id(), assist_item)
(file.file_id(db), assist_item)
}
#[derive(Clone, Copy)]

View file

@ -71,6 +71,7 @@ use crate::{
// ```
pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let def_file = ctx.file_id();
let vfs_def_file = ctx.vfs_file_id();
let name = ctx.find_node_at_offset::<ast::Name>()?;
let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
let func_body = ast_func.body()?;
@ -106,7 +107,8 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_file_usage = usages.references.remove(&def_file);
let mut remove_def = true;
let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
let mut inline_refs_for_file = |file_id: EditionedFileId, refs: Vec<FileReference>| {
let file_id = file_id.file_id(ctx.db());
builder.edit_file(file_id);
let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate());
let count = refs.len();
@ -143,7 +145,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
match current_file_usage {
Some(refs) => inline_refs_for_file(def_file, refs),
None => builder.edit_file(def_file),
None => builder.edit_file(vfs_def_file),
}
if remove_def {
builder.delete(ast_func.syntax().text_range());
@ -194,7 +196,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
let call_info = CallInfo::from_name_ref(
name_ref.clone(),
ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(),
)?;
let (function, label) = match &call_info.node {
ast::CallableExpr::Call(call) => {

View file

@ -110,7 +110,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -38,7 +38,7 @@ use crate::{AssistContext, AssistId, Assists};
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let macro_call = ctx.sema.to_def(&unexpanded)?;
let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into();
let target_crate_id = ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into();
let text_range = unexpanded.syntax().text_range();
acc.add(
@ -46,8 +46,8 @@ pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
"Inline macro".to_owned(),
text_range,
|builder| {
let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
let expanded = ctx.sema.parse_or_expand(macro_call.into());
let span_map = ctx.sema.db.expansion_span_map(macro_call);
// Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation,
// which can be very costly for big macros when it is done *even without the assist being invoked*.
let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id);
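A resolved `MacroCallId` now converts straight into a `HirFileId` via `.into()`, and queries that used to take the `MacroFileId` wrapper accept the id directly. A sketch of the pair of calls from this hunk:
// sketch only: `macro_call` is the MacroCallId resolved from the unexpanded call
let expanded = ctx.sema.parse_or_expand(macro_call.into()); // MacroCallId -> HirFileId
let span_map = ctx.sema.db.expansion_span_map(macro_call);  // takes the id directly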

View file

@ -87,17 +87,17 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>)
builder.replace(target, replacement);
}
if file_id == ctx.file_id() {
if file_id == ctx.vfs_file_id() {
builder.delete(ast_alias.syntax().text_range());
definition_deleted = true;
}
};
for (file_id, refs) in usages.into_iter() {
inline_refs_for_file(file_id.file_id(), refs);
inline_refs_for_file(file_id.file_id(ctx.db()), refs);
}
if !definition_deleted {
builder.edit_file(ctx.file_id());
builder.edit_file(ctx.vfs_file_id());
builder.delete(ast_alias.syntax().text_range());
}
},

View file

@ -59,7 +59,7 @@ pub(crate) fn introduce_named_type_parameter(
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -105,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
builder.delete(range_to_delete);
let usages = usages.iter().flat_map(|(file_id, usages)| {
let edition = file_id.edition();
let edition = file_id.edition(ctx.db());
usages.iter().map(move |usage| (edition, usage.range))
});
for (edition, range) in usages {

View file

@ -22,7 +22,7 @@ use crate::{
// ```
pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
// Enable this assist if the user selects all "meaningful" content in the source file
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@ -38,13 +38,13 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.as_str().to_smolstr();
let path = format!("../{module_name}.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
acc.add(
AssistId::refactor("move_from_mod_rs"),
format!("Convert {module_name}/mod.rs to {module_name}.rs"),
target,
|builder| {
builder.move_file(ctx.file_id(), dst);
builder.move_file(ctx.vfs_file_id(), dst);
},
)
}

View file

@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
buf,
);
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
builder.create_file(dst, contents);
},
)

View file

@ -22,7 +22,7 @@ use crate::{
// ```
pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
// Enable this assist if the user selects all "meaningful" content in the source file
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@ -38,13 +38,13 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.as_str().to_smolstr();
let path = format!("./{module_name}/mod.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
acc.add(
AssistId::refactor("move_to_mod_rs"),
format!("Convert {module_name}.rs to {module_name}/mod.rs"),
target,
|builder| {
builder.move_file(ctx.file_id(), dst);
builder.move_file(ctx.vfs_file_id(), dst);
},
)
}

View file

@ -28,6 +28,6 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
_ => (),
}
editor.delete(mut_token);
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}

View file

@ -59,7 +59,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
editor.add_mappings(make.finish_with_mappings());
}
editor.replace(parens.syntax(), expr.syntax());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -1,6 +1,6 @@
use std::collections::hash_map::Entry;
use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use hir::{FileRange, InFile, InRealFile, Module, ModuleSource};
use ide_db::text_edit::TextRange;
use ide_db::{
FxHashMap, RootDatabase,

View file

@ -1,6 +1,4 @@
use ide_db::{
EditionedFileId, base_db::salsa::AsDynDatabase, defs::Definition, search::FileReference,
};
use ide_db::{EditionedFileId, defs::Definition, search::FileReference};
use syntax::{
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, T, TextRange,
algo::{find_node_at_range, least_common_ancestor_element},
@ -90,7 +88,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
for (file_id, references) in fn_def.usages(&ctx.sema).all() {
process_usages(ctx, builder, file_id, references, param_position, is_self_present);
}
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -98,15 +96,13 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
fn process_usages(
ctx: &AssistContext<'_>,
builder: &mut SourceChangeBuilder,
file_id: EditionedFileId,
editioned_file_id: EditionedFileId,
references: Vec<FileReference>,
arg_to_remove: usize,
is_self_present: bool,
) {
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let source_file = ctx.sema.parse(editioned_file_id_wrapper);
let source_file = ctx.sema.parse(editioned_file_id);
let file_id = editioned_file_id.file_id(ctx.db());
builder.edit_file(file_id);
let possible_ranges = references
.into_iter()

View file

@ -82,7 +82,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -106,7 +106,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.zip(sorted)
.for_each(|(old, new)| editor.replace(old.syntax(), new.syntax()));
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View file

@ -1,4 +1,4 @@
use hir::{InFile, MacroFileIdExt, ModuleDef};
use hir::{InFile, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{

View file

@ -142,7 +142,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
let mut editor = builder.make_editor(if_expr.syntax());
editor.replace(if_expr.syntax(), expr.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -291,7 +291,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
let mut editor = builder.make_editor(match_expr.syntax());
editor.replace(match_expr.syntax(), if_let_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

Some files were not shown because too many files have changed in this diff