Merge pull request #19617 from ChayimFriedman2/more-actual

internal: Make `HirFileId`, `EditionedFileId` and macro files Salsa structs
Chayim Refael Friedman, 2025-04-19 19:21:53 +00:00, committed by GitHub
commit 150bb4a00f
195 changed files with 1473 additions and 1525 deletions
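
The change that drives most of these 195 files: `EditionedFileId`, `HirFileId` and macro files are now Salsa-interned structs rather than plain value types from the `span` crate, so constructing or inspecting them requires a database handle. A minimal before/after sketch of the call-site migration, using only names that appear in the hunks below (the `db` and `krate` bindings are assumed to be in scope):

    // Before: EditionedFileId was a plain (FileId, Edition) value.
    let file = krate.data(db).root_file_id();
    let raw = file.file_id();

    // After: the id is interned in the Salsa database, so accessors take `db`.
    let file = krate.data(db).root_file_id(db);
    let raw = file.file_id(db);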

View file

@@ -16,11 +16,11 @@ use intern::Symbol;
 use la_arena::{Arena, Idx, RawIdx};
 use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
 use salsa::{Durability, Setter};
-use span::{Edition, EditionedFileId};
+use span::Edition;
 use triomphe::Arc;
 use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};

-use crate::{CrateWorkspaceData, RootQueryDb};
+use crate::{CrateWorkspaceData, EditionedFileId, RootQueryDb};

 pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
@@ -773,8 +773,8 @@ pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet<
 }

 impl BuiltCrateData {
-    pub fn root_file_id(&self) -> EditionedFileId {
-        EditionedFileId::new(self.root_file_id, self.edition)
+    pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
+        EditionedFileId::new(db, self.root_file_id, self.edition)
     }
 }

View file

@@ -20,6 +20,7 @@ use rustc_hash::{FxHashSet, FxHasher};
 pub use salsa::{self};
 use salsa::{Durability, Setter};
 pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
+use span::Edition;
 use syntax::{Parse, SyntaxError, ast};
 use triomphe::Arc;
 pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@@ -151,21 +152,39 @@ impl Files {
     }
 }

-#[salsa::interned(no_lifetime)]
+#[salsa::interned(no_lifetime, constructor=from_span)]
 pub struct EditionedFileId {
     pub editioned_file_id: span::EditionedFileId,
 }

 impl EditionedFileId {
-    pub fn file_id(&self, db: &dyn salsa::Database) -> vfs::FileId {
+    // Salsa already uses the name `new`...
+    #[inline]
+    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
+        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
+    }
+
+    #[inline]
+    pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
+        EditionedFileId::new(db, file_id, Edition::CURRENT)
+    }
+
+    #[inline]
+    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
         let id = self.editioned_file_id(db);
         id.file_id()
     }

-    fn unpack(&self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+    #[inline]
+    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
         let id = self.editioned_file_id(db);
         (id.file_id(), id.edition())
     }
+
+    #[inline]
+    pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
+        self.editioned_file_id(db).edition()
+    }
 }

 #[salsa::input]
@@ -189,7 +208,7 @@ pub struct SourceRootInput {
 #[query_group::query_group]
 pub trait RootQueryDb: SourceDatabase + salsa::Database {
     /// Parses the file into the syntax tree.
-    #[salsa::invoke_actual(parse)]
+    #[salsa::invoke(parse)]
     #[salsa::lru(128)]
     fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
@@ -201,6 +220,7 @@ pub trait RootQueryDb: SourceDatabase + salsa::Database {
     fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;

     /// Crates whose root file is in `id`.
+    #[salsa::invoke_interned(source_root_crates)]
     fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;

     #[salsa::transparent]
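
For orientation, a hedged sketch of the new interned `EditionedFileId` API defined above (assuming `db: &dyn salsa::Database`, a `vfs::FileId` named `file_id`, and `Edition` from the `span` crate are in scope):

    // `new` packs (FileId, Edition) into a `span::EditionedFileId` and interns
    // it via `from_span`, so equal pairs always yield the same Salsa id.
    let file = EditionedFileId::new(db, file_id, Edition::Edition2021);
    let current = EditionedFileId::current_edition(db, file_id);
    // Accessors go back through the database to un-intern the payload.
    let (raw, edition) = file.unpack(db);
    assert_eq!(raw, file.file_id(db));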

View file

@@ -1,10 +1,9 @@
 //! Defines database & queries for name resolution.
 use base_db::{Crate, RootQueryDb, SourceDatabase};
 use either::Either;
-use hir_expand::{HirFileId, MacroDefId, db::ExpandDatabase};
+use hir_expand::{EditionedFileId, HirFileId, MacroCallId, MacroDefId, db::ExpandDatabase};
 use intern::sym;
 use la_arena::ArenaMap;
-use span::{EditionedFileId, MacroCallId};
 use syntax::{AstPtr, ast};
 use thin_vec::ThinVec;
 use triomphe::Arc;
@@ -109,26 +108,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
     #[salsa::invoke(ItemTree::file_item_tree_query)]
     fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;

-    #[salsa::invoke_actual(ItemTree::block_item_tree_query)]
+    #[salsa::invoke(ItemTree::block_item_tree_query)]
     fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;

-    #[salsa::invoke_actual(DefMap::crate_local_def_map_query)]
+    #[salsa::invoke(DefMap::crate_local_def_map_query)]
     fn crate_local_def_map(&self, krate: Crate) -> (Arc<DefMap>, Arc<LocalDefMap>);

-    #[salsa::invoke_actual(DefMap::crate_def_map_query)]
+    #[salsa::invoke(DefMap::crate_def_map_query)]
     fn crate_def_map(&self, krate: Crate) -> Arc<DefMap>;

     /// Computes the block-level `DefMap`.
-    #[salsa::invoke_actual(DefMap::block_def_map_query)]
+    #[salsa::invoke(DefMap::block_def_map_query)]
     fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;

     /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
-    #[salsa::invoke_actual(macro_def)]
+    #[salsa::invoke(macro_def)]
     fn macro_def(&self, m: MacroId) -> MacroDefId;

     // region:data

-    #[salsa::invoke_actual(VariantFields::query)]
+    #[salsa::invoke(VariantFields::query)]
     fn variant_fields_with_source_map(
         &self,
         id: VariantId,
@@ -139,24 +138,24 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
         self.enum_variants_with_diagnostics(id).0
     }

-    #[salsa::invoke_actual(EnumVariants::enum_variants_query)]
+    #[salsa::invoke(EnumVariants::enum_variants_query)]
     fn enum_variants_with_diagnostics(
         &self,
         id: EnumId,
     ) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>);

     #[salsa::transparent]
-    #[salsa::invoke_actual(ImplItems::impl_items_query)]
+    #[salsa::invoke(ImplItems::impl_items_query)]
     fn impl_items(&self, e: ImplId) -> Arc<ImplItems>;

-    #[salsa::invoke_actual(ImplItems::impl_items_with_diagnostics_query)]
+    #[salsa::invoke(ImplItems::impl_items_with_diagnostics_query)]
     fn impl_items_with_diagnostics(&self, e: ImplId) -> (Arc<ImplItems>, DefDiagnostics);

     #[salsa::transparent]
-    #[salsa::invoke_actual(TraitItems::trait_items_query)]
+    #[salsa::invoke(TraitItems::trait_items_query)]
     fn trait_items(&self, e: TraitId) -> Arc<TraitItems>;

-    #[salsa::invoke_actual(TraitItems::trait_items_with_diagnostics_query)]
+    #[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)]
     fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitItems>, DefDiagnostics);

     #[salsa::tracked]
@@ -214,61 +213,61 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
         self.type_alias_signature_with_source_map(e).0
     }

-    #[salsa::invoke_actual(TraitSignature::query)]
+    #[salsa::invoke(TraitSignature::query)]
     fn trait_signature_with_source_map(
         &self,
         trait_: TraitId,
     ) -> (Arc<TraitSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(ImplSignature::query)]
+    #[salsa::invoke(ImplSignature::query)]
     fn impl_signature_with_source_map(
         &self,
         impl_: ImplId,
     ) -> (Arc<ImplSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(StructSignature::query)]
+    #[salsa::invoke(StructSignature::query)]
     fn struct_signature_with_source_map(
         &self,
         struct_: StructId,
     ) -> (Arc<StructSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(UnionSignature::query)]
+    #[salsa::invoke(UnionSignature::query)]
     fn union_signature_with_source_map(
         &self,
         union_: UnionId,
     ) -> (Arc<UnionSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(EnumSignature::query)]
+    #[salsa::invoke(EnumSignature::query)]
     fn enum_signature_with_source_map(
         &self,
         e: EnumId,
     ) -> (Arc<EnumSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(ConstSignature::query)]
+    #[salsa::invoke(ConstSignature::query)]
     fn const_signature_with_source_map(
         &self,
         e: ConstId,
     ) -> (Arc<ConstSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(StaticSignature::query)]
+    #[salsa::invoke(StaticSignature::query)]
     fn static_signature_with_source_map(
         &self,
         e: StaticId,
     ) -> (Arc<StaticSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(FunctionSignature::query)]
+    #[salsa::invoke(FunctionSignature::query)]
     fn function_signature_with_source_map(
         &self,
         e: FunctionId,
     ) -> (Arc<FunctionSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(TraitAliasSignature::query)]
+    #[salsa::invoke(TraitAliasSignature::query)]
     fn trait_alias_signature_with_source_map(
         &self,
         e: TraitAliasId,
     ) -> (Arc<TraitAliasSignature>, Arc<ExpressionStoreSourceMap>);

-    #[salsa::invoke_actual(TypeAliasSignature::query)]
+    #[salsa::invoke(TypeAliasSignature::query)]
     fn type_alias_signature_with_source_map(
         &self,
         e: TypeAliasId,
@@ -283,7 +282,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
     #[salsa::invoke(Body::body_query)]
     fn body(&self, def: DefWithBodyId) -> Arc<Body>;

-    #[salsa::invoke_actual(ExprScopes::expr_scopes_query)]
+    #[salsa::invoke(ExprScopes::expr_scopes_query)]
     fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;

     #[salsa::transparent]
@@ -306,17 +305,18 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
     // region:attrs

-    #[salsa::invoke_actual(Attrs::fields_attrs_query)]
+    #[salsa::invoke(Attrs::fields_attrs_query)]
     fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;

     // should this really be a query?
-    #[salsa::invoke_actual(crate::attr::fields_attrs_source_map)]
+    #[salsa::invoke(crate::attr::fields_attrs_source_map)]
     fn fields_attrs_source_map(
         &self,
         def: VariantId,
     ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;

-    #[salsa::invoke(AttrsWithOwner::attrs_query)]
+    // FIXME: Make this a non-interned query.
+    #[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;

     #[salsa::transparent]
@@ -328,39 +328,39 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
     #[salsa::invoke(LangItems::lang_item_query)]
     fn lang_item(&self, start_crate: Crate, item: LangItem) -> Option<LangItemTarget>;

-    #[salsa::invoke_actual(ImportMap::import_map_query)]
+    #[salsa::invoke(ImportMap::import_map_query)]
     fn import_map(&self, krate: Crate) -> Arc<ImportMap>;

     // region:visibilities

-    #[salsa::invoke_actual(visibility::field_visibilities_query)]
+    #[salsa::invoke(visibility::field_visibilities_query)]
     fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;

     // FIXME: unify function_visibility and const_visibility?
-    #[salsa::invoke_actual(visibility::function_visibility_query)]
+    #[salsa::invoke(visibility::function_visibility_query)]
     fn function_visibility(&self, def: FunctionId) -> Visibility;

-    #[salsa::invoke_actual(visibility::const_visibility_query)]
+    #[salsa::invoke(visibility::const_visibility_query)]
     fn const_visibility(&self, def: ConstId) -> Visibility;

-    #[salsa::invoke_actual(visibility::type_alias_visibility_query)]
+    #[salsa::invoke(visibility::type_alias_visibility_query)]
     fn type_alias_visibility(&self, def: TypeAliasId) -> Visibility;

     // endregion:visibilities

-    #[salsa::invoke_actual(LangItems::crate_lang_items_query)]
+    #[salsa::invoke(LangItems::crate_lang_items_query)]
     fn crate_lang_items(&self, krate: Crate) -> Option<Arc<LangItems>>;

-    #[salsa::invoke_actual(crate::lang_item::notable_traits_in_deps)]
+    #[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
     fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>;

-    #[salsa::invoke_actual(crate::lang_item::crate_notable_traits)]
+    #[salsa::invoke(crate::lang_item::crate_notable_traits)]
     fn crate_notable_traits(&self, krate: Crate) -> Option<Arc<[TraitId]>>;

-    #[salsa::invoke_actual(crate_supports_no_std)]
+    #[salsa::invoke(crate_supports_no_std)]
     fn crate_supports_no_std(&self, crate_id: Crate) -> bool;

-    #[salsa::invoke_actual(include_macro_invoc)]
+    #[salsa::invoke(include_macro_invoc)]
     fn include_macro_invoc(&self, crate_id: Crate) -> Arc<[(MacroCallId, EditionedFileId)]>;
 }
@@ -382,7 +382,7 @@ fn include_macro_invoc(
 }

 fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
-    let file = crate_id.data(db).root_file_id();
+    let file = crate_id.data(db).root_file_id(db);
     let item_tree = db.file_item_tree(file.into());
     let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
     for attr in &**attrs {

View file

@@ -13,11 +13,11 @@ use std::ops::{Deref, Index};
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use hir_expand::{ExpandError, InFile, mod_path::ModPath, name::Name};
+use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name};
 use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{Edition, MacroFileId, SyntaxContext};
+use span::{Edition, SyntaxContext};
 use syntax::{AstPtr, SyntaxNodePtr, ast};
 use triomphe::Arc;
 use tt::TextRange;
@@ -138,7 +138,7 @@ pub struct ExpressionStoreSourceMap {
     template_map: Option<Box<FormatTemplate>>,

-    pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroFileId>,
+    pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,

     /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
     /// the source map (since they're just as volatile).
@@ -645,12 +645,12 @@ impl ExpressionStoreSourceMap {
         self.expr_map.get(&src).cloned()
     }

-    pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
+    pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
         let src = node.map(AstPtr::new);
         self.expansions.get(&src).cloned()
     }

-    pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroFileId)> + '_ {
+    pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
         self.expansions.iter().map(|(&a, &b)| (a, b))
     }
@@ -696,11 +696,11 @@ impl ExpressionStoreSourceMap {
         self.expr_map.get(&src).copied()
     }

-    pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroFileId)> {
+    pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
         self.expansions.iter()
     }

-    pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
+    pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
         self.expansions.get(&node.map(AstPtr::new)).copied()
     }
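
With `MacroFileId` gone, a macro call and its expanded "file" are identified by the same `MacroCallId`. A small consumer sketch under that assumption (`source_map` is an `ExpressionStoreSourceMap`, `node` an `InFile<&ast::MacroCall>`):

    let call_id = source_map.expansion(node);
    // `call_id: Option<MacroCallId>` — the same id that keys the expansion
    // in `db.parse_macro_expansion` (see the hir-expand hunks later on).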

View file

@@ -4,11 +4,9 @@ use std::mem;
 use base_db::Crate;
 use drop_bomb::DropBomb;
-use hir_expand::attrs::RawAttrs;
-use hir_expand::eager::EagerCallBackFn;
 use hir_expand::{
     ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
-    mod_path::ModPath, span_map::SpanMap,
+    attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
 };
 use span::{AstIdMap, Edition, SyntaxContext};
 use syntax::ast::HasAttrs;
@@ -183,8 +181,7 @@ impl Expander {
             ));
         }

-        let macro_file = call_id.as_macro_file();
-        let res = db.parse_macro_expansion(macro_file);
+        let res = db.parse_macro_expansion(call_id);

         let err = err.or(res.err);
         ExpandResult {
@@ -192,7 +189,7 @@ impl Expander {
                 let parse = res.value.0.cast::<T>();

                 self.recursion_depth += 1;
-                let old_file_id = std::mem::replace(&mut self.current_file_id, macro_file.into());
+                let old_file_id = std::mem::replace(&mut self.current_file_id, call_id.into());
                 let old_span_map =
                     std::mem::replace(&mut self.span_map, db.span_map(self.current_file_id));
                 let prev_ast_id_map =

View file

@@ -9,13 +9,12 @@ use std::mem;
 use either::Either;
 use hir_expand::{
-    InFile, Lookup, MacroDefId,
+    HirFileId, InFile, Lookup, MacroDefId,
     mod_path::tool_path,
     name::{AsName, Name},
 };
 use intern::{Symbol, sym};
 use rustc_hash::FxHashMap;
-use span::HirFileId;
 use stdx::never;
 use syntax::{
     AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
@@ -1887,10 +1886,7 @@ impl ExprCollector<'_> {
             self.module.krate(),
             resolver,
             &mut |ptr, call| {
-                _ = self
-                    .source_map
-                    .expansions
-                    .insert(ptr.map(|(it, _)| it), call.as_macro_file());
+                _ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
             },
         )
     }
@@ -2516,7 +2512,7 @@ impl ExprCollector<'_> {
             None
         } else {
             hygiene_id.lookup().outer_expn(self.db).map(|expansion| {
-                let expansion = self.db.lookup_intern_macro_call(expansion);
+                let expansion = self.db.lookup_intern_macro_call(expansion.into());
                 (hygiene_id.lookup().parent(self.db), expansion.def)
             })
         };
@@ -2546,7 +2542,7 @@ impl ExprCollector<'_> {
                     hygiene_id =
                         HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
                     hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
-                        let expansion = self.db.lookup_intern_macro_call(expansion);
+                        let expansion = self.db.lookup_intern_macro_call(expansion.into());
                         (parent_ctx.parent(self.db), expansion.def)
                     });
                 }

View file

@@ -215,7 +215,7 @@ pub(super) fn lower_path(
     if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
         let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
         if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) {
-            if collector.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+            if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
                 kind = match resolve_crate_root(collector.db, syn_ctxt) {
                     Some(crate_root) => PathKind::DollarCrate(crate_root),
                     None => PathKind::Crate,

View file

@@ -319,7 +319,6 @@ fn compute_expr_scopes(
 mod tests {
     use base_db::RootQueryDb;
     use hir_expand::{InFile, name::AsName};
-    use salsa::AsDynDatabase;
     use span::FileId;
     use syntax::{AstNode, algo::find_node_at_offset, ast};
     use test_fixture::WithFixture;
@@ -331,7 +330,7 @@ mod tests {
         let krate = db.test_crate();
         let crate_def_map = db.crate_def_map(krate);

-        let module = crate_def_map.modules_for_file(file_id).next().unwrap();
+        let module = crate_def_map.modules_for_file(db, file_id).next().unwrap();
         let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
         match def.take_values().unwrap() {
             ModuleDefId::FunctionId(it) => it,
@@ -354,11 +353,9 @@ mod tests {
         let editioned_file_id = position.file_id;
         let offset = position.offset;

-        let (file_id, _) = editioned_file_id.unpack();
-        let editioned_file_id_wrapper =
-            base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+        let (file_id, _) = editioned_file_id.unpack(&db);

-        let file_syntax = db.parse(editioned_file_id_wrapper).syntax_node();
+        let file_syntax = db.parse(editioned_file_id).syntax_node();
         let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
         let function = find_function(&db, file_id);
@@ -512,11 +509,9 @@ fn foo() {
         let editioned_file_id = position.file_id;
         let offset = position.offset;

-        let (file_id, _) = editioned_file_id.unpack();
-        let file_id_wrapper =
-            base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+        let (file_id, _) = editioned_file_id.unpack(&db);

-        let file = db.parse(file_id_wrapper).ok().unwrap();
+        let file = db.parse(editioned_file_id).ok().unwrap();
         let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
             .expect("failed to find a name at the target offset");
         let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();

View file

@@ -189,8 +189,8 @@ fn f() {
 }
 "#,
         expect![[r#"
-            BlockId(4801) in BlockRelativeModuleId { block: Some(BlockId(4800)), local_id: Idx::<ModuleData>(1) }
-            BlockId(4800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+            BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
+            BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
             crate scope
         "#]],
     );

View file

@@ -35,9 +35,9 @@ macro_rules! f {
     };
 }

-struct#0:1@58..64#19456# MyTraitMap2#0:2@31..42#ROOT2024# {#0:1@72..73#19456#
-    map#0:1@86..89#19456#:#0:1@89..90#19456# #0:1@89..90#19456#::#0:1@91..93#19456#std#0:1@93..96#19456#::#0:1@96..98#19456#collections#0:1@98..109#19456#::#0:1@109..111#19456#HashSet#0:1@111..118#19456#<#0:1@118..119#19456#(#0:1@119..120#19456#)#0:1@120..121#19456#>#0:1@121..122#19456#,#0:1@122..123#19456#
-}#0:1@132..133#19456#
+struct#0:1@58..64#14336# MyTraitMap2#0:2@31..42#ROOT2024# {#0:1@72..73#14336#
+    map#0:1@86..89#14336#:#0:1@89..90#14336# #0:1@89..90#14336#::#0:1@91..93#14336#std#0:1@93..96#14336#::#0:1@96..98#14336#collections#0:1@98..109#14336#::#0:1@109..111#14336#HashSet#0:1@111..118#14336#<#0:1@118..119#14336#(#0:1@119..120#14336#)#0:1@120..121#14336#>#0:1@121..122#14336#,#0:1@122..123#14336#
+}#0:1@132..133#14336#
 "#]],
     );
 }
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
 #[macro_use]
 mod foo;

-struct#1:1@59..65#19456# Foo#0:2@32..35#ROOT2024#(#1:1@70..71#19456#u32#0:2@41..44#ROOT2024#)#1:1@74..75#19456#;#1:1@75..76#19456#
+struct#1:1@59..65#14336# Foo#0:2@32..35#ROOT2024#(#1:1@70..71#14336#u32#0:2@41..44#ROOT2024#)#1:1@74..75#14336#;#1:1@75..76#14336#
 "#]],
     );
 }
@@ -423,10 +423,10 @@ m! { foo, bar }
 macro_rules! m {
     ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
 }
-impl#\19456# Bar#\19456# {#\19456#
-    fn#\19456# foo#\ROOT2024#(#\19456#)#\19456# {#\19456#}#\19456#
-    fn#\19456# bar#\ROOT2024#(#\19456#)#\19456# {#\19456#}#\19456#
-}#\19456#
+impl#\14336# Bar#\14336# {#\14336#
+    fn#\14336# foo#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
+    fn#\14336# bar#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
+}#\14336#
 "#]],
     );
 }

View file

@@ -19,14 +19,13 @@ use std::{iter, ops::Range, sync};
 use base_db::RootQueryDb;
 use expect_test::Expect;
 use hir_expand::{
-    InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind,
+    InFile, MacroCallKind, MacroKind,
     db::ExpandDatabase,
     proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
     span_map::SpanMapRef,
 };
 use intern::Symbol;
 use itertools::Itertools;
-use salsa::AsDynDatabase;
 use span::{Edition, Span};
 use stdx::{format_to, format_to_acc};
 use syntax::{
@@ -67,8 +66,6 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
             let editioned_file_id =
                 ast_id.file_id.file_id().expect("macros inside macros are not supported");
-            let editioned_file_id =
-                base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
             let ast = db.parse(editioned_file_id).syntax_node();
             let ast_id_map = db.ast_id_map(ast_id.file_id);
@@ -143,8 +140,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
             )
             .unwrap();
             let macro_call_id = res.value.unwrap();
-            let macro_file = MacroFileId { macro_call_id };
-            let mut expansion_result = db.parse_macro_expansion(macro_file);
+            let mut expansion_result = db.parse_macro_expansion(macro_call_id);
             expansion_result.err = expansion_result.err.or(res.err);
             expansions.push((macro_call.value.clone(), expansion_result));
         }
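
This hunk shows the shape of the simplification repeated across the tree: `parse_macro_expansion` now accepts the `MacroCallId` directly, with no `MacroFileId` wrapper in between. A sketch, using only names from the hunk above (`db` is the test's `ExpandDatabase`):

    // Before:
    //     let macro_file = MacroFileId { macro_call_id };
    //     let result = db.parse_macro_expansion(macro_file);
    // After: the call id itself names the expansion.
    let result = db.parse_macro_expansion(macro_call_id);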

View file

@@ -62,14 +62,14 @@ use std::ops::Deref;
 use base_db::Crate;
 use hir_expand::{
-    ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath, name::Name,
-    proc_macro::ProcMacroKind,
+    EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath,
+    name::Name, proc_macro::ProcMacroKind,
 };
 use intern::Symbol;
 use itertools::Itertools;
 use la_arena::Arena;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
+use span::{Edition, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
 use syntax::{AstNode, SmolStr, SyntaxNode, ToSmolStr, ast};
 use triomphe::Arc;
@@ -328,9 +328,7 @@ impl ModuleOrigin {
         match self {
             &ModuleOrigin::File { definition: editioned_file_id, .. }
             | &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
-                let definition = base_db::EditionedFileId::new(db, editioned_file_id);
-                let sf = db.parse(definition).tree();
+                let sf = db.parse(editioned_file_id).tree();
                 InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
             }
             &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
@@ -389,7 +387,7 @@ impl DefMap {
             .entered();

         let module_data = ModuleData::new(
-            ModuleOrigin::CrateRoot { definition: krate.root_file_id() },
+            ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
             Visibility::Public,
         );
@@ -402,7 +400,7 @@ impl DefMap {
         let (def_map, local_def_map) = collector::collect_defs(
             db,
             def_map,
-            TreeId::new(krate.root_file_id().into(), None),
+            TreeId::new(krate.root_file_id(db).into(), None),
             None,
         );
@@ -488,11 +486,15 @@ impl DefMap {
 }

 impl DefMap {
-    pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
+    pub fn modules_for_file<'a>(
+        &'a self,
+        db: &'a dyn DefDatabase,
+        file_id: FileId,
+    ) -> impl Iterator<Item = LocalModuleId> + 'a {
         self.modules
             .iter()
             .filter(move |(_id, data)| {
-                data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id)
+                data.origin.file_id().map(|file_id| file_id.file_id(db)) == Some(file_id)
             })
             .map(|(id, _data)| id)
     }
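
Since `ModuleOrigin` now stores interned ids, `modules_for_file` threads the database through so its filter closure can un-intern each module's `EditionedFileId` back to a raw `vfs::FileId`. Usage, as in the updated tests elsewhere in this diff (`db` implements `DefDatabase`, `file_id` is a plain `FileId`):

    // Find the first module whose defining file is `file_id`.
    let module = crate_def_map.modules_for_file(db, file_id).next().unwrap();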

View file

@@ -1,13 +1,12 @@
 //! Expansion of associated items

 use hir_expand::{AstId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, name::Name};
-use span::MacroCallId;
 use syntax::ast;
 use triomphe::Arc;

 use crate::{
     AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
-    ItemLoc, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
+    ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
     db::DefDatabase,
     item_tree::{AssocItem, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
     macro_call_as_call_id,
@@ -296,9 +295,8 @@ impl<'a> AssocItemCollector<'a> {
             tracing::warn!("macro expansion is too deep");
             return;
         }
-        let file_id = macro_call_id.as_file();
-        let tree_id = TreeId::new(file_id, None);
-        let item_tree = self.db.file_item_tree(file_id);
+        let tree_id = TreeId::new(macro_call_id.into(), None);
+        let item_tree = self.db.file_item_tree(macro_call_id.into());

         self.depth += 1;
         for item in item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item) {

View file

@@ -9,8 +9,8 @@ use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
 use cfg::{CfgAtom, CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
-    ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
-    MacroFileIdExt,
+    EditionedFileId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
+    MacroDefKind,
     attrs::{Attr, AttrId},
     builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
     mod_path::{ModPath, PathKind},
@@ -21,7 +21,7 @@ use intern::{Interned, sym};
 use itertools::{Itertools, izip};
 use la_arena::Idx;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId, FileAstId, SyntaxContext};
+use span::{Edition, FileAstId, SyntaxContext};
 use syntax::ast;
 use triomphe::Arc;
@@ -250,7 +250,7 @@ impl DefCollector<'_> {
     fn seed_with_top_level(&mut self) {
         let _p = tracing::info_span!("seed_with_top_level").entered();

-        let file_id = self.def_map.krate.data(self.db).root_file_id();
+        let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
         let item_tree = self.db.file_item_tree(file_id.into());
         let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
         let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
@@ -1512,11 +1512,11 @@ impl DefCollector<'_> {
             tracing::warn!("macro expansion is too deep");
             return;
         }
-        let file_id = macro_call_id.as_file();
+        let file_id = macro_call_id.into();
         let item_tree = self.db.file_item_tree(file_id);

-        let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db) {
+        let mod_dir = if macro_call_id.is_include_macro(self.db) {
             ModDir::root()
         } else {
             self.mod_dirs[&module_id].clone()

View file

@@ -1,8 +1,7 @@
 //! This module resolves `mod foo;` declaration to file.
 use arrayvec::ArrayVec;
 use base_db::AnchoredPath;
-use hir_expand::{HirFileIdExt, name::Name};
-use span::EditionedFileId;
+use hir_expand::{EditionedFileId, name::Name};

 use crate::{HirFileId, db::DefDatabase};
@@ -79,7 +78,7 @@ impl ModDir {
         let orig_file_id = file_id.original_file_respecting_includes(db);
         for candidate in candidate_files.iter() {
-            let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
+            let path = AnchoredPath { anchor: orig_file_id.file_id(db), path: candidate.as_str() };
             if let Some(file_id) = db.resolve_path(path) {
                 let is_mod_rs = candidate.ends_with("/mod.rs");
@@ -92,7 +91,7 @@ impl ModDir {
                 if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
                     return Ok((
                         // FIXME: Edition, is this rightr?
-                        EditionedFileId::new(file_id, orig_file_id.edition()),
+                        EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
                         is_mod_rs,
                         mod_dir,
                     ));

View file

@@ -18,7 +18,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
         });
         assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
     }
-    db.set_file_text(pos.file_id.file_id(), ra_fixture_change);
+    db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change);

     {
         let events = db.log_executed(|| {
@@ -55,7 +55,7 @@ pub const BAZ: u32 = 0;
     let mut add_crate = |crate_name, root_file_idx: usize| {
         new_crate_graph.add_crate_root(
-            files[root_file_idx].file_id(),
+            files[root_file_idx].file_id(&db),
             Edition::CURRENT,
             Some(CrateDisplayName::from_canonical_name(crate_name)),
             None,
@@ -348,7 +348,7 @@ fn quux() { 92 }
 m!(Y);
 m!(Z);
 "#;
-    db.set_file_text(pos.file_id.file_id(), new_text);
+    db.set_file_text(pos.file_id.file_id(&db), new_text);

     {
         let events = db.log_executed(|| {

View file

@@ -917,7 +917,7 @@ fn handle_macro_def_scope(
         // and use its parent expansion.
         *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
         *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
-            let expansion = db.lookup_intern_macro_call(expansion);
+            let expansion = db.lookup_intern_macro_call(expansion.into());
             (parent_ctx.parent(db), expansion.def)
         });
     }
@@ -932,7 +932,7 @@ fn hygiene_info(
     if !hygiene_id.is_root() {
         let ctx = hygiene_id.lookup();
         ctx.outer_expn(db).map(|expansion| {
-            let expansion = db.lookup_intern_macro_call(expansion);
+            let expansion = db.lookup_intern_macro_call(expansion.into());
             (ctx.parent(db), expansion.def)
         })
     } else {

View file

@@ -8,7 +8,7 @@ use base_db::{
 };
 use hir_expand::{InFile, files::FilePosition};
 use salsa::{AsDynDatabase, Durability};
-use span::{EditionedFileId, FileId};
+use span::FileId;
 use syntax::{AstNode, algo, ast};
 use triomphe::Arc;
@@ -135,7 +135,7 @@ impl TestDB {
         for &krate in self.relevant_crates(file_id).iter() {
             let crate_def_map = self.crate_def_map(krate);
             for (local_id, data) in crate_def_map.modules() {
-                if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
+                if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
                     return crate_def_map.module_id(local_id);
                 }
             }
@@ -144,7 +144,7 @@ impl TestDB {
     }

     pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
-        let file_module = self.module_for_file(position.file_id.file_id());
+        let file_module = self.module_for_file(position.file_id.file_id(self));
         let mut def_map = file_module.def_map(self);
         let module = self.mod_at_position(&def_map, position);
@@ -246,10 +246,7 @@ impl TestDB {
         let source_map = self.body_with_source_map(def_with_body).1;
         let scopes = self.expr_scopes(def_with_body);

-        let editioned_file_id_wrapper =
-            base_db::EditionedFileId::new(self.as_dyn_database(), position.file_id);
-        let root_syntax_node = self.parse(editioned_file_id_wrapper).syntax_node();
+        let root_syntax_node = self.parse(position.file_id).syntax_node();
         let scope_iter =
             algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
                 let block = ast::BlockExpr::cast(node)?;

View file

@@ -1,8 +1,8 @@
 //! Builtin attributes.
 use intern::sym;
-use span::{MacroCallId, Span};
+use span::Span;

-use crate::{ExpandResult, MacroCallKind, db::ExpandDatabase, name, tt};
+use crate::{ExpandResult, MacroCallId, MacroCallKind, db::ExpandDatabase, name, tt};

 use super::quote;

View file

@@ -4,13 +4,13 @@ use intern::sym;
 use itertools::{Itertools, izip};
 use parser::SyntaxKind;
 use rustc_hash::FxHashSet;
-use span::{Edition, MacroCallId, Span, SyntaxContext};
+use span::{Edition, Span, SyntaxContext};
 use stdx::never;
 use syntax_bridge::DocCommentDesugarMode;
 use tracing::debug;

 use crate::{
-    ExpandError, ExpandResult,
+    ExpandError, ExpandResult, MacroCallId,
     builtin::quote::{dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::span_with_def_site_ctxt,
@@ -59,7 +59,7 @@ impl BuiltinDeriveExpander {
         tt: &tt::TopSubtree,
         span: Span,
     ) -> ExpandResult<tt::TopSubtree> {
-        let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+        let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
         self.expander()(db, span, tt)
     }
 }

View file

@@ -8,7 +8,7 @@ use intern::{
     sym::{self},
 };
 use mbe::{DelimiterKind, expect_fragment};
-use span::{Edition, EditionedFileId, FileId, Span};
+use span::{Edition, FileId, Span};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
@@ -17,7 +17,7 @@ use syntax::{
 use syntax_bridge::syntax_node_to_token_tree;

 use crate::{
-    ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
+    EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
     builtin::quote::{WithDelimiter, dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
@@ -72,7 +72,7 @@ impl BuiltinFnLikeExpander {
         tt: &tt::TopSubtree,
         span: Span,
     ) -> ExpandResult<tt::TopSubtree> {
-        let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+        let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
         self.expander()(db, id, tt, span)
     }
@@ -89,7 +89,7 @@ impl EagerExpander {
         tt: &tt::TopSubtree,
         span: Span,
     ) -> ExpandResult<tt::TopSubtree> {
-        let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+        let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
         self.expander()(db, id, tt, span)
     }
@@ -224,7 +224,7 @@ fn assert_expand(
     tt: &tt::TopSubtree,
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
-    let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+    let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);

     let mut iter = tt.iter();
@@ -345,7 +345,7 @@ fn panic_expand(
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
     let dollar_crate = dollar_crate(span);
-    let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+    let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);

     let mac = if use_panic_2021(db, call_site_span) {
         sym::panic_2021.clone()
@@ -376,7 +376,7 @@ fn unreachable_expand(
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
     let dollar_crate = dollar_crate(span);
-    let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+    let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);

     let mac = if use_panic_2021(db, call_site_span) {
         sym::unreachable_2021.clone()
@@ -407,7 +407,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
         let Some(expn) = span.ctx.outer_expn(db) else {
             break false;
         };
-        let expn = db.lookup_intern_macro_call(expn);
+        let expn = db.lookup_intern_macro_call(expn.into());
         // FIXME: Record allow_internal_unstable in the macro def (not been done yet because it
         // would consume quite a bit extra memory for all call locs...)
         // if let Some(features) = expn.def.allow_internal_unstable {
@@ -663,7 +663,7 @@ fn relative_file(
     err_span: Span,
 ) -> Result<EditionedFileId, ExpandError> {
     let lookup = db.lookup_intern_macro_call(call_id);
-    let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
+    let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(db);
     let path = AnchoredPath { anchor: call_site, path: path_str };
     let res: FileId = db
         .resolve_path(path)
@@ -672,7 +672,7 @@ fn relative_file(
     if res == call_site && !allow_recursion {
         Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
     } else {
-        Ok(EditionedFileId::new(res, lookup.krate.data(db).edition))
+        Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
     }
 }
@@ -731,10 +731,8 @@ fn include_expand(
     tt: &tt::TopSubtree,
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
-    let (file_id_wrapper, editioned_file_id) = match include_input_to_file_id(db, arg_id, tt) {
-        Ok(editioned_file_id) => {
-            (base_db::EditionedFileId::new(db, editioned_file_id), editioned_file_id)
-        }
+    let editioned_file_id = match include_input_to_file_id(db, arg_id, tt) {
+        Ok(editioned_file_id) => editioned_file_id,
         Err(e) => {
             return ExpandResult::new(
                 tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
@@ -745,7 +743,7 @@ fn include_expand(
     let span_map = db.real_span_map(editioned_file_id);
     // FIXME: Parse errors
     ExpandResult::ok(syntax_node_to_token_tree(
-        &db.parse(file_id_wrapper).syntax_node(),
+        &db.parse(editioned_file_id).syntax_node(),
         SpanMap::RealSpanMap(span_map),
         span,
         syntax_bridge::DocCommentDesugarMode::ProcMacro,
@@ -807,7 +805,7 @@ fn include_str_expand(
         }
     };

-    let text = db.file_text(file_id.file_id());
+    let text = db.file_text(file_id.file_id(db));
     let text = &*text.text(db);

     ExpandResult::ok(quote!(call_site =>#text))

View file

@@ -4,19 +4,15 @@ use base_db::{Crate, RootQueryDb};
 use either::Either;
 use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
-use salsa::plumbing::AsId;
-use span::{
-    AstIdMap, Edition, EditionedFileId, HirFileId, HirFileIdRepr, MacroCallId, MacroFileId, Span,
-    SyntaxContext,
-};
+use span::{AstIdMap, Edition, Span, SyntaxContext};
 use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
 use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
 use triomphe::Arc;

 use crate::{
     AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
-    EagerExpander, ExpandError, ExpandResult, ExpandTo, MacroCallKind, MacroCallLoc, MacroDefId,
-    MacroDefKind,
+    EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
+    MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
     attrs::{AttrId, collect_attrs},
     builtin::pseudo_derive_attr_expansion,
     cfg_process,
@@ -63,9 +59,10 @@ pub trait ExpandDatabase: RootQueryDb {
     fn proc_macros(&self) -> Arc<ProcMacros>;

     /// Incrementality query to prevent queries from directly depending on `ExpandDatabase::proc_macros`.
-    #[salsa::invoke_actual(crate::proc_macro::proc_macros_for_crate)]
+    #[salsa::invoke(crate::proc_macro::proc_macros_for_crate)]
     fn proc_macros_for_crate(&self, krate: Crate) -> Option<Arc<CrateProcMacros>>;

+    #[salsa::invoke(ast_id_map)]
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

     #[salsa::transparent]
@@ -75,7 +72,7 @@ pub trait ExpandDatabase: RootQueryDb {
     #[salsa::lru(512)]
     fn parse_macro_expansion(
         &self,
-        macro_file: span::MacroFileId,
+        macro_file: MacroCallId,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;

     #[salsa::transparent]
@@ -84,7 +81,7 @@ pub trait ExpandDatabase: RootQueryDb {
     #[salsa::transparent]
     #[salsa::invoke(crate::span_map::expansion_span_map)]
-    fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
+    fn expansion_span_map(&self, file_id: MacroCallId) -> Arc<ExpansionSpanMap>;

     #[salsa::invoke(crate::span_map::real_span_map)]
     fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
@@ -136,6 +133,7 @@ pub trait ExpandDatabase: RootQueryDb {
     /// directly depend on as that would cause to frequent invalidations, mainly because of the
     /// parse queries being LRU cached. If they weren't the invalidations would only happen if the
     /// user wrote in the file that defines the proc-macro.
+    #[salsa::invoke_interned(proc_macro_span)]
     fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;

     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
@@ -149,30 +147,17 @@ pub trait ExpandDatabase: RootQueryDb {
     fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
 }

-#[salsa::interned(no_lifetime, id = span::MacroCallId)]
-pub struct MacroCallWrapper {
-    pub loc: MacroCallLoc,
-}
-
-fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
-    MacroCallWrapper::new(db, macro_call).0
-}
-
-fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
-    MacroCallWrapper::ingredient(db).data(db.as_dyn_database(), macro_call.as_id()).0.clone()
-}
-
 #[salsa::interned(no_lifetime, id = span::SyntaxContext)]
 pub struct SyntaxContextWrapper {
     pub data: SyntaxContext,
 }

 fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContext {
-    match file.repr() {
-        HirFileIdRepr::FileId(_) => SyntaxContext::root(edition),
-        HirFileIdRepr::MacroFile(m) => {
-            let kind = db.lookup_intern_macro_call(m.macro_call_id).kind;
-            db.macro_arg_considering_derives(m.macro_call_id, &kind).2.ctx
+    match file {
+        HirFileId::FileId(_) => SyntaxContext::root(edition),
+        HirFileId::MacroFile(m) => {
+            let kind = db.lookup_intern_macro_call(m).kind;
+            db.macro_arg_considering_derives(m, &kind).2.ctx
         }
     }
 }
@ -300,9 +285,9 @@ pub fn expand_speculative(
loc.krate, loc.krate,
&tt, &tt,
attr_arg.as_ref(), attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition), span_with_def_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition), span_with_call_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition), span_with_mixed_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
) )
} }
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => { MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@ -346,20 +331,16 @@ pub fn expand_speculative(
Some((node.syntax_node(), token)) Some((node.syntax_node(), token))
} }
fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc<AstIdMap> { fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> triomphe::Arc<AstIdMap> {
triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id))) triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
} }
/// Main public API -- parses a hir file, not caring whether it's a real /// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion. /// file or a macro expansion.
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() { match file_id {
HirFileIdRepr::FileId(editioned_file_id) => { HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
let file_id = base_db::EditionedFileId::new(db, editioned_file_id); HirFileId::MacroFile(macro_file) => {
db.parse(file_id).syntax_node()
}
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node() db.parse_macro_expansion(macro_file).value.0.syntax_node()
} }
} }
@ -369,14 +350,13 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
// instead of having it be untyped // instead of having it be untyped
fn parse_macro_expansion( fn parse_macro_expansion(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_file: MacroFileId, macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> { ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::info_span!("parse_macro_expansion").entered(); let _p = tracing::info_span!("parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file);
let def_edition = loc.def.edition; let def_edition = loc.def.edition;
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
let mbe::ValueResult { value: (tt, matched_arm), err } = let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
macro_expand(db, macro_file.macro_call_id, loc);
let (parse, mut rev_token_map) = token_tree_to_syntax_node( let (parse, mut rev_token_map) = token_tree_to_syntax_node(
db, db,
@ -397,7 +377,7 @@ fn parse_macro_expansion_error(
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>> { ) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>> {
let e: ExpandResult<Arc<[SyntaxError]>> = let e: ExpandResult<Arc<[SyntaxError]>> =
db.parse_macro_expansion(MacroFileId { macro_call_id }).map(|it| Arc::from(it.0.errors())); db.parse_macro_expansion(macro_call_id).map(|it| Arc::from(it.0.errors()));
if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) } if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) }
} }
@ -405,16 +385,11 @@ pub(crate) fn parse_with_map(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
file_id: HirFileId, file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) { ) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() { match file_id {
HirFileIdRepr::FileId(editioned_file_id) => { HirFileId::FileId(file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id); (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
(
db.parse(file_id).to_syntax(),
SpanMap::RealSpanMap(db.real_span_map(editioned_file_id)),
)
} }
HirFileIdRepr::MacroFile(macro_file) => { HirFileId::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value; let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map)) (parse, SpanMap::ExpansionSpanMap(map))
} }
@ -734,9 +709,9 @@ fn expand_proc_macro(
loc.krate, loc.krate,
&macro_arg, &macro_arg,
attr_arg, attr_arg,
span_with_def_site_ctxt(db, span, id, loc.def.edition), span_with_def_site_ctxt(db, span, id.into(), loc.def.edition),
span_with_call_site_ctxt(db, span, id, loc.def.edition), span_with_call_site_ctxt(db, span, id.into(), loc.def.edition),
span_with_mixed_site_ctxt(db, span, id, loc.def.edition), span_with_mixed_site_ctxt(db, span, id.into(), loc.def.edition),
) )
}; };
@ -784,3 +759,11 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
}) })
} }
} }
fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
MacroCallId::new(db, macro_call)
}
fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
macro_call.loc(db)
}
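The manual wrapper around `span::MacroCallId` can go away because `#[salsa::interned]` already generates a constructor and per-field getters, which is all `intern_macro_call`/`lookup_intern_macro_call` need. A minimal, self-contained sketch of that pattern, assuming a recent Salsa with `no_lifetime` support and its stock `DatabaseImpl`; the `Name` type is hypothetical:

use salsa::DatabaseImpl;

#[salsa::interned(no_lifetime)]
struct Name {
    text: String,
}

fn main() {
    let db = DatabaseImpl::new();
    // `new` interns: equal data yields the same id.
    let a = Name::new(&db, "hello".to_owned());
    let b = Name::new(&db, "hello".to_owned());
    assert_eq!(a, b);
    // The generated getter is the lookup direction.
    assert_eq!(a.text(&db), "hello");
}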
View file
@ -2,14 +2,14 @@
use base_db::Crate; use base_db::Crate;
use intern::sym; use intern::sym;
use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContext}; use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt; use stdx::TupleExt;
use syntax::{AstNode, ast}; use syntax::{AstNode, ast};
use syntax_bridge::DocCommentDesugarMode; use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup, AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
attrs::RawAttrs, attrs::RawAttrs,
db::ExpandDatabase, db::ExpandDatabase,
hygiene::{Transparency, apply_mark}, hygiene::{Transparency, apply_mark},
@ -42,7 +42,10 @@ impl DeclarativeMacroExpander {
.mac .mac
.expand( .expand(
&tt, &tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition), |s| {
s.ctx =
apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition)
},
span, span,
loc.def.edition, loc.def.edition,
) )
@ -106,7 +109,8 @@ impl DeclarativeMacroExpander {
def_crate.data(db).edition def_crate.data(db).edition
} else { } else {
// UNWRAP-SAFETY: Only the root context has no outer expansion // UNWRAP-SAFETY: Only the root context has no outer expansion
let krate = db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap()).def.krate; let krate =
db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap().into()).def.krate;
krate.data(db).edition krate.data(db).edition
} }
}; };
@ -160,9 +164,9 @@ impl DeclarativeMacroExpander {
transparency(&macro_def).unwrap_or(Transparency::Opaque), transparency(&macro_def).unwrap_or(Transparency::Opaque),
), ),
}; };
let edition = ctx_edition(match id.file_id.repr() { let edition = ctx_edition(match id.file_id {
HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt, HirFileId::MacroFile(macro_file) => macro_file.lookup(db).ctxt,
HirFileIdRepr::FileId(file) => SyntaxContext::root(file.edition()), HirFileId::FileId(file) => SyntaxContext::root(file.edition(db)),
}); });
Arc::new(DeclarativeMacroExpander { mac, transparency, edition }) Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
} }
View file
@ -63,7 +63,7 @@ pub fn expand_eager_macro_input(
#[allow(deprecated)] // builtin eager macros are never derives #[allow(deprecated)] // builtin eager macros are never derives
let (_, _, span) = db.macro_arg(arg_id); let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file()); db.parse_macro_expansion(arg_id);
let mut arg_map = ExpansionSpanMap::empty(); let mut arg_map = ExpansionSpanMap::empty();
@ -73,7 +73,7 @@ pub fn expand_eager_macro_input(
&arg_exp_map, &arg_exp_map,
&mut arg_map, &mut arg_map,
TextSize::new(0), TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()), InFile::new(arg_id.into(), arg_exp.syntax_node()),
krate, krate,
call_site, call_site,
resolver, resolver,
@ -134,10 +134,8 @@ fn lazy_expand(
call_site, call_site,
); );
eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id); eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file) db.parse_macro_expansion(id).map(|parse| (InFile::new(id.into(), parse.0), parse.1))
.map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
} }
fn eager_macro_recur( fn eager_macro_recur(
@ -224,7 +222,7 @@ fn eager_macro_recur(
call_id, call_id,
); );
let ExpandResult { value: (parse, map), err: err2 } = let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file()); db.parse_macro_expansion(call_id);
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span)); map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
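The shape of these eager-expansion call sites, as a before/after sketch assuming `call_id: MacroCallId`:

// Before: the id had to be re-wrapped into a `span::MacroFileId` key.
// let res = db.parse_macro_expansion(call_id.as_macro_file());
// After: the interned id is the query key itself.
let res = db.parse_macro_expansion(call_id);
let (parse, span_map) = res.value;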
View file
@ -2,14 +2,11 @@
use std::borrow::Borrow; use std::borrow::Borrow;
use either::Either; use either::Either;
use span::{ use span::{AstIdNode, ErasedFileAstId, FileAstId, FileId, SyntaxContext};
AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId,
SyntaxContext,
};
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize}; use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize};
use crate::{ use crate::{
MacroFileIdExt, MacroKind, EditionedFileId, HirFileId, MacroCallId, MacroKind,
db::{self, ExpandDatabase}, db::{self, ExpandDatabase},
map_node_range_up, map_node_range_up_rooted, span_for_offset, map_node_range_up, map_node_range_up_rooted, span_for_offset,
}; };
@ -27,7 +24,7 @@ pub struct InFileWrapper<FileKind, T> {
pub value: T, pub value: T,
} }
pub type InFile<T> = InFileWrapper<HirFileId, T>; pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>; pub type InMacroFile<T> = InFileWrapper<MacroCallId, T>;
pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>; pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@ -36,12 +33,13 @@ pub struct FilePositionWrapper<FileKind> {
pub offset: TextSize, pub offset: TextSize,
} }
pub type HirFilePosition = FilePositionWrapper<HirFileId>; pub type HirFilePosition = FilePositionWrapper<HirFileId>;
pub type MacroFilePosition = FilePositionWrapper<MacroFileId>; pub type MacroFilePosition = FilePositionWrapper<MacroCallId>;
pub type FilePosition = FilePositionWrapper<EditionedFileId>; pub type FilePosition = FilePositionWrapper<EditionedFileId>;
impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> { impl FilePosition {
fn from(value: FilePositionWrapper<EditionedFileId>) -> Self { #[inline]
FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset } pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FilePositionWrapper<FileId> {
FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
} }
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@ -50,12 +48,13 @@ pub struct FileRangeWrapper<FileKind> {
pub range: TextRange, pub range: TextRange,
} }
pub type HirFileRange = FileRangeWrapper<HirFileId>; pub type HirFileRange = FileRangeWrapper<HirFileId>;
pub type MacroFileRange = FileRangeWrapper<MacroFileId>; pub type MacroFileRange = FileRangeWrapper<MacroCallId>;
pub type FileRange = FileRangeWrapper<EditionedFileId>; pub type FileRange = FileRangeWrapper<EditionedFileId>;
impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> { impl FileRange {
fn from(value: FileRangeWrapper<EditionedFileId>) -> Self { #[inline]
FileRangeWrapper { file_id: value.file_id.into(), range: value.range } pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper<FileId> {
FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range }
} }
} }
@ -163,12 +162,10 @@ trait FileIdToSyntax: Copy {
impl FileIdToSyntax for EditionedFileId { impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
let file_id = base_db::EditionedFileId::new(db, self); db.parse(self).syntax_node()
db.parse(file_id).syntax_node()
} }
} }
impl FileIdToSyntax for MacroFileId { impl FileIdToSyntax for MacroCallId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node() db.parse_macro_expansion(self).value.0.syntax_node()
} }
@ -221,7 +218,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() { let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)), Some(parent) => Some(node.with_value(parent)),
None => db None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) .lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db) .to_node_item(db)
.syntax() .syntax()
.cloned() .cloned()
@ -238,7 +235,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() { let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)), Some(parent) => Some(node.with_value(parent)),
None => db None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) .lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db) .to_node_item(db)
.syntax() .syntax()
.cloned() .cloned()
@ -278,11 +275,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
) -> Option<InRealFile<SyntaxNode>> { ) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files, // This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() { let file_id = match self.file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.borrow().clone() }); return Some(InRealFile { file_id, value: self.value.borrow().clone() });
} }
HirFileIdRepr::MacroFile(m) HirFileId::MacroFile(m)
if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) => if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
{ {
m m
@ -296,11 +293,9 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
self.value.borrow().text_range(), self.value.borrow().text_range(),
)?; )?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let kind = self.kind(); let kind = self.kind();
let value = db let value = db
.parse(file_id) .parse(editioned_file_id)
.syntax_node() .syntax_node()
.covering_element(range) .covering_element(range)
.ancestors() .ancestors()
@ -332,9 +327,9 @@ impl InMacroFile<SyntaxToken> {
impl InFile<SyntaxToken> { impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully. /// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileId::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => { HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset( let (range, ctxt) = span_for_offset(
db, db,
&db.expansion_span_map(mac_file), &db.expansion_span_map(mac_file),
@ -348,7 +343,7 @@ impl InFile<SyntaxToken> {
} }
// Fall back to whole macro call. // Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db) loc.kind.original_call_range(db)
} }
} }
@ -356,11 +351,11 @@ impl InFile<SyntaxToken> {
/// Attempts to map the syntax node back up its macro calls. /// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> { pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() }) Some(FileRange { file_id, range: self.value.text_range() })
} }
HirFileIdRepr::MacroFile(mac_file) => { HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset( let (range, ctxt) = span_for_offset(
db, db,
&db.expansion_span_map(mac_file), &db.expansion_span_map(mac_file),
@ -386,15 +381,15 @@ impl InFile<TextRange> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> (FileRange, SyntaxContext) { ) -> (FileRange, SyntaxContext) {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition())) (FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition(db)))
} }
HirFileIdRepr::MacroFile(mac_file) => { HirFileId::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it, Some(it) => it,
None => { None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file);
(loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition)) (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
} }
} }
@ -403,13 +398,13 @@ impl InFile<TextRange> {
} }
pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => { HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) { match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it, Some(it) => it,
_ => { _ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db) loc.kind.original_call_range(db)
} }
} }
@ -421,13 +416,13 @@ impl InFile<TextRange> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> FileRange { ) -> FileRange {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => { HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) { match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it, Some(it) => it,
_ => { _ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range_with_body(db) loc.kind.original_call_range_with_body(db)
} }
} }
@ -439,12 +434,12 @@ impl InFile<TextRange> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContext)> { ) -> Option<(FileRange, SyntaxContext)> {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => Some(( HirFileId::FileId(file_id) => Some((
FileRange { file_id, range: self.value }, FileRange { file_id, range: self.value },
SyntaxContext::root(file_id.edition()), SyntaxContext::root(file_id.edition(db)),
)), )),
HirFileIdRepr::MacroFile(mac_file) => { HirFileId::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
} }
} }
@ -455,11 +450,11 @@ impl<N: AstNode> InFile<N> {
pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> { pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files, // This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() { let file_id = match self.file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value }); return Some(InRealFile { file_id, value: self.value });
} }
HirFileIdRepr::MacroFile(m) => m, HirFileId::MacroFile(m) => m,
}; };
if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) { if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
return None; return None;
@ -471,10 +466,8 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().text_range(), self.value.syntax().text_range(),
)?; )?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range); let anc = db.parse(editioned_file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?; let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(editioned_file_id, value)) Some(InRealFile::new(editioned_file_id, value))
} }
@ -482,9 +475,9 @@ impl<N: AstNode> InFile<N> {
impl<T> InFile<T> { impl<T> InFile<T> {
pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> { pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
match self.file_id.repr() { match self.file_id {
HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }), HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileIdRepr::MacroFile(_) => Err(self), HirFileId::MacroFile(_) => Err(self),
} }
} }
} }
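With `HirFileId` now a plain enum, the `repr()` indirection disappears throughout this file; a sketch of the resulting consumer-side shape, assuming `node: InFile<SyntaxNode>` and `db` in scope:

let syntax = match node.file_id {
    // Real file: parse directly via the interned EditionedFileId.
    HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
    // Macro expansion: parse the expansion output instead.
    HirFileId::MacroFile(call) => db.parse_macro_expansion(call).value.0.syntax_node(),
};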
View file
@ -81,7 +81,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, call_id, transparency, edition); return apply_mark_internal(db, ctxt, call_id, transparency, edition);
} }
let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt; let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db) call_site_ctxt.normalize_to_macros_2_0(db)
} else { } else {
View file
@ -27,6 +27,7 @@ mod prettify_macro_expansion_;
use attrs::collect_attrs; use attrs::collect_attrs;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt; use stdx::TupleExt;
use triomphe::Arc; use triomphe::Arc;
@ -35,10 +36,7 @@ use std::hash::Hash;
use base_db::Crate; use base_db::Crate;
use either::Either; use either::Either;
use span::{ use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
SyntaxContext,
};
use syntax::{ use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize, SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode}, ast::{self, AstNode},
@ -62,8 +60,8 @@ pub use crate::{
prettify_macro_expansion_::prettify_macro_expansion, prettify_macro_expansion_::prettify_macro_expansion,
}; };
pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, ValueResult}; pub use mbe::{DeclarativeMacro, ValueResult};
pub use span::{HirFileId, MacroCallId, MacroFileId};
pub mod tt { pub mod tt {
pub use span::Span; pub use span::Span;
@ -343,51 +341,34 @@ pub enum MacroCallKind {
}, },
} }
pub trait HirFileIdExt { impl HirFileId {
fn edition(self, db: &dyn ExpandDatabase) -> Edition; pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
/// Returns the original file of this macro call hierarchy. match self {
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId; HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(),
HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition,
/// Returns the original file of this macro call hierarchy while going into the included file if
/// one of the calls comes from an `include!``.
fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
}
impl HirFileIdExt for HirFileId {
fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self.repr() {
HirFileIdRepr::FileId(file_id) => file_id.edition(),
HirFileIdRepr::MacroFile(m) => db.lookup_intern_macro_call(m.macro_call_id).def.edition,
} }
} }
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId { pub fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
let mut file_id = self; let mut file_id = self;
loop { loop {
match file_id.repr() { match file_id {
HirFileIdRepr::FileId(id) => break id, HirFileId::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { HirFileId::MacroFile(macro_call_id) => {
file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id() file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
} }
} }
} }
} }
fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId { pub fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
loop { loop {
match self.repr() { match self {
HirFileIdRepr::FileId(id) => break id, HirFileId::FileId(id) => break id,
HirFileIdRepr::MacroFile(file) => { HirFileId::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file.macro_call_id); let loc = db.lookup_intern_macro_call(file);
if loc.def.is_include() { if loc.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind { if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
if let Ok(it) = if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) {
include_input_to_file_id(db, file.macro_call_id, &eager.arg)
{
break it; break it;
} }
} }
@ -398,23 +379,26 @@ impl HirFileIdExt for HirFileId {
} }
} }
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> { pub fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); let mut call = db.lookup_intern_macro_call(self.macro_file()?).to_node(db);
loop { loop {
match call.file_id.repr() { match call.file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
break Some(InRealFile { file_id, value: call.value }); break Some(InRealFile { file_id, value: call.value });
} }
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { HirFileId::MacroFile(macro_call_id) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db); call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
} }
} }
} }
} }
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> { pub fn as_builtin_derive_attr_node(
&self,
db: &dyn ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?; let macro_file = self.macro_file()?;
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file);
let attr = match loc.def.kind { let attr = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) => loc.to_node(db), MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
_ => return None, _ => return None,
@ -441,57 +425,34 @@ pub enum MacroKind {
ProcMacro, ProcMacro,
} }
pub trait MacroFileIdExt { impl MacroCallId {
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool; pub fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool; db.lookup_intern_macro_call(self).to_node(db)
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId;
fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;
/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool;
}
impl MacroFileIdExt for MacroFileId {
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
} }
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 { pub fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
let mut level = 0; let mut level = 0;
let mut macro_file = self; let mut macro_file = self;
loop { loop {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file);
level += 1; level += 1;
macro_file = match loc.kind.file_id().repr() { macro_file = match loc.kind.file_id() {
HirFileIdRepr::FileId(_) => break level, HirFileId::FileId(_) => break level,
HirFileIdRepr::MacroFile(it) => it, HirFileId::MacroFile(it) => it,
}; };
} }
} }
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId { pub fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
db.lookup_intern_macro_call(self.macro_call_id).kind.file_id() db.lookup_intern_macro_call(self).kind.file_id()
} }
/// Return expansion information if it is a macro-expansion file /// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo { pub fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
ExpansionInfo::new(db, self) ExpansionInfo::new(db, self)
} }
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind { pub fn kind(self, db: &dyn ExpandDatabase) -> MacroKind {
match db.lookup_intern_macro_call(self.macro_call_id).def.kind { match db.lookup_intern_macro_call(self).def.kind {
MacroDefKind::Declarative(..) => MacroKind::Declarative, MacroDefKind::Declarative(..) => MacroKind::Declarative,
MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => { MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
MacroKind::DeclarativeBuiltIn MacroKind::DeclarativeBuiltIn
@ -504,33 +465,33 @@ impl MacroFileIdExt for MacroFileId {
} }
} }
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool { pub fn is_include_macro(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include() db.lookup_intern_macro_call(self).def.is_include()
} }
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool { pub fn is_include_like_macro(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like() db.lookup_intern_macro_call(self).def.is_include_like()
} }
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool { pub fn is_env_or_option_env(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env() db.lookup_intern_macro_call(self).def.is_env_or_option_env()
} }
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool { pub fn is_eager(self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id); let loc = db.lookup_intern_macro_call(self);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
} }
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> { pub fn eager_arg(self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
let loc = db.lookup_intern_macro_call(self.macro_call_id); let loc = db.lookup_intern_macro_call(self);
match &loc.kind { match &loc.kind {
MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id), MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
_ => None, _ => None,
} }
} }
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { pub fn is_derive_attr_pseudo_expansion(self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id); let loc = db.lookup_intern_macro_call(self);
loc.def.is_attribute_derive() loc.def.is_attribute_derive()
} }
} }
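Since `MacroFileIdExt` is folded into inherent methods, callers drop the extension-trait import; a sketch, assuming `call_id: MacroCallId` and `db` in scope:

// No `use hir_expand::MacroFileIdExt;` needed any more.
let depth = call_id.expansion_level(db);
let parent: HirFileId = call_id.parent(db);
if call_id.is_include_macro(db) {
    // include!-expanded files typically need special casing.
}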
@ -728,11 +689,11 @@ impl MacroCallKind {
pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange { pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self; let mut kind = self;
let file_id = loop { let file_id = loop {
match kind.file_id().repr() { match kind.file_id() {
HirFileIdRepr::MacroFile(file) => { HirFileId::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file.macro_call_id).kind; kind = db.lookup_intern_macro_call(file).kind;
} }
HirFileIdRepr::FileId(file_id) => break file_id, HirFileId::FileId(file_id) => break file_id,
} }
}; };
@ -753,11 +714,11 @@ impl MacroCallKind {
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange { pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self; let mut kind = self;
let file_id = loop { let file_id = loop {
match kind.file_id().repr() { match kind.file_id() {
HirFileIdRepr::MacroFile(file) => { HirFileId::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file.macro_call_id).kind; kind = db.lookup_intern_macro_call(file).kind;
} }
HirFileIdRepr::FileId(file_id) => break file_id, HirFileId::FileId(file_id) => break file_id,
} }
}; };
@ -898,7 +859,7 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start()); let span = self.exp_map.span_at(token.start());
match &self.arg_map { match &self.arg_map {
SpanMap::RealSpanMap(_) => { SpanMap::RealSpanMap(_) => {
let file_id = span.anchor.file_id.into(); let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
let anchor_offset = let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@ -921,9 +882,9 @@ impl ExpansionInfo {
} }
} }
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { pub fn new(db: &dyn ExpandDatabase, macro_file: MacroCallId) -> ExpansionInfo {
let _p = tracing::info_span!("ExpansionInfo::new").entered(); let _p = tracing::info_span!("ExpansionInfo::new").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file);
let arg_tt = loc.kind.arg(db); let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id); let arg_map = db.span_map(arg_tt.file_id);
@ -955,9 +916,10 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start()); start = start.min(span.range.start());
end = end.max(span.range.end()); end = end.max(span.range.end());
} }
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset = let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset }) Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
} }
/// Maps up the text range out of the expansion hierarchy back into the original file its from. /// Maps up the text range out of the expansion hierarchy back into the original file its from.
@ -980,12 +942,10 @@ pub fn map_node_range_up(
start = start.min(span.range.start()); start = start.min(span.range.start());
end = end.max(span.range.end()); end = end.max(span.range.end());
} }
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset = let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(( Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
ctx,
))
} }
/// Maps up the text range out of the expansion hierarchy back into the original file its from. /// Maps up the text range out of the expansion hierarchy back into the original file its from.
@ -1004,8 +964,9 @@ pub fn map_node_range_up_aggregated(
); );
} }
for ((anchor, _), range) in &mut map { for ((anchor, _), range) in &mut map {
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset = let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset; *range += anchor_offset;
} }
map map
@ -1018,12 +979,10 @@ pub fn span_for_offset(
offset: TextSize, offset: TextSize,
) -> (FileRange, SyntaxContext) { ) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset); let span = exp_map.span_at(offset);
let anchor_offset = db let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
.ast_id_map(span.anchor.file_id.into()) let anchor_offset =
.get_erased(span.anchor.ast_id) db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
.text_range() (FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
.start();
(FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
} }
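The pattern shared by the `map_node_range_up*` helpers and `span_for_offset`: spans store a raw `span::EditionedFileId` anchor, which is interned once before any database access. A sketch, assuming a `span` in scope:

// `span.anchor.file_id` is the raw id kept in the span map; interning it
// yields the id that the queries are keyed by.
let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
let anchor_offset = db
    .ast_id_map(file_id.into()) // `.into()` builds HirFileId::FileId(file_id)
    .get_erased(span.anchor.ast_id)
    .text_range()
    .start();
let absolute_range = span.range + anchor_offset;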
/// In Rust, macros expand token trees to token trees. When we want to turn a /// In Rust, macros expand token trees to token trees. When we want to turn a
@ -1091,3 +1050,77 @@ impl ExpandTo {
} }
intern::impl_internable!(ModPath, attrs::AttrInput); intern::impl_internable!(ModPath, attrs::AttrInput);
#[salsa::interned(no_lifetime)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
}
impl From<span::MacroCallId> for MacroCallId {
#[inline]
fn from(value: span::MacroCallId) -> Self {
MacroCallId::from_id(value.0)
}
}
impl From<MacroCallId> for span::MacroCallId {
#[inline]
fn from(value: MacroCallId) -> span::MacroCallId {
span::MacroCallId(value.as_id())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum HirFileId {
FileId(EditionedFileId),
MacroFile(MacroCallId),
}
impl From<EditionedFileId> for HirFileId {
#[inline]
fn from(file_id: EditionedFileId) -> Self {
HirFileId::FileId(file_id)
}
}
impl From<MacroCallId> for HirFileId {
#[inline]
fn from(file_id: MacroCallId) -> Self {
HirFileId::MacroFile(file_id)
}
}
impl HirFileId {
#[inline]
pub fn macro_file(self) -> Option<MacroCallId> {
match self {
HirFileId::FileId(_) => None,
HirFileId::MacroFile(it) => Some(it),
}
}
#[inline]
pub fn is_macro(self) -> bool {
matches!(self, HirFileId::MacroFile(_))
}
#[inline]
pub fn file_id(self) -> Option<EditionedFileId> {
match self {
HirFileId::FileId(it) => Some(it),
HirFileId::MacroFile(_) => None,
}
}
}
impl PartialEq<EditionedFileId> for HirFileId {
fn eq(&self, &other: &EditionedFileId) -> bool {
*self == HirFileId::from(other)
}
}
impl PartialEq<HirFileId> for EditionedFileId {
fn eq(&self, &other: &HirFileId) -> bool {
other == HirFileId::from(*self)
}
}
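How the new enum composes, as a usage sketch; `salsa::Supertype` lets queries accept a `HirFileId` while the two variants remain distinct Salsa ids:

// Both id kinds lift into HirFileId via From/Into.
let in_file: HirFileId = editioned_file_id.into();
let in_macro: HirFileId = macro_call_id.into();
assert!(!in_file.is_macro() && in_macro.is_macro());
assert_eq!(in_macro.macro_file(), Some(macro_call_id));
// The PartialEq impls above allow comparing without lifting explicitly.
assert!(in_file == editioned_file_id);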
View file
@ -278,7 +278,7 @@ fn convert_path(
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctx = span_for_range(segment.syntax().text_range()); let syn_ctx = span_for_range(segment.syntax().text_range());
if let Some(macro_call_id) = syn_ctx.outer_expn(db) { if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner { if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) { mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root), Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate, None => PathKind::Crate,
@ -353,7 +353,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> O
result_mark = Some(mark); result_mark = Some(mark);
} }
result_mark.map(|call| db.lookup_intern_macro_call(call).def.krate) result_mark.map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
} }
pub use crate::name as __name; pub use crate::name as __name;
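Hygiene data still stores raw `span::MacroCallId`s, hence the `.into()` lifts above; both directions are plain id conversions. A sketch, assuming `ctx: SyntaxContext` and `db` in scope:

let raw: span::MacroCallId = ctx.outer_expn(db).unwrap();
let interned: MacroCallId = raw.into();          // lift the raw id
let loc = db.lookup_intern_macro_call(interned); // query by the interned id
let back: span::MacroCallId = interned.into();   // and back, id for id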
View file
@ -26,7 +26,7 @@ pub fn prettify_macro_expansion(
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| { syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let macro_call_id = let macro_call_id =
ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`"); ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call = db.lookup_intern_macro_call(macro_call_id); let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
let macro_def_crate = macro_call.def.krate; let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`. // First, if this is the same crate as the macro, nothing will work but `crate`.
// If not, if the target trait has the macro's crate as a dependency, using the dependency name // If not, if the target trait has the macro's crate as a dependency, using the dependency name
View file
@ -1,13 +1,13 @@
//! Span maps for real files and macro expansions. //! Span maps for real files and macro expansions.
use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContext}; use span::{Span, SyntaxContext};
use stdx::TupleExt; use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast}; use syntax::{AstNode, TextRange, ast};
use triomphe::Arc; use triomphe::Arc;
pub use span::RealSpanMap; pub use span::RealSpanMap;
use crate::{attrs::collect_attrs, db::ExpandDatabase}; use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>; pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@ -61,9 +61,9 @@ impl SpanMap {
#[inline] #[inline]
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() { match file_id {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), HirFileId::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => { HirFileId::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1) SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
} }
} }
@ -81,15 +81,13 @@ impl SpanMapRef<'_> {
pub(crate) fn real_span_map( pub(crate) fn real_span_map(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
editioned_file_id: EditionedFileId, editioned_file_id: base_db::EditionedFileId,
) -> Arc<RealSpanMap> { ) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem; use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(editioned_file_id.into()); let ast_id_map = db.ast_id_map(editioned_file_id.into());
let file_id = base_db::EditionedFileId::new(db, editioned_file_id); let tree = db.parse(editioned_file_id).tree();
let tree = db.parse(file_id).tree();
// This is an incrementality layer. Basically we can't use absolute ranges for our spans as that // This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// would mean we'd invalidate everything whenever we type. So instead we make the text ranges // would mean we'd invalidate everything whenever we type. So instead we make the text ranges
// relative to some AstIds reducing the risk of invalidation as typing somewhere no longer // relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
@ -140,7 +138,7 @@ pub(crate) fn real_span_map(
}); });
Arc::new(RealSpanMap::from_file( Arc::new(RealSpanMap::from_file(
editioned_file_id, editioned_file_id.editioned_file_id(db),
pairs.into_boxed_slice(), pairs.into_boxed_slice(),
tree.syntax().text_range().end(), tree.syntax().text_range().end(),
)) ))
@ -148,7 +146,7 @@ pub(crate) fn real_span_map(
pub(crate) fn expansion_span_map( pub(crate) fn expansion_span_map(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
file_id: MacroFileId, file_id: MacroCallId,
) -> Arc<ExpansionSpanMap> { ) -> Arc<ExpansionSpanMap> {
db.parse_macro_expansion(file_id).value.1 db.parse_macro_expansion(file_id).value.1
} }
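The incrementality comment in `real_span_map` reduces to simple arithmetic: ranges are stored relative to an anchoring item, so edits above the anchor leave them untouched. A worked sketch with hypothetical offsets:

use syntax::{TextRange, TextSize};

// Suppose the anchoring item currently starts at offset 120 and the span
// covers bytes 4..9 inside it.
let anchor_offset = TextSize::new(120);
let relative = TextRange::new(TextSize::new(4), TextSize::new(9));
let absolute = relative + anchor_offset; // 124..129 in today's file text
// If an edit inserts 10 bytes above the anchor, only `anchor_offset` moves
// (to 130); `relative`, and everything keyed on it, stays valid.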
View file
@ -16,13 +16,8 @@ use triomphe::Arc;
use crate::{ use crate::{
Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
TraitEnvironment, Ty, TyBuilder, TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics,
db::{HirDatabase, HirDatabaseData}, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
display::DisplayTarget,
generics::Generics,
infer::InferenceContext,
lower::ParamLoweringMode,
to_placeholder_idx,
}; };
use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16}; use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16};
@ -228,7 +223,6 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
pub(crate) fn const_eval_recover( pub(crate) fn const_eval_recover(
_: &dyn HirDatabase, _: &dyn HirDatabase,
_: &Cycle, _: &Cycle,
_: HirDatabaseData,
_: GeneralConstId, _: GeneralConstId,
_: Substitution, _: Substitution,
_: Option<Arc<TraitEnvironment>>, _: Option<Arc<TraitEnvironment>>,
View file
@ -1,11 +1,11 @@
use base_db::RootQueryDb; use base_db::RootQueryDb;
use chalk_ir::Substitution; use chalk_ir::Substitution;
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use hir_expand::EditionedFileId;
use rustc_apfloat::{ use rustc_apfloat::{
Float, Float,
ieee::{Half as f16, Quad as f128}, ieee::{Half as f16, Quad as f128},
}; };
use span::EditionedFileId;
use test_fixture::WithFixture; use test_fixture::WithFixture;
use test_utils::skip_slow_tests; use test_utils::skip_slow_tests;
@ -116,14 +116,14 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
} }
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> { fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> {
let module_id = db.module_for_file(file_id.file_id()); let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db); let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope; let scope = &def_map[module_id.local_id].scope;
let const_id = scope let const_id = scope
.declarations() .declarations()
.find_map(|x| match x { .find_map(|x| match x {
hir_def::ModuleDefId::ConstId(x) => { hir_def::ModuleDefId::ConstId(x) => {
if db.const_signature(x).name.as_ref()?.display(db, file_id.edition()).to_string() if db.const_signature(x).name.as_ref()?.display(db, file_id.edition(db)).to_string()
== "GOAL" == "GOAL"
{ {
Some(x) Some(x)
View file
@ -30,16 +30,16 @@ use crate::{
#[query_group::query_group] #[query_group::query_group]
pub trait HirDatabase: DefDatabase + std::fmt::Debug { pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke_actual(crate::infer::infer_query)] #[salsa::invoke(crate::infer::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>; fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
// region:mir // region:mir
#[salsa::invoke_actual(crate::mir::mir_body_query)] #[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(crate::mir::mir_body_recover)] #[salsa::cycle(crate::mir::mir_body_recover)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>; fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke_actual(crate::mir::mir_body_for_closure_query)] #[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>; fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)] #[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
@ -59,7 +59,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>; ) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke_actual(crate::mir::borrowck_query)] #[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)] #[salsa::lru(2024)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>; fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
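Call sites are unaffected by the `invoke_actual` to `invoke` rename; queries stay memoized database methods. A sketch, assuming `db: &dyn HirDatabase` and a `def: DefWithBodyId` in scope:

let inference = db.infer(def);  // Arc<InferenceResult>, memoized by Salsa
let mir = db.mir_body(def);     // Result<Arc<MirBody>, _>, cycle-recovered via mir_body_recover
let borrows = db.borrowck(def); // Result<_, _>, LRU-capped at 2024 entries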
@ -72,11 +72,11 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
trait_env: Option<Arc<TraitEnvironment>>, trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>; ) -> Result<Const, ConstEvalError>;
#[salsa::invoke_actual(crate::consteval::const_eval_static_query)] #[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)] #[salsa::cycle(crate::consteval::const_eval_static_recover)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>; fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
#[salsa::invoke_actual(crate::consteval::const_eval_discriminant_variant)] #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)] #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>; fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
@ -103,10 +103,10 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::cycle(crate::layout::layout_of_ty_recover)] #[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>; fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke_actual(crate::layout::target_data_layout_query)] #[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: Crate) -> Result<Arc<TargetDataLayout>, Arc<str>>; fn target_data_layout(&self, krate: Crate) -> Result<Arc<TargetDataLayout>, Arc<str>>;
#[salsa::invoke_actual(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>; fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[salsa::invoke(crate::lower::ty_query)] #[salsa::invoke(crate::lower::ty_query)]
@ -114,53 +114,54 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn ty(&self, def: TyDefId) -> Binders<Ty>; fn ty(&self, def: TyDefId) -> Binders<Ty>;
#[salsa::cycle(crate::lower::type_for_type_alias_with_diagnostics_query_recover)] #[salsa::cycle(crate::lower::type_for_type_alias_with_diagnostics_query_recover)]
#[salsa::invoke_actual(crate::lower::type_for_type_alias_with_diagnostics_query)] #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics); fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor. /// a `StructId` or `EnumVariantId` with a record constructor.
#[salsa::invoke_actual(crate::lower::value_ty_query)] #[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>; fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
#[salsa::invoke_actual(crate::lower::impl_self_ty_with_diagnostics_query)] #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)] #[salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics); fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
#[salsa::invoke_actual(crate::lower::impl_self_ty_query)] #[salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::transparent] #[salsa::transparent]
fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>; fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)] // FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics); fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[salsa::invoke(crate::lower::const_param_ty_query)] #[salsa::invoke(crate::lower::const_param_ty_query)]
#[salsa::transparent] #[salsa::transparent]
fn const_param_ty(&self, def: ConstParamId) -> Ty; fn const_param_ty(&self, def: ConstParamId) -> Ty;
#[salsa::invoke_actual(crate::lower::impl_trait_with_diagnostics_query)] #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>; fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
#[salsa::invoke_actual(crate::lower::impl_trait_query)] #[salsa::invoke(crate::lower::impl_trait_query)]
#[salsa::transparent] #[salsa::transparent]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>; fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
#[salsa::invoke_actual(crate::lower::field_types_with_diagnostics_query)] #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics( fn field_types_with_diagnostics(
&self, &self,
var: VariantId, var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics); ) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
#[salsa::invoke_actual(crate::lower::field_types_query)] #[salsa::invoke(crate::lower::field_types_query)]
#[salsa::transparent] #[salsa::transparent]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>; fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
#[salsa::invoke_actual(crate::lower::callable_item_signature_query)] #[salsa::invoke(crate::lower::callable_item_signature_query)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
#[salsa::invoke_actual(crate::lower::return_type_impl_traits)] #[salsa::invoke(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>; fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
#[salsa::invoke_actual(crate::lower::type_alias_impl_traits)] #[salsa::invoke(crate::lower::type_alias_impl_traits)]
fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>; fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)] #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
@ -172,41 +173,41 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
assoc_name: Option<Name>, assoc_name: Option<Name>,
) -> GenericPredicates; ) -> GenericPredicates;
#[salsa::invoke_actual(crate::lower::generic_predicates_query)] #[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates; fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_with_diagnostics_query)] #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
fn generic_predicates_without_parent_with_diagnostics( fn generic_predicates_without_parent_with_diagnostics(
&self, &self,
def: GenericDefId, def: GenericDefId,
) -> (GenericPredicates, Diagnostics); ) -> (GenericPredicates, Diagnostics);
#[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_query)] #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
#[salsa::transparent] #[salsa::transparent]
fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates; fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke_actual(crate::lower::trait_environment_for_body_query)] #[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent] #[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>; fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[salsa::invoke_actual(crate::lower::trait_environment_query)] #[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>; fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[salsa::invoke_actual(crate::lower::generic_defaults_with_diagnostics_query)] #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)] #[salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
fn generic_defaults_with_diagnostics( fn generic_defaults_with_diagnostics(
&self, &self,
def: GenericDefId, def: GenericDefId,
) -> (GenericDefaults, Diagnostics); ) -> (GenericDefaults, Diagnostics);
#[salsa::invoke_actual(crate::lower::generic_defaults_query)] #[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::transparent] #[salsa::transparent]
fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults; fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
#[salsa::invoke_actual(InherentImpls::inherent_impls_in_crate_query)] #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>; fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>;
#[salsa::invoke_actual(InherentImpls::inherent_impls_in_block_query)] #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>; fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
/// Collects all crates in the dependency graph that have impls for the /// Collects all crates in the dependency graph that have impls for the
@ -220,13 +221,13 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fp: TyFingerprint, fp: TyFingerprint,
) -> SmallVec<[Crate; 2]>; ) -> SmallVec<[Crate; 2]>;
#[salsa::invoke_actual(TraitImpls::trait_impls_in_crate_query)] #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>; fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>;
#[salsa::invoke_actual(TraitImpls::trait_impls_in_block_query)] #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>; fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
#[salsa::invoke_actual(TraitImpls::trait_impls_in_deps_query)] #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>; fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>;
// Interned IDs for Chalk integration // Interned IDs for Chalk integration
@ -251,7 +252,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::interned] #[salsa::interned]
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
#[salsa::invoke_actual(chalk_db::associated_ty_data_query)] #[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>; fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[salsa::invoke(chalk_db::trait_datum_query)] #[salsa::invoke(chalk_db::trait_datum_query)]
@ -268,16 +269,16 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId) fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId)
-> sync::Arc<chalk_db::ImplDatum>; -> sync::Arc<chalk_db::ImplDatum>;
#[salsa::invoke_actual(chalk_db::fn_def_datum_query)] #[salsa::invoke(chalk_db::fn_def_datum_query)]
fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>; fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>;
#[salsa::invoke_actual(chalk_db::fn_def_variance_query)] #[salsa::invoke(chalk_db::fn_def_variance_query)]
fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances; fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances;
#[salsa::invoke_actual(chalk_db::adt_variance_query)] #[salsa::invoke(chalk_db::adt_variance_query)]
fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances; fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances;
#[salsa::invoke_actual(crate::variance::variances_of)] #[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(crate::variance::variances_of_cycle)] #[salsa::cycle(crate::variance::variances_of_cycle)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>; fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
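The `HirDatabase` hunks above are mostly one mechanical rename: every `#[salsa::invoke_actual(path)]` becomes plain `#[salsa::invoke(path)]`, with `#[salsa::invoke_interned(path)]` kept only for the one query flagged by the new FIXME. What the attribute expresses is delegation from a trait method to a free function; a dependency-free sketch of that shape (all names below are illustrative stand-ins, not rust-analyzer's real types):

trait ExampleDatabase {
    fn label(&self) -> &'static str;
    // With `#[salsa::invoke(lower::example_query)]`, salsa generates this
    // method's forwarding body; below it is written out by hand.
    fn example(&self, def: u32) -> String;
}

mod lower {
    // Stand-in for a query implementation such as `crate::lower::ty_query`.
    pub fn example_query(db: &dyn super::ExampleDatabase, def: u32) -> String {
        format!("computed {} against {}", def, db.label())
    }
}

struct TestDB;

impl ExampleDatabase for TestDB {
    fn label(&self) -> &'static str { "TestDB" }
    fn example(&self, def: u32) -> String {
        lower::example_query(self, def) // the hand-written "generated" shim
    }
}

fn main() {
    println!("{}", TestDB.example(7));
}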

View file

@ -21,7 +21,7 @@ use hir_def::{
item_tree::FieldsShape, signatures::StaticFlags, src::HasSource, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
}; };
use hir_expand::{ use hir_expand::{
HirFileId, HirFileIdExt, HirFileId,
name::{AsName, Name}, name::{AsName, Name},
}; };
use intern::sym; use intern::sym;
@ -644,7 +644,7 @@ impl<'a> DeclValidator<'a> {
return; return;
}; };
let edition = file_id.original_file(self.db).edition(); let edition = file_id.original_file(self.db).edition(self.db);
let diagnostic = IncorrectCase { let diagnostic = IncorrectCase {
file: file_id, file: file_id,
ident_type, ident_type,
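The `edition(self.db)` call above is the visible end of the central change: `EditionedFileId` used to be a plain value type in the `span` crate (a `FileId` with its `Edition` packed alongside), so accessors needed no context. As a Salsa-interned struct it is only a handle, meaningless without the database that owns the interner, which is why this hunk and dozens below thread a `db` argument through `edition(..)`, `file_id(..)` and `unpack(..)`. A stand-alone model of the new shape, with simplified stand-in types:

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Edition { Edition2021, Edition2024 }

// Stand-in for the Salsa interner owned by the database.
struct Database { interned: Vec<(FileId, Edition)> }

// An opaque handle: just an index into the interner.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct EditionedFileId(usize);

impl Database {
    fn intern_file(&mut self, file_id: FileId, edition: Edition) -> EditionedFileId {
        self.interned.push((file_id, edition));
        EditionedFileId(self.interned.len() - 1)
    }
}

impl EditionedFileId {
    // Post-migration accessors: each one needs the database.
    fn file_id(self, db: &Database) -> FileId { db.interned[self.0].0 }
    fn edition(self, db: &Database) -> Edition { db.interned[self.0].1 }
    fn unpack(self, db: &Database) -> (FileId, Edition) { db.interned[self.0] }
}

fn main() {
    let mut db = Database { interned: Vec::new() };
    let file_id = db.intern_file(FileId(0), Edition::Edition2024);
    // Call sites migrate exactly as the hunks show: `file_id.edition()`
    // becomes `file_id.edition(&db)`, and queries that want a raw file id
    // (e.g. `module_for_file`) get `file_id.file_id(&db)`.
    assert_eq!(file_id.edition(&db), Edition::Edition2024);
    assert_eq!(file_id.unpack(&db), (FileId(0), Edition::Edition2024));
}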

View file

@ -7,12 +7,11 @@ use hir_def::signatures::StructFlags;
use stdx::never; use stdx::never;
use triomphe::Arc; use triomphe::Arc;
use crate::db::HirDatabaseData;
use crate::{ use crate::{
AliasTy, Canonical, CanonicalVarKinds, InEnvironment, Interner, ProjectionTy, TraitEnvironment, AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment,
Ty, TyBuilder, TyKind, db::HirDatabase, method_resolution::TyFingerprint, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase,
method_resolution::TyFingerprint,
}; };
use crate::{ConcreteConst, ConstScalar, ConstValue};
fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool { fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
let module = match adt { let module = match adt {
@ -197,7 +196,6 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
pub(crate) fn has_drop_glue_recover( pub(crate) fn has_drop_glue_recover(
_db: &dyn HirDatabase, _db: &dyn HirDatabase,
_cycle: &salsa::Cycle, _cycle: &salsa::Cycle,
_: HirDatabaseData,
_ty: Ty, _ty: Ty,
_env: Arc<TraitEnvironment>, _env: Arc<TraitEnvironment>,
) -> DropGlue { ) -> DropGlue {
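Alongside the id changes, the cycle-recovery hooks shrink: the `HirDatabaseData` argument disappears here and again in the `layout`, `lower` and `mir` hunks below, leaving recovery functions with just the database, the cycle, and the original query inputs. A sketch of the narrowed signature with stand-in types (the real hooks are wired up via `#[salsa::cycle(..)]` attributes in `HirDatabase`):

struct Db;    // stand-in for `dyn HirDatabase`
struct Cycle; // stand-in for `salsa::Cycle`
type Ty = u32;  // stand-in query inputs
type Env = u32;

#[derive(Debug, PartialEq)]
enum DropGlue { None }

// After the change: (&db, &cycle, inputs..) -> fallback value. Before, an
// extra `HirDatabaseData` argument sat between the cycle and the inputs.
fn has_drop_glue_recover(_db: &Db, _cycle: &Cycle, _ty: Ty, _env: Env) -> DropGlue {
    DropGlue::None // illustrative fallback returned when the query cycles
}

fn main() {
    let recover: fn(&Db, &Cycle, Ty, Env) -> DropGlue = has_drop_glue_recover;
    assert_eq!(recover(&Db, &Cycle, 0, 0), DropGlue::None);
}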

View file

@ -33,7 +33,7 @@ fn check_dyn_compatibility<'a>(
expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect(); expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
let (db, file_ids) = TestDB::with_many_files(ra_fixture); let (db, file_ids) = TestDB::with_many_files(ra_fixture);
for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| { for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| {
let module_id = db.module_for_file(file_id); let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db); let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope; let scope = &def_map[module_id.local_id].scope;
scope scope
@ -43,7 +43,7 @@ fn check_dyn_compatibility<'a>(
let name = db let name = db
.trait_signature(trait_id) .trait_signature(trait_id)
.name .name
.display_no_db(file_id.edition()) .display_no_db(file_id.edition(&db))
.to_smolstr(); .to_smolstr();
Some((trait_id, name)) Some((trait_id, name))
} else { } else {

View file

@ -20,7 +20,7 @@ use triomphe::Arc;
use crate::{ use crate::{
Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
consteval::try_const_usize, consteval::try_const_usize,
db::{HirDatabase, HirDatabaseData, InternedClosure}, db::{HirDatabase, InternedClosure},
infer::normalize, infer::normalize,
utils::ClosureSubst, utils::ClosureSubst,
}; };
@ -368,7 +368,6 @@ pub fn layout_of_ty_query(
pub(crate) fn layout_of_ty_recover( pub(crate) fn layout_of_ty_recover(
_: &dyn HirDatabase, _: &dyn HirDatabase,
_: &Cycle, _: &Cycle,
_: HirDatabaseData,
_: Ty, _: Ty,
_: Arc<TraitEnvironment>, _: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> { ) -> Result<Arc<Layout>, LayoutError> {

View file

@ -20,7 +20,7 @@ use crate::{
layout::{Layout, LayoutError, field_ty}, layout::{Layout, LayoutError, field_ty},
}; };
use super::{HirDatabaseData, LayoutCx}; use super::LayoutCx;
pub fn layout_of_adt_query( pub fn layout_of_adt_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
@ -134,7 +134,6 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
pub(crate) fn layout_of_adt_recover( pub(crate) fn layout_of_adt_recover(
_: &dyn HirDatabase, _: &dyn HirDatabase,
_: &Cycle, _: &Cycle,
_: HirDatabaseData,
_: AdtId, _: AdtId,
_: Substitution, _: Substitution,
_: Arc<TraitEnvironment>, _: Arc<TraitEnvironment>,

View file

@ -38,7 +38,7 @@ fn eval_goal(
let adt_or_type_alias_id = file_ids let adt_or_type_alias_id = file_ids
.into_iter() .into_iter()
.find_map(|file_id| { .find_map(|file_id| {
let module_id = db.module_for_file(file_id.file_id()); let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db); let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope; let scope = &def_map[module_id.local_id].scope;
let adt_or_type_alias_id = scope.declarations().find_map(|x| match x { let adt_or_type_alias_id = scope.declarations().find_map(|x| match x {
@ -47,14 +47,18 @@ fn eval_goal(
hir_def::AdtId::StructId(x) => db hir_def::AdtId::StructId(x) => db
.struct_signature(x) .struct_signature(x)
.name .name
.display_no_db(file_id.edition()) .display_no_db(file_id.edition(&db))
.to_smolstr(),
hir_def::AdtId::UnionId(x) => db
.union_signature(x)
.name
.display_no_db(file_id.edition(&db))
.to_smolstr(),
hir_def::AdtId::EnumId(x) => db
.enum_signature(x)
.name
.display_no_db(file_id.edition(&db))
.to_smolstr(), .to_smolstr(),
hir_def::AdtId::UnionId(x) => {
db.union_signature(x).name.display_no_db(file_id.edition()).to_smolstr()
}
hir_def::AdtId::EnumId(x) => {
db.enum_signature(x).name.display_no_db(file_id.edition()).to_smolstr()
}
}; };
(name == "Goal").then_some(Either::Left(x)) (name == "Goal").then_some(Either::Left(x))
} }
@ -62,7 +66,7 @@ fn eval_goal(
let name = db let name = db
.type_alias_signature(x) .type_alias_signature(x)
.name .name
.display_no_db(file_id.edition()) .display_no_db(file_id.edition(&db))
.to_smolstr(); .to_smolstr();
(name == "Goal").then_some(Either::Right(x)) (name == "Goal").then_some(Either::Right(x))
} }
@ -99,7 +103,7 @@ fn eval_expr(
); );
let (db, file_id) = TestDB::with_single_file(&ra_fixture); let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let module_id = db.module_for_file(file_id.file_id()); let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db); let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope; let scope = &def_map[module_id.local_id].scope;
let function_id = scope let function_id = scope
@ -107,7 +111,7 @@ fn eval_expr(
.find_map(|x| match x { .find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => { hir_def::ModuleDefId::FunctionId(x) => {
let name = let name =
db.function_signature(x).name.display_no_db(file_id.edition()).to_smolstr(); db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr();
(name == "main").then_some(x) (name == "main").then_some(x)
} }
_ => None, _ => None,
@ -117,7 +121,7 @@ fn eval_expr(
let b = hir_body let b = hir_body
.bindings .bindings
.iter() .iter()
.find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal") .find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap() .unwrap()
.0; .0;
let infer = db.infer(function_id.into()); let infer = db.infer(function_id.into());

View file

@ -59,7 +59,7 @@ use crate::{
QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
TyKind, WhereClause, all_super_traits, TyKind, WhereClause, all_super_traits,
consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic}, consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
db::{HirDatabase, HirDatabaseData}, db::HirDatabase,
error_lifetime, error_lifetime,
generics::{Generics, generics, trait_self_param_idx}, generics::{Generics, generics, trait_self_param_idx},
lower::{ lower::{
@ -957,7 +957,6 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover( pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase, _db: &dyn HirDatabase,
_cycle: &salsa::Cycle, _cycle: &salsa::Cycle,
_: HirDatabaseData,
_def: GenericDefId, _def: GenericDefId,
_param_id: TypeOrConstParamId, _param_id: TypeOrConstParamId,
_assoc_name: Option<Name>, _assoc_name: Option<Name>,

View file

@ -16,7 +16,7 @@ use hir_def::{
resolver::{HasResolver, TypeNs, ValueNs}, resolver::{HasResolver, TypeNs, ValueNs},
signatures::{StaticFlags, StructFlags}, signatures::{StaticFlags, StructFlags},
}; };
use hir_expand::{HirFileIdExt, InFile, mod_path::path, name::Name}; use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym; use intern::sym;
use la_arena::ArenaMap; use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout; use rustc_abi::TargetDataLayout;
@ -409,7 +409,7 @@ impl MirEvalError {
}; };
let file_id = span.file_id.original_file(db); let file_id = span.file_id.original_file(db);
let text_range = span.value.text_range(); let text_range = span.value.text_range();
writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?; writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;
} }
} }
match err { match err {

View file

@ -1,5 +1,6 @@
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use span::{Edition, EditionedFileId}; use hir_expand::EditionedFileId;
use span::Edition;
use syntax::{TextRange, TextSize}; use syntax::{TextRange, TextSize};
use test_fixture::WithFixture; use test_fixture::WithFixture;
@ -9,7 +10,7 @@ use crate::{Interner, Substitution, db::HirDatabase, mir::MirLowerError, test_db
use super::{MirEvalError, interpret_mir}; use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> { fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
let module_id = db.module_for_file(file_id); let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db); let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope; let scope = &def_map[module_id.local_id].scope;
let func_id = scope let func_id = scope
@ -69,7 +70,7 @@ fn check_pass_and_stdio(
let span_formatter = |file, range: TextRange| { let span_formatter = |file, range: TextRange| {
format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end())) format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
}; };
let krate = db.module_for_file(file_id).krate(); let krate = db.module_for_file(file_id.file_id(&db)).krate();
e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate)) e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate))
.unwrap(); .unwrap();
panic!("Error in interpreting: {err}"); panic!("Error in interpreting: {err}");

View file

@ -19,7 +19,7 @@ use triomphe::Arc;
use crate::{ use crate::{
Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
consteval::{intern_const_scalar, unknown_const}, consteval::{intern_const_scalar, unknown_const},
db::{HirDatabase, HirDatabaseData, InternedClosure, InternedClosureId}, db::{HirDatabase, InternedClosure, InternedClosureId},
from_placeholder_idx, from_placeholder_idx,
generics::{Generics, generics}, generics::{Generics, generics},
infer::normalize, infer::normalize,
@ -316,7 +316,6 @@ pub fn monomorphized_mir_body_query(
pub(crate) fn monomorphized_mir_body_recover( pub(crate) fn monomorphized_mir_body_recover(
_: &dyn HirDatabase, _: &dyn HirDatabase,
_: &salsa::Cycle, _: &salsa::Cycle,
_: HirDatabaseData,
_: DefWithBodyId, _: DefWithBodyId,
_: Substitution, _: Substitution,
_: Arc<crate::TraitEnvironment>, _: Arc<crate::TraitEnvironment>,

View file

@ -8,9 +8,10 @@ use base_db::{
}; };
use hir_def::{ModuleId, db::DefDatabase}; use hir_def::{ModuleId, db::DefDatabase};
use hir_expand::EditionedFileId;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability}; use salsa::{AsDynDatabase, Durability};
use span::{EditionedFileId, FileId}; use span::FileId;
use syntax::TextRange; use syntax::TextRange;
use test_utils::extract_annotations; use test_utils::extract_annotations;
use triomphe::Arc; use triomphe::Arc;
@ -119,7 +120,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() { for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() { for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) { if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return Some(crate_def_map.module_id(local_id)); return Some(crate_def_map.module_id(local_id));
} }
} }
@ -145,7 +146,7 @@ impl TestDB {
files files
.into_iter() .into_iter()
.filter_map(|file_id| { .filter_map(|file_id| {
let text = self.file_text(file_id.file_id()); let text = self.file_text(file_id.file_id(self));
let annotations = extract_annotations(&text.text(self)); let annotations = extract_annotations(&text.text(self));
if annotations.is_empty() { if annotations.is_empty() {
return None; return None;

View file

@ -126,7 +126,7 @@ fn check_impl(
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new(); let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
for file_id in files { for file_id in files {
let module = db.module_for_file_opt(file_id); let module = db.module_for_file_opt(file_id.file_id(&db));
let module = match module { let module = match module {
Some(m) => m, Some(m) => m,
None => continue, None => continue,
@ -387,7 +387,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
} }
}; };
let module = db.module_for_file(file_id); let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db); let def_map = module.def_map(&db);
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new(); let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
@ -568,7 +568,7 @@ fn salsa_bug() {
", ",
); );
let module = db.module_for_file(pos.file_id); let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db); let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| { visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(match def { db.infer(match def {
@ -605,9 +605,9 @@ fn salsa_bug() {
} }
"; ";
db.set_file_text(pos.file_id.file_id(), new_text); db.set_file_text(pos.file_id.file_id(&db), new_text);
let module = db.module_for_file(pos.file_id); let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db); let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| { visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(match def { db.infer(match def {

View file

@ -1,9 +1,9 @@
use expect_test::{Expect, expect}; use expect_test::{Expect, expect};
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use hir_expand::files::InFileWrapper; use hir_expand::{HirFileId, files::InFileWrapper};
use itertools::Itertools; use itertools::Itertools;
use salsa::plumbing::FromId; use salsa::plumbing::FromId;
use span::{HirFileId, TextRange}; use span::TextRange;
use syntax::{AstNode, AstPtr}; use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture; use test_fixture::WithFixture;
@ -16,7 +16,7 @@ use super::visit_module;
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture); let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module = db.module_for_file(file_id); let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db); let def_map = module.def_map(&db);
let mut defs = Vec::new(); let mut defs = Vec::new();

View file

@ -17,7 +17,7 @@ fn foo() -> i32 {
); );
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id()); let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db); let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| { visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def { if let ModuleDefId::FunctionId(it) = def {
@ -35,11 +35,11 @@ fn foo() -> i32 {
1 1
}"; }";
db.set_file_text(pos.file_id.file_id(), new_text); db.set_file_text(pos.file_id.file_id(&db), new_text);
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id()); let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db); let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| { visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def { if let ModuleDefId::FunctionId(it) = def {
@ -68,7 +68,7 @@ fn baz() -> i32 {
); );
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id()); let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db); let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| { visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def { if let ModuleDefId::FunctionId(it) = def {
@ -91,11 +91,11 @@ fn baz() -> i32 {
} }
"; ";
db.set_file_text(pos.file_id.file_id(), new_text); db.set_file_text(pos.file_id.file_id(&db), new_text);
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id()); let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db); let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| { visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def { if let ModuleDefId::FunctionId(it) = def {

View file

@ -979,7 +979,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
let (db, file_id) = TestDB::with_single_file(ra_fixture); let (db, file_id) = TestDB::with_single_file(ra_fixture);
let mut defs: Vec<GenericDefId> = Vec::new(); let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id).unwrap(); let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db); let def_map = module.def_map(&db);
crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| { crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| {
defs.push(match it { defs.push(match it {

View file

@ -6,9 +6,8 @@ use hir_def::{
nameres::{ModuleOrigin, ModuleSource}, nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _}, src::{HasChildSource, HasSource as _},
}; };
use hir_expand::{HirFileId, InFile}; use hir_expand::{EditionedFileId, HirFileId, InFile};
use hir_ty::db::InternedClosure; use hir_ty::db::InternedClosure;
use span::EditionedFileId;
use syntax::ast; use syntax::ast;
use tt::TextRange; use tt::TextRange;

View file

@ -82,7 +82,7 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind; use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId}; use span::{Edition, FileId};
use stdx::{format_to, impl_from, never}; use stdx::{format_to, impl_from, never};
use syntax::{ use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
@ -129,7 +129,7 @@ pub use {
{ModuleDefId, TraitId}, {ModuleDefId, TraitId},
}, },
hir_expand::{ hir_expand::{
ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind, EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
attrs::{Attr, AttrId}, attrs::{Attr, AttrId},
change::ChangeWithProcMacros, change::ChangeWithProcMacros,
files::{ files::{
@ -954,10 +954,11 @@ fn macro_call_diagnostics(
let node = let node =
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id())); InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
let RenderedExpandError { message, error, kind } = err.render_to_string(db); let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let precise_location = if err.span().anchor.file_id == file_id { let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == file_id {
Some( Some(
err.span().range err.span().range
+ db.ast_id_map(err.span().anchor.file_id.into()) + db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id) .get_erased(err.span().anchor.ast_id)
.text_range() .text_range()
.start(), .start(),
@ -1926,7 +1927,7 @@ impl DefWithBody {
source_map source_map
.macro_calls() .macro_calls()
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id.macro_call_id, acc)); .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
expr_store_diagnostics(db, acc, &source_map); expr_store_diagnostics(db, acc, &source_map);
@ -2145,10 +2146,11 @@ fn expr_store_diagnostics(
ExpressionStoreDiagnostics::MacroError { node, err } => { ExpressionStoreDiagnostics::MacroError { node, err } => {
let RenderedExpandError { message, error, kind } = err.render_to_string(db); let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let precise_location = if err.span().anchor.file_id == node.file_id { let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == node.file_id {
Some( Some(
err.span().range err.span().range
+ db.ast_id_map(err.span().anchor.file_id.into()) + db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id) .get_erased(err.span().anchor.ast_id)
.text_range() .text_range()
.start(), .start(),
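Both diagnostics hunks above follow the same pattern: spans keep storing the plain `span::EditionedFileId` value (span data itself remains a plain value), and `EditionedFileId::from_span(db, ..)` interns it on demand so it can be compared against the Salsa handle the caller already holds. A minimal model of that two-level scheme, with stand-in types and a toy interner:

use std::collections::HashMap;

// The plain value that spans keep storing (stand-in for `span::EditionedFileId`).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SpanEditionedFileId { file_id: u32, edition: u16 }

// The interned handle (stand-in for base-db's Salsa `EditionedFileId`).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct EditionedFileId(usize);

#[derive(Default)]
struct Database { interner: HashMap<SpanEditionedFileId, usize> }

impl EditionedFileId {
    // Interning is idempotent, so handles made from equal span ids compare equal.
    fn from_span(db: &mut Database, id: SpanEditionedFileId) -> Self {
        let next = db.interner.len();
        EditionedFileId(*db.interner.entry(id).or_insert(next))
    }
}

fn main() {
    let mut db = Database::default();
    let span_id = SpanEditionedFileId { file_id: 1, edition: 2024 };
    let a = EditionedFileId::from_span(&mut db, span_id);
    let b = EditionedFileId::from_span(&mut db, span_id);
    assert_eq!(a, b); // the equality check the diagnostics code relies on
}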
@ -4475,7 +4477,7 @@ impl Impl {
let src = self.source(db)?; let src = self.source(db)?;
let macro_file = src.file_id.macro_file()?; let macro_file = src.file_id.macro_file()?;
let loc = macro_file.macro_call_id.lookup(db); let loc = macro_file.lookup(db);
let (derive_attr, derive_index) = match loc.kind { let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
let module_id = self.id.lookup(db).container; let module_id = self.id.lookup(db).container;
@ -4488,9 +4490,8 @@ impl Impl {
} }
_ => return None, _ => return None,
}; };
let file_id = MacroFileId { macro_call_id: derive_attr };
let path = db let path = db
.parse_macro_expansion(file_id) .parse_macro_expansion(derive_attr)
.value .value
.0 .0
.syntax_node() .syntax_node()
@ -4498,7 +4499,7 @@ impl Impl {
.nth(derive_index as usize) .nth(derive_index as usize)
.and_then(<ast::Attr as AstNode>::cast) .and_then(<ast::Attr as AstNode>::cast)
.and_then(|it| it.path())?; .and_then(|it| it.path())?;
Some(InMacroFile { file_id, value: path }) Some(InMacroFile { file_id: derive_attr, value: path })
} }
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool { pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
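The `Impl::as_builtin_derive_path` hunk above shows the macro-file half of the change: `MacroFileId` was a single-field wrapper around `MacroCallId`, and with macro files now first-class Salsa entities the wrapper is dropped, so `parse_macro_expansion` and its callers pass the call id directly. Schematically (stand-in types, illustrative body):

#[derive(Clone, Copy, Debug)]
struct MacroCallId(u32);

// Before:
//   struct MacroFileId { macro_call_id: MacroCallId }
//   db.parse_macro_expansion(MacroFileId { macro_call_id: derive_attr })
// After, the query takes the call id itself:
fn parse_macro_expansion(call: MacroCallId) -> String {
    format!("expansion of {call:?}") // illustrative body
}

fn main() {
    let derive_attr = MacroCallId(42);
    println!("{}", parse_macro_expansion(derive_attr));
}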

View file

@ -20,11 +20,11 @@ use hir_def::{
type_ref::Mutability, type_ref::Mutability,
}; };
use hir_expand::{ use hir_expand::{
ExpandResult, FileRange, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
attrs::collect_attrs, attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander}, builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase, db::ExpandDatabase,
files::InRealFile, files::{FileRangeWrapper, InRealFile},
hygiene::SyntaxContextExt as _, hygiene::SyntaxContextExt as _,
inert_attr_macro::find_builtin_attr_idx, inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind}, mod_path::{ModPath, PathKind},
@ -35,7 +35,7 @@ use intern::{Interned, Symbol, sym};
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec}; use smallvec::{SmallVec, smallvec};
use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContext}; use span::{Edition, FileId, SyntaxContext};
use stdx::TupleExt; use stdx::TupleExt;
use syntax::{ use syntax::{
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@ -46,10 +46,10 @@ use syntax::{
use crate::{ use crate::{
Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField,
TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
db::HirDatabase, db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path}, source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
@ -136,8 +136,8 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> { pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase, pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>, s2d_cache: RefCell<SourceToDefCache>,
/// MacroCall to its expansion's MacroFileId cache /// MacroCall to its expansion's MacroCallId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>, macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
} }
impl<DB> fmt::Debug for Semantics<'_, DB> { impl<DB> fmt::Debug for Semantics<'_, DB> {
@ -305,8 +305,8 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() } SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
} }
pub fn parse(&self, file_id: base_db::EditionedFileId) -> ast::SourceFile { pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.editioned_file_id(self.db).into(); let hir_file_id = file_id.into();
let tree = self.db.parse(file_id).tree(); let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), hir_file_id); self.cache(tree.syntax().clone(), hir_file_id);
tree tree
@ -322,27 +322,26 @@ impl<'db> SemanticsImpl<'db> {
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> { pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
Some(EditionedFileId::new( Some(EditionedFileId::new(
self.db,
file, file,
self.file_to_module_defs(file).next()?.krate().edition(self.db), self.file_to_module_defs(file).next()?.krate().edition(self.db),
)) ))
} }
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let editioned_file_id = self let file_id = self
.attach_first_edition(file_id) .attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id)); .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
let file_id = base_db::EditionedFileId::new(self.db, editioned_file_id);
let tree = self.db.parse(file_id).tree(); let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), editioned_file_id.into()); self.cache(tree.syntax().clone(), file_id.into());
tree tree
} }
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> { pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id.repr() { match file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id()).next()?; let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
let def_map = self.db.crate_def_map(module.krate().id); let def_map = self.db.crate_def_map(module.krate().id);
match def_map[module.id.local_id].origin { match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None, ModuleOrigin::CrateRoot { .. } => None,
@ -357,9 +356,8 @@ impl<'db> SemanticsImpl<'db> {
_ => unreachable!("FileId can only belong to a file module"), _ => unreachable!("FileId can only belong to a file module"),
} }
} }
HirFileIdRepr::MacroFile(macro_file) => { HirFileId::MacroFile(macro_file) => {
let node = let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
self.db.lookup_intern_macro_call(macro_file.macro_call_id).to_node(self.db);
let root = find_root(&node.value); let root = find_root(&node.value);
self.cache(root, node.file_id); self.cache(root, node.file_id);
Some(node) Some(node)
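The `find_parent_file` rewrite above shows the other recurring pattern in this file: `HirFileId` used to be a packed id whose two cases were recovered via `file_id.repr()` and a separate `HirFileIdRepr` enum; it is now a two-variant enum itself, so code matches on it directly. The new shape, modeled with stand-ins:

#[derive(Clone, Copy, Debug)]
struct EditionedFileId(u32);
#[derive(Clone, Copy, Debug)]
struct MacroCallId(u32);

// After: the id itself is the enum (this shape was previously the separate
// `HirFileIdRepr`, reached through `.repr()`).
#[derive(Clone, Copy, Debug)]
enum HirFileId {
    FileId(EditionedFileId),
    MacroFile(MacroCallId),
}

fn describe(file_id: HirFileId) -> String {
    match file_id {
        HirFileId::FileId(file) => format!("real file {file:?}"),
        HirFileId::MacroFile(call) => format!("macro expansion {call:?}"),
    }
}

fn main() {
    println!("{}", describe(HirFileId::FileId(EditionedFileId(0))));
    println!("{}", describe(HirFileId::MacroFile(MacroCallId(1))));
}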
@ -384,7 +382,7 @@ impl<'db> SemanticsImpl<'db> {
node node
} }
pub fn expand(&self, file_id: MacroFileId) -> ExpandResult<SyntaxNode> { pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node()); let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
self.cache(res.value.clone(), file_id.into()); self.cache(res.value.clone(), file_id.into());
res res
@ -402,13 +400,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> { pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
let file_id = self.find_file(attr.syntax()).file_id; let file_id = self.find_file(attr.syntax()).file_id;
let krate = match file_id.repr() { let krate = match file_id {
HirFileIdRepr::FileId(file_id) => { HirFileId::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id()).next()?.krate().id self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id
}
HirFileIdRepr::MacroFile(macro_file) => {
self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
} }
HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
}; };
hir_expand::check_cfg_attr_value(self.db, attr, krate) hir_expand::check_cfg_attr_value(self.db, attr, krate)
} }
@ -423,7 +419,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(sa.file_id, macro_call); let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = sa.expand(self.db, macro_call)?; let file_id = sa.expand(self.db, macro_call)?;
let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id); let macro_call = self.db.lookup_intern_macro_call(file_id);
let skip = matches!( let skip = matches!(
macro_call.def.kind, macro_call.def.kind,
@ -456,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> { pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
let src = self.wrap_node_infile(item.clone()); let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?; let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.expand(macro_call_id.as_macro_file())) Some(self.expand(macro_call_id))
} }
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> { pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@ -465,7 +461,7 @@ impl<'db> SemanticsImpl<'db> {
let call_id = self.with_ctx(|ctx| { let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it) ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?; })?;
Some(self.parse_or_expand(call_id.as_file())) Some(self.parse_or_expand(call_id.into()))
} }
pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> { pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@ -485,7 +481,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_macro_calls(attr)? .derive_macro_calls(attr)?
.into_iter() .into_iter()
.flat_map(|call| { .flat_map(|call| {
let file_id = call?.as_macro_file(); let file_id = call?;
let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id); let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
let root_node = value.0.syntax_node(); let root_node = value.0.syntax_node();
self.cache(root_node.clone(), file_id.into()); self.cache(root_node.clone(), file_id.into());
@ -526,7 +522,7 @@ impl<'db> SemanticsImpl<'db> {
Some(result) Some(result)
} }
pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroFileId)>> { pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it { let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
@ -542,7 +538,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_helpers_in_scope(InFile::new(sa.file_id, id))? .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter() .iter()
.filter(|&(name, _, _)| *name == attr_name) .filter(|&(name, _, _)| *name == attr_name)
.map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file())) .map(|&(_, macro_, call)| (macro_.into(), call))
.collect(); .collect();
res.is_empty().not().then_some(res) res.is_empty().not().then_some(res)
} }
@ -564,7 +560,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_file = analyzer.expansion(macro_call)?; let macro_file = analyzer.expansion(macro_call)?;
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(
self.db, self.db,
macro_file.macro_call_id, macro_file,
speculative_args.syntax(), speculative_args.syntax(),
token_to_map, token_to_map,
) )
@ -572,16 +568,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn speculative_expand_raw( pub fn speculative_expand_raw(
&self, &self,
macro_file: MacroFileId, macro_file: MacroCallId,
speculative_args: &SyntaxNode, speculative_args: &SyntaxNode,
token_to_map: SyntaxToken, token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
self.db,
macro_file.macro_call_id,
speculative_args,
token_to_map,
)
} }
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
@ -1025,7 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
None => { None => {
stack.push(( stack.push((
file_id.into(), file_id.into(),
smallvec![(token, SyntaxContext::root(file_id.edition()))], smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
)); ));
} }
} }
@ -1054,7 +1045,6 @@ impl<'db> SemanticsImpl<'db> {
}) })
}); });
if let Some((call_id, item)) = containing_attribute_macro_call { if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file();
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind { let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => { hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index() invoc_attr_index.ast_index()
@ -1083,7 +1073,7 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_else(|| text_range.start()); .unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end()); let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range); filter_duplicates(tokens, text_range);
return process_expansion_for_token(&mut stack, file_id); return process_expansion_for_token(&mut stack, call_id);
} }
// Then check for token trees, that means we are either in a function-like macro or // Then check for token trees, that means we are either in a function-like macro or
@ -1127,7 +1117,7 @@ impl<'db> SemanticsImpl<'db> {
.eager_arg(self.db) .eager_arg(self.db)
.and_then(|arg| { .and_then(|arg| {
// also descend into eager expansions // also descend into eager expansions
process_expansion_for_token(&mut stack, arg.as_macro_file()) process_expansion_for_token(&mut stack, arg)
})) }))
} }
// derive or derive helper // derive or derive helper
@ -1151,7 +1141,6 @@ impl<'db> SemanticsImpl<'db> {
match derive_call { match derive_call {
Some(call_id) => { Some(call_id) => {
// resolved to a derive // resolved to a derive
let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range(); let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the // remove any other token in this macro input, all their mappings are the
// same as this // same as this
@ -1159,7 +1148,7 @@ impl<'db> SemanticsImpl<'db> {
!text_range.contains_range(t.text_range()) !text_range.contains_range(t.text_range())
}); });
return process_expansion_for_token( return process_expansion_for_token(
&mut stack, file_id, &mut stack, call_id,
); );
} }
None => Some(adt), None => Some(adt),
@ -1207,10 +1196,7 @@ impl<'db> SemanticsImpl<'db> {
// as there may be multiple derives registering the same helper // as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them! // name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though! // FIXME: We need to call `f` for all of them as well though!
res = res.or(process_expansion_for_token( res = res.or(process_expansion_for_token(&mut stack, *derive));
&mut stack,
derive.as_macro_file(),
));
} }
res res
} }
@ -1288,10 +1274,14 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange { pub fn diagnostics_display_range(
&self,
src: InFile<SyntaxNodePtr>,
) -> FileRangeWrapper<FileId> {
let root = self.parse_or_expand(src.file_id); let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root)); let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range_rooted(self.db) let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
FileRangeWrapper { file_id: file_id.file_id(self.db), range }
} }
fn token_ancestors_with_macros( fn token_ancestors_with_macros(
@ -1890,20 +1880,16 @@ fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>, ctx: &mut SourceToDefCtx<'_, '_>,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
) -> Option<MacroId> { ) -> Option<MacroId> {
use span::HirFileIdRepr;
let db: &dyn ExpandDatabase = ctx.db; let db: &dyn ExpandDatabase = ctx.db;
let loc = db.lookup_intern_macro_call(macro_call_id); let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.ast_id() { match loc.def.ast_id() {
Either::Left(it) => { Either::Left(it) => {
let node = match it.file_id.repr() { let node = match it.file_id {
HirFileIdRepr::FileId(editioned_file_id) => { HirFileId::FileId(file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node()) it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
} }
HirFileIdRepr::MacroFile(macro_file) => { HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file); let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value) it.to_ptr(db).to_node(&expansion_info.expanded().value)
} }
@ -1911,13 +1897,11 @@ fn macro_call_to_macro_id(
ctx.macro_to_def(InFile::new(it.file_id, &node)) ctx.macro_to_def(InFile::new(it.file_id, &node))
} }
Either::Right(it) => { Either::Right(it) => {
let node = match it.file_id.repr() { let node = match it.file_id {
HirFileIdRepr::FileId(editioned_file_id) => { HirFileId::FileId(file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node()) it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
} }
HirFileIdRepr::MacroFile(macro_file) => { HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file); let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value) it.to_ptr(db).to_node(&expansion_info.expanded().value)
} }
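One more boundary in this file worth noting: `diagnostics_display_range` now resolves the editioned handle before returning, handing callers a `FileRangeWrapper<FileId>`, i.e. a range in a plain `vfs::FileId` rather than the Salsa handle. A stand-in model of that resolution step:

#[derive(Clone, Copy, Debug, PartialEq)]
struct FileId(u32);

#[derive(Clone, Copy)]
struct EditionedFileId(u32);

struct Database;

impl EditionedFileId {
    // Plays the role of resolving the interned id back to a raw file id.
    fn file_id(self, _db: &Database) -> FileId { FileId(self.0) }
}

#[derive(Debug)]
struct TextRange { start: u32, end: u32 }

// Generic over the file-id kind, like `hir_expand::files::FileRangeWrapper`.
#[derive(Debug)]
struct FileRangeWrapper<FileKind> { file_id: FileKind, range: TextRange }

fn diagnostics_display_range(
    db: &Database,
    file_id: EditionedFileId,
    range: TextRange,
) -> FileRangeWrapper<FileId> {
    // Resolve the handle once, at the API boundary.
    FileRangeWrapper { file_id: file_id.file_id(db), range }
}

fn main() {
    let wrapped =
        diagnostics_display_range(&Database, EditionedFileId(3), TextRange { start: 0, end: 5 });
    println!("{wrapped:?}");
}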

View file

@ -197,7 +197,7 @@ impl ChildBySource for DefWithBodyId {
} }
sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| { sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id); res[keys::MACRO_CALL].insert(ast.value, exp_id);
}); });
for (block, def_map) in body.blocks(db) { for (block, def_map) in body.blocks(db) {

View file

@ -98,12 +98,12 @@ use hir_def::{
hir::{BindingId, Expr, LabelId}, hir::{BindingId, Expr, LabelId},
}; };
use hir_expand::{ use hir_expand::{
ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId,
attrs::AttrId, name::AsName, name::AsName,
}; };
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::{EditionedFileId, FileId}; use span::FileId;
use stdx::impl_from; use stdx::impl_from;
use syntax::{ use syntax::{
AstNode, AstPtr, SyntaxNode, AstNode, AstPtr, SyntaxNode,
@ -116,9 +116,9 @@ use crate::{InFile, InlineAsmOperand, db::HirDatabase, semantics::child_by_sourc
#[derive(Default)] #[derive(Default)]
pub(super) struct SourceToDefCache { pub(super) struct SourceToDefCache {
pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>, pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>, expansion_info_cache: FxHashMap<MacroCallId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>, pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>, pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroCallId>>,
/// Rootnode to HirFileId cache /// Rootnode to HirFileId cache
pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>, pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>,
} }
@ -138,14 +138,14 @@ impl SourceToDefCache {
&mut self, &mut self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
file: EditionedFileId, file: EditionedFileId,
) -> Option<MacroFileId> { ) -> Option<MacroCallId> {
if let Some(&m) = self.included_file_cache.get(&file) { if let Some(&m) = self.included_file_cache.get(&file) {
return m; return m;
} }
self.included_file_cache.insert(file, None); self.included_file_cache.insert(file, None);
for &crate_id in db.relevant_crates(file.into()).iter() { for &crate_id in db.relevant_crates(file.file_id(db)).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| { db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id })); self.included_file_cache.insert(file_id, Some(macro_call_id));
}); });
} }
self.included_file_cache.get(&file).copied().flatten() self.included_file_cache.get(&file).copied().flatten()
@ -154,7 +154,7 @@ impl SourceToDefCache {
pub(super) fn get_or_insert_expansion( pub(super) fn get_or_insert_expansion(
&mut self, &mut self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
macro_file: MacroFileId, macro_file: MacroCallId,
) -> &ExpansionInfo { ) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| { self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(db); let exp_info = macro_file.expansion_info(db);
@ -184,7 +184,7 @@ impl SourceToDefCtx<'_, '_> {
let n_mods = mods.len(); let n_mods = mods.len();
let modules = |file| { let modules = |file| {
crate_def_map crate_def_map
.modules_for_file(file) .modules_for_file(self.db, file)
.map(|local_id| crate_def_map.module_id(local_id)) .map(|local_id| crate_def_map.module_id(local_id))
}; };
mods.extend(modules(file)); mods.extend(modules(file));
@ -193,18 +193,16 @@ impl SourceToDefCtx<'_, '_> {
self.db self.db
.include_macro_invoc(crate_id) .include_macro_invoc(crate_id)
.iter() .iter()
.filter(|&&(_, file_id)| file_id == file) .filter(|&&(_, file_id)| file_id.file_id(self.db) == file)
.flat_map(|&(macro_call_id, file_id)| { .flat_map(|&(macro_call_id, file_id)| {
self.cache self.cache.included_file_cache.insert(file_id, Some(macro_call_id));
.included_file_cache
.insert(file_id, Some(MacroFileId { macro_call_id }));
modules( modules(
macro_call_id macro_call_id
.lookup(self.db) .lookup(self.db)
.kind .kind
.file_id() .file_id()
.original_file(self.db) .original_file(self.db)
.file_id(), .file_id(self.db),
) )
}), }),
); );
@ -234,7 +232,7 @@ impl SourceToDefCtx<'_, '_> {
} }
None => { None => {
let file_id = src.file_id.original_file(self.db); let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied() self.file_to_def(file_id.file_id(self.db)).first().copied()
} }
}?; }?;
@ -247,7 +245,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> { pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::info_span!("source_file_to_def").entered(); let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db); let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied() self.file_to_def(file_id.file_id(self.db)).first().copied()
} }
pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> { pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
@ -526,8 +524,10 @@ impl SourceToDefCtx<'_, '_> {
return Some(def); return Some(def);
} }
let def = let def = self
self.file_to_def(src.file_id.original_file(self.db).file_id()).first().copied()?; .file_to_def(src.file_id.original_file(self.db).file_id(self.db))
.first()
.copied()?;
Some(def.into()) Some(def.into())
} }
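
The section above carries the core pattern of this change: `EditionedFileId` is now a Salsa-interned handle, so reading its parts requires the database, and `included_file_cache` stores the `MacroCallId` directly instead of a single-field `MacroFileId` wrapper. A minimal, self-contained sketch of that cache shape, using stand-in types rather than rust-analyzer's real definitions:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct EditionedFileId(u32); // interned handle, cheap to copy around

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct MacroCallId(u32);

struct Db {
    // (file id, edition) pairs stored behind the interned handle.
    files: Vec<(u32, u16)>,
}

impl EditionedFileId {
    fn file_id(self, db: &Db) -> u32 {
        db.files[self.0 as usize].0 // the db argument the diff now threads through
    }
}

#[derive(Default)]
struct IncludeCache {
    // `Some(id)` caches a hit; `None` caches "checked, not an include".
    included_file_cache: HashMap<EditionedFileId, Option<MacroCallId>>,
}

impl IncludeCache {
    fn lookup(&mut self, db: &Db, file: EditionedFileId) -> Option<MacroCallId> {
        if let Some(&m) = self.included_file_cache.get(&file) {
            return m;
        }
        // Record the miss first, as in the code above, so repeated lookups
        // for a non-include file stay cheap.
        self.included_file_cache.insert(file, None);
        let _vfs_file = file.file_id(db); // what `db.relevant_crates` receives
        self.included_file_cache.get(&file).copied().flatten()
    }
}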


@ -29,7 +29,7 @@ use hir_def::{
type_ref::{Mutability, TypeRef, TypeRefId}, type_ref::{Mutability, TypeRef, TypeRefId},
}; };
use hir_expand::{ use hir_expand::{
HirFileId, InFile, MacroFileId, MacroFileIdExt, HirFileId, InFile, MacroCallId,
mod_path::{ModPath, PathKind, path}, mod_path::{ModPath, PathKind, path},
name::{AsName, Name}, name::{AsName, Name},
}; };
@ -47,10 +47,9 @@ use hir_ty::{
use intern::sym; use intern::sym;
use itertools::Itertools; use itertools::Itertools;
use smallvec::SmallVec; use smallvec::SmallVec;
use syntax::ast::{RangeItem, RangeOp};
use syntax::{ use syntax::{
SyntaxKind, SyntaxNode, TextRange, TextSize, SyntaxKind, SyntaxNode, TextRange, TextSize,
ast::{self, AstNode}, ast::{self, AstNode, RangeItem, RangeOp},
}; };
use triomphe::Arc; use triomphe::Arc;
@ -216,7 +215,7 @@ impl SourceAnalyzer {
}) })
} }
pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> { pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.store_sm()?.expansion(node) self.store_sm()?.expansion(node)
} }
@ -750,7 +749,7 @@ impl SourceAnalyzer {
let bs = self.store_sm()?; let bs = self.store_sm()?;
bs.expansion(macro_call).and_then(|it| { bs.expansion(macro_call).and_then(|it| {
// FIXME: Block def maps // FIXME: Block def maps
let def = it.macro_call_id.lookup(db).def; let def = it.lookup(db).def;
db.crate_def_map(def.krate) db.crate_def_map(def.krate)
.macro_def_to_macro_id .macro_def_to_macro_id
.get(&def.kind.erased_ast_id()) .get(&def.kind.erased_ast_id())
@ -1197,15 +1196,11 @@ impl SourceAnalyzer {
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroFileId> { ) -> Option<MacroCallId> {
self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| { self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
self.resolver self.resolver.item_scope().macro_invoc(
.item_scope() macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
.macro_invoc( )
macro_call
.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
)
.map(|it| it.as_macro_file())
}) })
} }
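
With the wrapper gone, `expansion()` hands back the `MacroCallId` itself, so callers drop one field access and the trailing `as_macro_file()` conversion. A sketch of the before/after call shape, again with stand-in types (`value()` playing the role of the Salsa `lookup`):

#[derive(Clone, Copy)]
struct MacroCallId(u32);

struct MacroFileId {
    macro_call_id: MacroCallId, // the single-field wrapper this PR removes
}

impl MacroCallId {
    fn value(self) -> u32 {
        self.0 // stands in for `lookup(db).def`
    }
}

fn def_before(it: MacroFileId) -> u32 {
    it.macro_call_id.value() // was: it.macro_call_id.lookup(db).def
}

fn def_after(it: MacroCallId) -> u32 {
    it.value() // now: it.lookup(db).def
}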


@ -1,8 +1,6 @@
//! See [`AssistContext`]. //! See [`AssistContext`].
use hir::{FileRange, Semantics}; use hir::{EditionedFileId, FileRange, Semantics};
use ide_db::EditionedFileId;
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{FileId, RootDatabase, label::Label}; use ide_db::{FileId, RootDatabase, label::Label};
use syntax::Edition; use syntax::Edition;
use syntax::{ use syntax::{
@ -65,10 +63,7 @@ impl<'a> AssistContext<'a> {
config: &'a AssistConfig, config: &'a AssistConfig,
frange: FileRange, frange: FileRange,
) -> AssistContext<'a> { ) -> AssistContext<'a> {
let editioned_file_id = let source_file = sema.parse(frange.file_id);
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), frange.file_id);
let source_file = sema.parse(editioned_file_id);
let start = frange.range.start(); let start = frange.range.start();
let end = frange.range.end(); let end = frange.range.end();
@ -109,12 +104,16 @@ impl<'a> AssistContext<'a> {
self.frange.range.start() self.frange.range.start()
} }
pub(crate) fn vfs_file_id(&self) -> FileId {
self.frange.file_id.file_id(self.db())
}
pub(crate) fn file_id(&self) -> EditionedFileId { pub(crate) fn file_id(&self) -> EditionedFileId {
self.frange.file_id self.frange.file_id
} }
pub(crate) fn edition(&self) -> Edition { pub(crate) fn edition(&self) -> Edition {
self.frange.file_id.edition() self.frange.file_id.edition(self.db())
} }
pub(crate) fn has_empty_selection(&self) -> bool { pub(crate) fn has_empty_selection(&self) -> bool {
@ -169,7 +168,7 @@ impl Assists {
pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists { pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
Assists { Assists {
resolve, resolve,
file: ctx.frange.file_id.file_id(), file: ctx.frange.file_id.file_id(ctx.db()),
buf: Vec::new(), buf: Vec::new(),
allowed: ctx.config.allowed.clone(), allowed: ctx.config.allowed.clone(),
} }
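
`AssistContext` now separates the interned id from the on-disk one: `file_id()` still returns the `EditionedFileId` used for parsing and searching, the new `vfs_file_id()` resolves the `vfs::FileId` that `edit_file` and `add_file_edits` expect, and `edition()` reads the edition through the database. A self-contained sketch of that split, with stand-in types:

#[derive(Clone, Copy)]
struct FileId(u32);

#[derive(Clone, Copy)]
struct Edition(u8);

#[derive(Clone, Copy)]
struct EditionedFileId(usize);

struct Db {
    interned: Vec<(FileId, Edition)>, // what the interner stores per handle
}

impl EditionedFileId {
    fn file_id(self, db: &Db) -> FileId {
        db.interned[self.0].0
    }
    fn edition(self, db: &Db) -> Edition {
        db.interned[self.0].1
    }
}

struct AssistContext<'a> {
    db: &'a Db,
    frange_file_id: EditionedFileId,
}

impl AssistContext<'_> {
    fn file_id(&self) -> EditionedFileId {
        self.frange_file_id // still just the copyable handle
    }
    fn vfs_file_id(&self) -> FileId {
        self.frange_file_id.file_id(self.db) // for structural edits
    }
    fn edition(&self) -> Edition {
        self.frange_file_id.edition(self.db) // replaces the old field read
    }
}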


@ -48,7 +48,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
editor.replace(expr.syntax(), block_expr.syntax()); editor.replace(expr.syntax(), block_expr.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -3,7 +3,6 @@ use std::iter::{self, Peekable};
use either::Either; use either::Either;
use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym}; use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
use ide_db::RootDatabase; use ide_db::RootDatabase;
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::syntax_helpers::suggest_name; use ide_db::syntax_helpers::suggest_name;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast}; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools; use itertools::Itertools;
@ -257,12 +256,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
// Just replace the element that the original range came from // Just replace the element that the original range came from
let old_place = { let old_place = {
// Find the original element // Find the original element
let editioned_file_id = ide_db::base_db::EditionedFileId::new( let file = ctx.sema.parse(arm_list_range.file_id);
ctx.sema.db.as_dyn_database(),
arm_list_range.file_id,
);
let file = ctx.sema.parse(editioned_file_id);
let old_place = file.syntax().covering_element(arm_list_range.range); let old_place = file.syntax().covering_element(arm_list_range.range);
match old_place { match old_place {
@ -300,7 +294,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
} }
editor.add_mappings(make.take()); editor.add_mappings(make.take());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -119,7 +119,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
} }
} }
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
)? )?
} else { } else {
@ -181,7 +181,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -147,7 +147,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }
@ -230,7 +230,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -42,7 +42,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
format!("Bind as `let _ = {ident_pat};`"), format!("Bind as `let _ = {ident_pat};`"),
param.syntax().text_range(), param.syntax().text_range(),
|builder| { |builder| {
let line_index = ctx.db().line_index(ctx.file_id().into()); let line_index = ctx.db().line_index(ctx.vfs_file_id());
let indent = func.indent_level(); let indent = func.indent_level();
let text_indent = indent + 1; let text_indent = indent + 1;


@ -135,7 +135,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(expr.syntax(), mcall.syntax()); editor.replace(expr.syntax(), mcall.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }
@ -233,7 +233,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone()); editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -209,7 +209,7 @@ fn replace_usages(
delayed_mutations: &mut Vec<(ImportScope, ast::Path)>, delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
) { ) {
for (file_id, references) in usages { for (file_id, references) in usages {
edit.edit_file(file_id.file_id()); edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports = augment_references_with_imports(ctx, references, target_module); let refs_with_imports = augment_references_with_imports(ctx, references, target_module);


@ -249,7 +249,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
); );
fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap())); fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap()));
builder.edit_file(ctx.file_id()); builder.edit_file(ctx.vfs_file_id());
match &closure_name { match &closure_name {
Some((closure_decl, _, _)) => { Some((closure_decl, _, _)) => {
fn_ = fn_.indent(closure_decl.indent_level()); fn_ = fn_.indent(closure_decl.indent_level());
@ -506,9 +506,8 @@ fn wrap_capture_in_deref_if_needed(
} }
fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr { fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr {
let place = let place = parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.edition())
parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.file_id().edition()) .expect("`display_place_source_code()` produced an invalid expr");
.expect("`display_place_source_code()` produced an invalid expr");
let needs_mut = match capture.kind() { let needs_mut = match capture.kind() {
CaptureKind::SharedRef => false, CaptureKind::SharedRef => false,
CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true, CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true,
@ -587,7 +586,7 @@ fn handle_call(
let indent = let indent =
if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() }; if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() };
// FIXME: This text manipulation seems risky. // FIXME: This text manipulation seems risky.
let text = ctx.db().file_text(file_id.file_id()).text(ctx.db()); let text = ctx.db().file_text(file_id.file_id(ctx.db())).text(ctx.db());
let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end(); let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end();
if !text.ends_with(')') { if !text.ends_with(')') {
return None; return None;
@ -630,7 +629,7 @@ fn handle_call(
to_insert.push(','); to_insert.push(',');
} }
builder.edit_file(file_id); builder.edit_file(file_id.file_id(ctx.db()));
builder.insert(offset, to_insert); builder.insert(offset, to_insert);
Some(()) Some(())


@ -101,7 +101,7 @@ pub(crate) fn convert_for_loop_to_while_let(
editor.replace(for_loop.syntax(), while_loop.syntax()); editor.replace(for_loop.syntax(), while_loop.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -99,7 +99,7 @@ fn edit_struct_def(
let tuple_fields = ast::make::tuple_field_list(tuple_fields); let tuple_fields = ast::make::tuple_field_list(tuple_fields);
let record_fields_text_range = record_fields.syntax().text_range(); let record_fields_text_range = record_fields.syntax().text_range();
edit.edit_file(ctx.file_id()); edit.edit_file(ctx.vfs_file_id());
edit.replace(record_fields_text_range, tuple_fields.syntax().text()); edit.replace(record_fields_text_range, tuple_fields.syntax().text());
if let Either::Left(strukt) = strukt { if let Either::Left(strukt) = strukt {
@ -149,7 +149,7 @@ fn edit_struct_references(
let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
for (file_id, refs) in usages { for (file_id, refs) in usages {
edit.edit_file(file_id.file_id()); edit.edit_file(file_id.file_id(ctx.db()));
for r in refs { for r in refs {
process_struct_name_reference(ctx, r, edit); process_struct_name_reference(ctx, r, edit);
} }
@ -227,7 +227,7 @@ fn edit_field_references(
let def = Definition::Field(field); let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all(); let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages { for (file_id, refs) in usages {
edit.edit_file(file_id.file_id()); edit.edit_file(file_id.file_id(ctx.db()));
for r in refs { for r in refs {
if let Some(name_ref) = r.name.as_name_ref() { if let Some(name_ref) = r.name.as_name_ref() {
// Only edit the field reference if it's part of a `.field` access // Only edit the field reference if it's part of a `.field` access


@ -106,7 +106,7 @@ fn replace_usages(
target_module: &hir::Module, target_module: &hir::Module,
) { ) {
for (file_id, references) in usages.iter() { for (file_id, references) in usages.iter() {
edit.edit_file(file_id.file_id()); edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports = let refs_with_imports =
augment_references_with_imports(edit, ctx, references, struct_name, target_module); augment_references_with_imports(edit, ctx, references, struct_name, target_module);


@ -95,7 +95,7 @@ fn edit_struct_def(
let record_fields = ast::make::record_field_list(record_fields); let record_fields = ast::make::record_field_list(record_fields);
let tuple_fields_text_range = tuple_fields.syntax().text_range(); let tuple_fields_text_range = tuple_fields.syntax().text_range();
edit.edit_file(ctx.file_id()); edit.edit_file(ctx.vfs_file_id());
if let Either::Left(strukt) = strukt { if let Either::Left(strukt) = strukt {
if let Some(w) = strukt.where_clause() { if let Some(w) = strukt.where_clause() {
@ -142,7 +142,7 @@ fn edit_struct_references(
match node { match node {
ast::TupleStructPat(tuple_struct_pat) => { ast::TupleStructPat(tuple_struct_pat) => {
let file_range = ctx.sema.original_range_opt(&node)?; let file_range = ctx.sema.original_range_opt(&node)?;
edit.edit_file(file_range.file_id); edit.edit_file(file_range.file_id.file_id(ctx.db()));
edit.replace( edit.replace(
file_range.range, file_range.range,
ast::make::record_pat_with_fields( ast::make::record_pat_with_fields(
@ -197,7 +197,7 @@ fn edit_struct_references(
}; };
for (file_id, refs) in usages { for (file_id, refs) in usages {
edit.edit_file(file_id.file_id()); edit.edit_file(file_id.file_id(ctx.db()));
for r in refs { for r in refs {
for node in r.name.syntax().ancestors() { for node in r.name.syntax().ancestors() {
if edit_node(edit, node).is_some() { if edit_node(edit, node).is_some() {
@ -222,7 +222,7 @@ fn edit_field_references(
let def = Definition::Field(field); let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all(); let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages { for (file_id, refs) in usages {
edit.edit_file(file_id.file_id()); edit.edit_file(file_id.file_id(ctx.db()));
for r in refs { for r in refs {
if let Some(name_ref) = r.name.as_name_ref() { if let Some(name_ref) = r.name.as_name_ref() {
edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());


@ -66,7 +66,7 @@ fn destructure_struct_binding_impl(
let mut editor = builder.make_editor(data.ident_pat.syntax()); let mut editor = builder.make_editor(data.ident_pat.syntax());
destructure_pat(ctx, &mut editor, data, &field_names); destructure_pat(ctx, &mut editor, data, &field_names);
update_usages(ctx, &mut editor, data, &field_names.into_iter().collect()); update_usages(ctx, &mut editor, data, &field_names.into_iter().collect());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
} }
struct StructEditData { struct StructEditData {


@ -64,7 +64,7 @@ fn expand_record_rest_pattern(
editor.replace(old_field_list.syntax(), new_field_list.syntax()); editor.replace(old_field_list.syntax(), new_field_list.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }
@ -154,7 +154,7 @@ fn expand_tuple_struct_rest_pattern(
editor.replace(pat.syntax(), new_pat.syntax()); editor.replace(pat.syntax(), new_pat.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -1,8 +1,7 @@
use std::iter; use std::iter;
use either::Either; use either::Either;
use hir::{HasSource, HirFileIdExt, ModuleSource}; use hir::{HasSource, ModuleSource};
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{ use ide_db::{
FileId, FxHashMap, FxHashSet, FileId, FxHashMap, FxHashSet,
assists::AssistId, assists::AssistId,
@ -114,7 +113,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) = let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) =
module.get_usages_and_record_fields(ctx); module.get_usages_and_record_fields(ctx);
builder.edit_file(ctx.file_id()); builder.edit_file(ctx.vfs_file_id());
use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| { use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| {
builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}")); builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}"));
}); });
@ -126,7 +125,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let mut usages_to_be_processed_for_cur_file = vec![]; let mut usages_to_be_processed_for_cur_file = vec![];
for (file_id, usages) in usages_to_be_processed { for (file_id, usages) in usages_to_be_processed {
if file_id == ctx.file_id() { if file_id == ctx.vfs_file_id() {
usages_to_be_processed_for_cur_file = usages; usages_to_be_processed_for_cur_file = usages;
continue; continue;
} }
@ -136,7 +135,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
} }
} }
builder.edit_file(ctx.file_id()); builder.edit_file(ctx.vfs_file_id());
for (text_range, usage) in usages_to_be_processed_for_cur_file { for (text_range, usage) in usages_to_be_processed_for_cur_file {
builder.replace(text_range, usage); builder.replace(text_range, usage);
} }
@ -333,10 +332,7 @@ impl Module {
let mut use_stmts_set = FxHashSet::default(); let mut use_stmts_set = FxHashSet::default();
for (file_id, refs) in node_def.usages(&ctx.sema).all() { for (file_id, refs) in node_def.usages(&ctx.sema).all() {
let editioned_file_id = let source_file = ctx.sema.parse(file_id);
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let source_file = ctx.sema.parse(editioned_file_id);
let usages = refs.into_iter().filter_map(|FileReference { range, .. }| { let usages = refs.into_iter().filter_map(|FileReference { range, .. }| {
// handle normal usages // handle normal usages
let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?; let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?;
@ -368,7 +364,7 @@ impl Module {
None None
}); });
refs_in_files.entry(file_id.file_id()).or_default().extend(usages); refs_in_files.entry(file_id.file_id(ctx.db())).or_default().extend(usages);
} }
} }
@ -463,10 +459,7 @@ impl Module {
let file_id = ctx.file_id(); let file_id = ctx.file_id();
let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all(); let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
let editioned_file_id = let file = ctx.sema.parse(file_id);
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let file = ctx.sema.parse(editioned_file_id);
// track uses which does not exists in `Use` // track uses which does not exists in `Use`
let mut uses_exist_in_sel = false; let mut uses_exist_in_sel = false;
@ -492,7 +485,7 @@ impl Module {
ctx, ctx,
curr_parent_module, curr_parent_module,
selection_range, selection_range,
file_id.file_id(), file_id.file_id(ctx.db()),
); );
// Find use stmt that use def in current file // Find use stmt that use def in current file
@ -679,7 +672,7 @@ fn check_def_in_mod_and_out_sel(
let have_same_parent = if let Some(ast_module) = &curr_parent_module { let have_same_parent = if let Some(ast_module) = &curr_parent_module {
ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db())) ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db()))
} else { } else {
source.file_id.original_file(ctx.db()) == curr_file_id source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id
}; };
let in_sel = !selection_range.contains_range(source.value.syntax().text_range()); let in_sel = !selection_range.contains_range(source.value.syntax().text_range());
@ -695,7 +688,7 @@ fn check_def_in_mod_and_out_sel(
(Some(ast_module), Some(hir_module)) => { (Some(ast_module), Some(hir_module)) => {
ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module) ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module)
} }
_ => source.file_id.original_file(ctx.db()) == curr_file_id, _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id,
}; };
if have_same_parent { if have_same_parent {
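
The same file shows the payoff on the parsing side: usage search already yields the interned `EditionedFileId`, so the manual wrapper construction disappears and the id flows straight into `Semantics::parse`. Condensed from the hunks above:

// Before: build a `base_db::EditionedFileId` by hand, then parse.
//   let editioned_file_id =
//       ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
//   let source_file = ctx.sema.parse(editioned_file_id);
//
// After: the id coming out of the usages map is already the interned one.
//   let source_file = ctx.sema.parse(file_id);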


@ -74,7 +74,7 @@ pub(crate) fn extract_struct_from_enum_variant(
def_file_references = Some(references); def_file_references = Some(references);
continue; continue;
} }
builder.edit_file(file_id.file_id()); builder.edit_file(file_id.file_id(ctx.db()));
let processed = process_references( let processed = process_references(
ctx, ctx,
builder, builder,
@ -87,7 +87,7 @@ pub(crate) fn extract_struct_from_enum_variant(
apply_references(ctx.config.insert_use, path, node, import, edition) apply_references(ctx.config.insert_use, path, node, import, edition)
}); });
} }
builder.edit_file(ctx.file_id()); builder.edit_file(ctx.vfs_file_id());
let variant = builder.make_mut(variant.clone()); let variant = builder.make_mut(variant.clone());
if let Some(references) = def_file_references { if let Some(references) = def_file_references {


@ -87,7 +87,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
], ],
); );
builder.add_file_edits(ctx.file_id(), edit); builder.add_file_edits(ctx.vfs_file_id(), edit);
}, },
) )
} }


@ -263,7 +263,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
edit.add_file_edits(ctx.file_id(), editor); edit.add_file_edits(ctx.vfs_file_id(), editor);
edit.rename(); edit.rename();
}, },
); );
@ -378,7 +378,7 @@ fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String>
return None; return None;
} }
match LexedStr::single_token(ctx.file_id().edition(), &inner) { match LexedStr::single_token(ctx.edition(), &inner) {
Some((SyntaxKind::IDENT, None)) => Some(inner), Some((SyntaxKind::IDENT, None)) => Some(inner),
_ => None, _ => None,
} }


@ -1,6 +1,4 @@
use hir::{ use hir::{HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef, db::HirDatabase};
HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, db::HirDatabase,
};
use ide_db::FileId; use ide_db::FileId;
use syntax::{ use syntax::{
AstNode, TextRange, AstNode, TextRange,
@ -132,7 +130,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
); );
acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| { acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
edit.edit_file(target_file.file_id()); edit.edit_file(target_file.file_id(ctx.db()));
let vis_owner = edit.make_mut(vis_owner); let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
@ -159,7 +157,11 @@ fn target_data_for_def(
let in_file_syntax = source.syntax(); let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id; let file_id = in_file_syntax.file_id;
let range = in_file_syntax.value.text_range(); let range = in_file_syntax.value.text_range();
Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db).file_id())) Some((
ast::AnyHasVisibility::new(source.value),
range,
file_id.original_file(db).file_id(db),
))
} }
let target_name; let target_name;
@ -201,7 +203,7 @@ fn target_data_for_def(
let in_file_source = m.declaration_source(db)?; let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db); let file_id = in_file_source.file_id.original_file(db);
let range = in_file_source.value.syntax().text_range(); let range = in_file_source.value.syntax().text_range();
(ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id()) (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id(db))
} }
// FIXME // FIXME
hir::ModuleDef::Macro(_) => return None, hir::ModuleDef::Macro(_) => return None,
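
The dropped `HirFileIdExt` import is the same story: `original_file(db)` now yields the interned `EditionedFileId`, and one more db-threaded call recovers the `vfs::FileId` for the returned target. Condensed from the hunk above:

//   let file_id = in_file_source.file_id.original_file(db); // EditionedFileId
//   let range = in_file_source.value.syntax().text_range();
//   (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id(db))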


@ -55,7 +55,7 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
editor.replace(lhs.syntax(), rhs.syntax()); editor.replace(lhs.syntax(), rhs.syntax());
editor.replace(rhs.syntax(), lhs.syntax()); editor.replace(rhs.syntax(), lhs.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -55,7 +55,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
editor.replace(next.clone(), prev.clone()); editor.replace(next.clone(), prev.clone());
} }
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}) })
} }


@ -35,7 +35,7 @@ pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let mut editor = builder.make_editor(parent.syntax()); let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone()); editor.replace(before.clone(), after.clone());
editor.replace(after, before); editor.replace(after, before);
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}) })
} }


@ -36,7 +36,7 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let mut editor = builder.make_editor(parent.syntax()); let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone()); editor.replace(before.clone(), after.clone());
editor.replace(after, before); editor.replace(after, before);
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists}; use crate::assist_context::{AssistContext, Assists};
use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module}; use hir::{HasVisibility, HirDisplay, Module};
use ide_db::{ use ide_db::{
FileId, FileId,
assists::AssistId, assists::AssistId,
@ -134,9 +134,9 @@ fn target_data_for_generate_constant(
.any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n')); .any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'));
let post_string = let post_string =
if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") }; if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
Some((offset, indent + 1, Some(file_id.file_id()), post_string)) Some((offset, indent + 1, Some(file_id.file_id(ctx.db())), post_string))
} }
_ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())), _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id(ctx.db())), "\n".into())),
} }
} }


@ -124,7 +124,7 @@ impl Field {
) -> Option<Field> { ) -> Option<Field> {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.sema.file_to_module_def(ctx.file_id())?; let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
let edition = module.krate().edition(ctx.db()); let edition = module.krate().edition(ctx.db());
let (name, range, ty) = match f { let (name, range, ty) = match f {


@ -66,7 +66,7 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>)
if let Some(it) = enum_node.variant_list() { if let Some(it) = enum_node.variant_list() {
it.add_variant(&mut editor, &variant); it.add_variant(&mut editor, &variant);
} }
builder.add_file_edits(file_id, editor); builder.add_file_edits(file_id.file_id(ctx.db()), editor);
}) })
} }


@ -117,7 +117,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
} }
} }
builder.add_file_edits(ctx.file_id(), edit); builder.add_file_edits(ctx.vfs_file_id(), edit);
}, },
); );
} }


@ -1,8 +1,7 @@
use hir::{ use hir::{
Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, StructKind, Type,
StructKind, Type, TypeInfo, TypeInfo,
}; };
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{ use ide_db::{
FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap, FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
defs::{Definition, NameRefClass}, defs::{Definition, NameRefClass},
@ -207,14 +206,11 @@ fn get_adt_source(
) -> Option<(Option<ast::Impl>, FileId)> { ) -> Option<(Option<ast::Impl>, FileId)> {
let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db); let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
let editioned_file_id = let file = ctx.sema.parse(range.file_id);
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), range.file_id);
let file = ctx.sema.parse(editioned_file_id);
let adt_source = let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]) find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])
.map(|impl_| (impl_, range.file_id.file_id())) .map(|impl_| (impl_, range.file_id.file_id(ctx.db())))
} }
struct FunctionBuilder { struct FunctionBuilder {
@ -501,7 +497,7 @@ fn get_fn_target(
target_module: Option<Module>, target_module: Option<Module>,
call: CallExpr, call: CallExpr,
) -> Option<(GeneratedFunctionTarget, FileId)> { ) -> Option<(GeneratedFunctionTarget, FileId)> {
let mut file = ctx.file_id().into(); let mut file = ctx.vfs_file_id();
let target = match target_module { let target = match target_module {
Some(target_module) => { Some(target_module) => {
let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module); let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
@ -1191,7 +1187,7 @@ fn next_space_for_fn_in_module(
} }
}; };
(file.file_id(), assist_item) (file.file_id(db), assist_item)
} }
#[derive(Clone, Copy)] #[derive(Clone, Copy)]


@ -71,6 +71,7 @@ use crate::{
// ``` // ```
pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let def_file = ctx.file_id(); let def_file = ctx.file_id();
let vfs_def_file = ctx.vfs_file_id();
let name = ctx.find_node_at_offset::<ast::Name>()?; let name = ctx.find_node_at_offset::<ast::Name>()?;
let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?; let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
let func_body = ast_func.body()?; let func_body = ast_func.body()?;
@ -106,7 +107,8 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_file_usage = usages.references.remove(&def_file); let current_file_usage = usages.references.remove(&def_file);
let mut remove_def = true; let mut remove_def = true;
let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| { let mut inline_refs_for_file = |file_id: EditionedFileId, refs: Vec<FileReference>| {
let file_id = file_id.file_id(ctx.db());
builder.edit_file(file_id); builder.edit_file(file_id);
let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate()); let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate());
let count = refs.len(); let count = refs.len();
@ -143,7 +145,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
} }
match current_file_usage { match current_file_usage {
Some(refs) => inline_refs_for_file(def_file, refs), Some(refs) => inline_refs_for_file(def_file, refs),
None => builder.edit_file(def_file), None => builder.edit_file(vfs_def_file),
} }
if remove_def { if remove_def {
builder.delete(ast_func.syntax().text_range()); builder.delete(ast_func.syntax().text_range());
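
`inline_into_callers` now keeps both forms of the definition file around, since the usages map is keyed by `EditionedFileId` while `edit_file` wants the plain `vfs::FileId`. Condensed from the hunks above:

//   let def_file = ctx.file_id();         // EditionedFileId, keys `usages.references`
//   let vfs_def_file = ctx.vfs_file_id(); // vfs::FileId, what `edit_file` takes
//   ...
//   match current_file_usage {
//       Some(refs) => inline_refs_for_file(def_file, refs),
//       None => builder.edit_file(vfs_def_file),
//   }
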
@ -194,7 +196,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let name_ref: ast::NameRef = ctx.find_node_at_offset()?; let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
let call_info = CallInfo::from_name_ref( let call_info = CallInfo::from_name_ref(
name_ref.clone(), name_ref.clone(),
ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(), ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(),
)?; )?;
let (function, label) = match &call_info.node { let (function, label) = match &call_info.node {
ast::CallableExpr::Call(call) => { ast::CallableExpr::Call(call) => {


@ -110,7 +110,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -38,7 +38,7 @@ use crate::{AssistContext, AssistId, Assists};
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?; let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let macro_call = ctx.sema.to_def(&unexpanded)?; let macro_call = ctx.sema.to_def(&unexpanded)?;
let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(); let target_crate_id = ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into();
let text_range = unexpanded.syntax().text_range(); let text_range = unexpanded.syntax().text_range();
acc.add( acc.add(
@ -46,8 +46,8 @@ pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
"Inline macro".to_owned(), "Inline macro".to_owned(),
text_range, text_range,
|builder| { |builder| {
let expanded = ctx.sema.parse_or_expand(macro_call.as_file()); let expanded = ctx.sema.parse_or_expand(macro_call.into());
let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file()); let span_map = ctx.sema.db.expansion_span_map(macro_call);
// Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation, // Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation,
// which can be very costly for big macros when it is done *even without the assist being invoked*. // which can be very costly for big macros when it is done *even without the assist being invoked*.
let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id); let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id);
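
Because a macro file is now identified by its `MacroCallId`, the id converts directly into the file enum (`macro_call.into()`) instead of round-tripping through `as_file()`, and `expansion_span_map` takes the call id as-is. An illustrative stand-in for why the `.into()` works (the real `HirFileId` layout may differ):

#[derive(Clone, Copy)]
struct MacroCallId(u32);

#[derive(Clone, Copy)]
enum HirFileId {
    File(u32),          // an ordinary (editioned) file
    Macro(MacroCallId), // a macro expansion
}

impl From<MacroCallId> for HirFileId {
    fn from(id: MacroCallId) -> Self {
        HirFileId::Macro(id) // what `macro_call.into()` resolves to above
    }
}

fn parse_or_expand(_file: HirFileId) {
    // stands in for `sema.parse_or_expand`
}

fn demo(macro_call: MacroCallId) {
    parse_or_expand(macro_call.into()); // was: macro_call.as_file()
}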


@ -87,17 +87,17 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>)
builder.replace(target, replacement); builder.replace(target, replacement);
} }
if file_id == ctx.file_id() { if file_id == ctx.vfs_file_id() {
builder.delete(ast_alias.syntax().text_range()); builder.delete(ast_alias.syntax().text_range());
definition_deleted = true; definition_deleted = true;
} }
}; };
for (file_id, refs) in usages.into_iter() { for (file_id, refs) in usages.into_iter() {
inline_refs_for_file(file_id.file_id(), refs); inline_refs_for_file(file_id.file_id(ctx.db()), refs);
} }
if !definition_deleted { if !definition_deleted {
builder.edit_file(ctx.file_id()); builder.edit_file(ctx.vfs_file_id());
builder.delete(ast_alias.syntax().text_range()); builder.delete(ast_alias.syntax().text_range());
} }
}, },


@ -59,7 +59,7 @@ pub(crate) fn introduce_named_type_parameter(
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -105,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
builder.delete(range_to_delete); builder.delete(range_to_delete);
let usages = usages.iter().flat_map(|(file_id, usages)| { let usages = usages.iter().flat_map(|(file_id, usages)| {
let edition = file_id.edition(); let edition = file_id.edition(ctx.db());
usages.iter().map(move |usage| (edition, usage.range)) usages.iter().map(move |usage| (edition, usage.range))
}); });
for (edition, range) in usages { for (edition, range) in usages {
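
Editions can differ between the files a constant is used in, so the loop reads each file's edition through the database once and pairs it with every usage range. A self-contained sketch of that iterator shape, with stand-in types:

#[derive(Clone, Copy)]
struct Edition(u8);

#[derive(Clone, Copy)]
struct EditionedFileId(usize);

struct Db {
    editions: Vec<Edition>, // edition stored behind the interned handle
}

impl EditionedFileId {
    fn edition(self, db: &Db) -> Edition {
        db.editions[self.0] // was a plain field read, now a db lookup
    }
}

// Pair every usage range with its file's edition, reading the edition once
// per file, mirroring the `flat_map` in the hunk above.
fn editions_per_usage(db: &Db, usages: &[(EditionedFileId, Vec<u32>)]) -> Vec<(Edition, u32)> {
    usages
        .iter()
        .flat_map(|(file_id, ranges)| {
            let edition = file_id.edition(db);
            ranges.iter().map(move |&range| (edition, range))
        })
        .collect()
}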


@ -22,7 +22,7 @@ use crate::{
// ``` // ```
pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?; let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
let module = ctx.sema.file_to_module_def(ctx.file_id())?; let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
// Enable this assist if the user select all "meaningful" content in the source file // Enable this assist if the user select all "meaningful" content in the source file
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed()); let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range()); let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@ -38,13 +38,13 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let target = source_file.syntax().text_range(); let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.as_str().to_smolstr(); let module_name = module.name(ctx.db())?.as_str().to_smolstr();
let path = format!("../{module_name}.rs"); let path = format!("../{module_name}.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
acc.add( acc.add(
AssistId::refactor("move_from_mod_rs"), AssistId::refactor("move_from_mod_rs"),
format!("Convert {module_name}/mod.rs to {module_name}.rs"), format!("Convert {module_name}/mod.rs to {module_name}.rs"),
target, target,
|builder| { |builder| {
builder.move_file(ctx.file_id(), dst); builder.move_file(ctx.vfs_file_id(), dst);
}, },
) )
} }


@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
buf, buf,
); );
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
builder.create_file(dst, contents); builder.create_file(dst, contents);
}, },
) )


@ -22,7 +22,7 @@ use crate::{
// ``` // ```
pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?; let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
let module = ctx.sema.file_to_module_def(ctx.file_id())?; let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
// Enable this assist if the user select all "meaningful" content in the source file // Enable this assist if the user select all "meaningful" content in the source file
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed()); let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range()); let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@ -38,13 +38,13 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let target = source_file.syntax().text_range(); let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.as_str().to_smolstr(); let module_name = module.name(ctx.db())?.as_str().to_smolstr();
let path = format!("./{module_name}/mod.rs"); let path = format!("./{module_name}/mod.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
acc.add( acc.add(
AssistId::refactor("move_to_mod_rs"), AssistId::refactor("move_to_mod_rs"),
format!("Convert {module_name}.rs to {module_name}/mod.rs"), format!("Convert {module_name}.rs to {module_name}/mod.rs"),
target, target,
|builder| { |builder| {
builder.move_file(ctx.file_id(), dst); builder.move_file(ctx.vfs_file_id(), dst);
}, },
) )
} }


@ -28,6 +28,6 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
_ => (), _ => (),
} }
editor.delete(mut_token); editor.delete(mut_token);
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}) })
} }


@ -59,7 +59,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
} }
editor.replace(parens.syntax(), expr.syntax()); editor.replace(parens.syntax(), expr.syntax());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -1,6 +1,6 @@
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; use hir::{FileRange, InFile, InRealFile, Module, ModuleSource};
use ide_db::text_edit::TextRange; use ide_db::text_edit::TextRange;
use ide_db::{ use ide_db::{
FxHashMap, RootDatabase, FxHashMap, RootDatabase,


@ -1,6 +1,4 @@
use ide_db::{ use ide_db::{EditionedFileId, defs::Definition, search::FileReference};
EditionedFileId, base_db::salsa::AsDynDatabase, defs::Definition, search::FileReference,
};
use syntax::{ use syntax::{
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, T, TextRange, AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, T, TextRange,
algo::{find_node_at_range, least_common_ancestor_element}, algo::{find_node_at_range, least_common_ancestor_element},
@ -90,7 +88,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
for (file_id, references) in fn_def.usages(&ctx.sema).all() { for (file_id, references) in fn_def.usages(&ctx.sema).all() {
process_usages(ctx, builder, file_id, references, param_position, is_self_present); process_usages(ctx, builder, file_id, references, param_position, is_self_present);
} }
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }
@ -98,15 +96,13 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
fn process_usages( fn process_usages(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
builder: &mut SourceChangeBuilder, builder: &mut SourceChangeBuilder,
file_id: EditionedFileId, editioned_file_id: EditionedFileId,
references: Vec<FileReference>, references: Vec<FileReference>,
arg_to_remove: usize, arg_to_remove: usize,
is_self_present: bool, is_self_present: bool,
) { ) {
let editioned_file_id_wrapper = let source_file = ctx.sema.parse(editioned_file_id);
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id); let file_id = editioned_file_id.file_id(ctx.db());
let source_file = ctx.sema.parse(editioned_file_id_wrapper);
builder.edit_file(file_id); builder.edit_file(file_id);
let possible_ranges = references let possible_ranges = references
.into_iter() .into_iter()


@ -82,7 +82,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
} }
} }
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -106,7 +106,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.zip(sorted) .zip(sorted)
.for_each(|(old, new)| editor.replace(old.syntax(), new.syntax())); .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax()));
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }


@ -1,4 +1,4 @@
use hir::{InFile, MacroFileIdExt, ModuleDef}; use hir::{InFile, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools; use itertools::Itertools;
use syntax::{ use syntax::{


@ -142,7 +142,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
let mut editor = builder.make_editor(if_expr.syntax()); let mut editor = builder.make_editor(if_expr.syntax());
editor.replace(if_expr.syntax(), expr.syntax()); editor.replace(if_expr.syntax(), expr.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }
@ -291,7 +291,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
let mut editor = builder.make_editor(match_expr.syntax()); let mut editor = builder.make_editor(match_expr.syntax());
editor.replace(match_expr.syntax(), if_let_expr.syntax()); editor.replace(match_expr.syntax(), if_let_expr.syntax());
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor); builder.add_file_edits(ctx.vfs_file_id(), editor);
}, },
) )
} }
