Make HirFileId, EditionedFileId and macro files Salsa structs

And make more queries non-interned.

Also flip the default for queries: the default is now not to intern, and to intern a query you need to say `invoke_interned`.
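
For reference, this is roughly what the flipped default looks like on a query trait, abbreviated from the `ExpandDatabase` hunks below. This is a minimal sketch, not the full trait; the `#[query_group::query_group]` wrapper attribute is assumed here, since it is not visible in these hunks:

#[query_group::query_group]
pub trait ExpandDatabase: RootQueryDb {
    // After this change a plain query (with or without `#[salsa::invoke]`)
    // is no longer interned.
    #[salsa::invoke(ast_id_map)]
    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

    // Interning the query key is now opt-in via `invoke_interned`.
    #[salsa::invoke_interned(proc_macro_span)]
    fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
}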
Chayim Refael Friedman 2025-04-18 12:10:29 +03:00
parent 02ade79631
commit c58ddafe90
195 changed files with 1473 additions and 1525 deletions
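
The per-file hunks that follow mostly repeat the same two mechanical changes, consolidated in this sketch: `MacroCallId` becomes a Salsa interned struct wrapping its `MacroCallLoc`, and `HirFileId` becomes an ordinary enum over the two Salsa structs, so call sites match on it directly instead of going through `HirFileIdRepr`. The `edition_of` helper below is hypothetical and only mirrors the new `HirFileId::edition` further down in the diff:

#[salsa::interned(no_lifetime)]
pub struct MacroCallId {
    pub loc: MacroCallLoc,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum HirFileId {
    FileId(EditionedFileId),
    MacroFile(MacroCallId),
}

// Hypothetical helper mirroring `HirFileId::edition` from this commit:
// call sites match on the enum directly and look the interned loc back up.
fn edition_of(db: &dyn ExpandDatabase, file_id: HirFileId) -> Edition {
    match file_id {
        HirFileId::FileId(file) => file.editioned_file_id(db).edition(),
        HirFileId::MacroFile(call) => call.loc(db).def.edition,
    }
}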

View file

@ -1,8 +1,8 @@
//! Builtin attributes.
use intern::sym;
use span::{MacroCallId, Span};
use span::Span;
use crate::{ExpandResult, MacroCallKind, db::ExpandDatabase, name, tt};
use crate::{ExpandResult, MacroCallId, MacroCallKind, db::ExpandDatabase, name, tt};
use super::quote;

View file

@ -4,13 +4,13 @@ use intern::sym;
use itertools::{Itertools, izip};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
use span::{Edition, MacroCallId, Span, SyntaxContext};
use span::{Edition, Span, SyntaxContext};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
use crate::{
ExpandError, ExpandResult,
ExpandError, ExpandResult, MacroCallId,
builtin::quote::{dollar_crate, quote},
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
@ -59,7 +59,7 @@ impl BuiltinDeriveExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, span, tt)
}
}

View file

@ -8,7 +8,7 @@ use intern::{
sym::{self},
};
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, EditionedFileId, FileId, Span};
use span::{Edition, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
@ -17,7 +17,7 @@ use syntax::{
use syntax_bridge::syntax_node_to_token_tree;
use crate::{
ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
builtin::quote::{WithDelimiter, dollar_crate, quote},
db::ExpandDatabase,
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
@ -72,7 +72,7 @@ impl BuiltinFnLikeExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -89,7 +89,7 @@ impl EagerExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -224,7 +224,7 @@ fn assert_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mut iter = tt.iter();
@ -345,7 +345,7 @@ fn panic_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::panic_2021.clone()
@ -376,7 +376,7 @@ fn unreachable_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::unreachable_2021.clone()
@ -407,7 +407,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
let Some(expn) = span.ctx.outer_expn(db) else {
break false;
};
let expn = db.lookup_intern_macro_call(expn);
let expn = db.lookup_intern_macro_call(expn.into());
// FIXME: Record allow_internal_unstable in the macro def (not been done yet because it
// would consume quite a bit extra memory for all call locs...)
// if let Some(features) = expn.def.allow_internal_unstable {
@ -663,7 +663,7 @@ fn relative_file(
err_span: Span,
) -> Result<EditionedFileId, ExpandError> {
let lookup = db.lookup_intern_macro_call(call_id);
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(db);
let path = AnchoredPath { anchor: call_site, path: path_str };
let res: FileId = db
.resolve_path(path)
@ -672,7 +672,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
Ok(EditionedFileId::new(res, lookup.krate.data(db).edition))
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
}
}
@ -731,10 +731,8 @@ fn include_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let (file_id_wrapper, editioned_file_id) = match include_input_to_file_id(db, arg_id, tt) {
Ok(editioned_file_id) => {
(base_db::EditionedFileId::new(db, editioned_file_id), editioned_file_id)
}
let editioned_file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(editioned_file_id) => editioned_file_id,
Err(e) => {
return ExpandResult::new(
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
@ -745,7 +743,7 @@ fn include_expand(
let span_map = db.real_span_map(editioned_file_id);
// FIXME: Parse errors
ExpandResult::ok(syntax_node_to_token_tree(
&db.parse(file_id_wrapper).syntax_node(),
&db.parse(editioned_file_id).syntax_node(),
SpanMap::RealSpanMap(span_map),
span,
syntax_bridge::DocCommentDesugarMode::ProcMacro,
@ -807,7 +805,7 @@ fn include_str_expand(
}
};
let text = db.file_text(file_id.file_id());
let text = db.file_text(file_id.file_id(db));
let text = &*text.text(db);
ExpandResult::ok(quote!(call_site =>#text))

View file

@ -4,19 +4,15 @@ use base_db::{Crate, RootQueryDb};
use either::Either;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use salsa::plumbing::AsId;
use span::{
AstIdMap, Edition, EditionedFileId, HirFileId, HirFileIdRepr, MacroCallId, MacroFileId, Span,
SyntaxContext,
};
use span::{AstIdMap, Edition, Span, SyntaxContext};
use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
use triomphe::Arc;
use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, ExpandError, ExpandResult, ExpandTo, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::{AttrId, collect_attrs},
builtin::pseudo_derive_attr_expansion,
cfg_process,
@ -63,9 +59,10 @@ pub trait ExpandDatabase: RootQueryDb {
fn proc_macros(&self) -> Arc<ProcMacros>;
/// Incrementality query to prevent queries from directly depending on `ExpandDatabase::proc_macros`.
#[salsa::invoke_actual(crate::proc_macro::proc_macros_for_crate)]
#[salsa::invoke(crate::proc_macro::proc_macros_for_crate)]
fn proc_macros_for_crate(&self, krate: Crate) -> Option<Arc<CrateProcMacros>>;
#[salsa::invoke(ast_id_map)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
@ -75,7 +72,7 @@ pub trait ExpandDatabase: RootQueryDb {
#[salsa::lru(512)]
fn parse_macro_expansion(
&self,
macro_file: span::MacroFileId,
macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
@ -84,7 +81,7 @@ pub trait ExpandDatabase: RootQueryDb {
#[salsa::transparent]
#[salsa::invoke(crate::span_map::expansion_span_map)]
fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
fn expansion_span_map(&self, file_id: MacroCallId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
@ -136,6 +133,7 @@ pub trait ExpandDatabase: RootQueryDb {
/// directly depend on as that would cause to frequent invalidations, mainly because of the
/// parse queries being LRU cached. If they weren't the invalidations would only happen if the
/// user wrote in the file that defines the proc-macro.
#[salsa::invoke_interned(proc_macro_span)]
fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
@ -149,30 +147,17 @@ pub trait ExpandDatabase: RootQueryDb {
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
#[salsa::interned(no_lifetime, id = span::MacroCallId)]
pub struct MacroCallWrapper {
pub loc: MacroCallLoc,
}
fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
MacroCallWrapper::new(db, macro_call).0
}
fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
MacroCallWrapper::ingredient(db).data(db.as_dyn_database(), macro_call.as_id()).0.clone()
}
#[salsa::interned(no_lifetime, id = span::SyntaxContext)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContext {
match file.repr() {
HirFileIdRepr::FileId(_) => SyntaxContext::root(edition),
HirFileIdRepr::MacroFile(m) => {
let kind = db.lookup_intern_macro_call(m.macro_call_id).kind;
db.macro_arg_considering_derives(m.macro_call_id, &kind).2.ctx
match file {
HirFileId::FileId(_) => SyntaxContext::root(edition),
HirFileId::MacroFile(m) => {
let kind = db.lookup_intern_macro_call(m).kind;
db.macro_arg_considering_derives(m, &kind).2.ctx
}
}
}
@ -300,9 +285,9 @@ pub fn expand_speculative(
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_def_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@ -346,20 +331,16 @@ pub fn expand_speculative(
Some((node.syntax_node(), token))
}
fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc<AstIdMap> {
fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> triomphe::Arc<AstIdMap> {
triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
db.parse(file_id).syntax_node()
}
HirFileIdRepr::MacroFile(macro_file) => {
match file_id {
HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileId::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
}
@ -369,14 +350,13 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFileId,
macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::info_span!("parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
let def_edition = loc.def.edition;
let expand_to = loc.expand_to();
let mbe::ValueResult { value: (tt, matched_arm), err } =
macro_expand(db, macro_file.macro_call_id, loc);
let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
let (parse, mut rev_token_map) = token_tree_to_syntax_node(
db,
@ -397,7 +377,7 @@ fn parse_macro_expansion_error(
macro_call_id: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>> {
let e: ExpandResult<Arc<[SyntaxError]>> =
db.parse_macro_expansion(MacroFileId { macro_call_id }).map(|it| Arc::from(it.0.errors()));
db.parse_macro_expansion(macro_call_id).map(|it| Arc::from(it.0.errors()));
if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) }
}
@ -405,16 +385,11 @@ pub(crate) fn parse_with_map(
db: &dyn ExpandDatabase,
file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
(
db.parse(file_id).to_syntax(),
SpanMap::RealSpanMap(db.real_span_map(editioned_file_id)),
)
match file_id {
HirFileId::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
HirFileId::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
@ -734,9 +709,9 @@ fn expand_proc_macro(
loc.krate,
&macro_arg,
attr_arg,
span_with_def_site_ctxt(db, span, id, loc.def.edition),
span_with_call_site_ctxt(db, span, id, loc.def.edition),
span_with_mixed_site_ctxt(db, span, id, loc.def.edition),
span_with_def_site_ctxt(db, span, id.into(), loc.def.edition),
span_with_call_site_ctxt(db, span, id.into(), loc.def.edition),
span_with_mixed_site_ctxt(db, span, id.into(), loc.def.edition),
)
};
@ -784,3 +759,11 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
})
}
}
fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
MacroCallId::new(db, macro_call)
}
fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
macro_call.loc(db)
}

View file

@ -2,14 +2,14 @@
use base_db::Crate;
use intern::sym;
use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContext};
use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, ast};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
attrs::RawAttrs,
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
@ -42,7 +42,10 @@ impl DeclarativeMacroExpander {
.mac
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition),
|s| {
s.ctx =
apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition)
},
span,
loc.def.edition,
)
@ -106,7 +109,8 @@ impl DeclarativeMacroExpander {
def_crate.data(db).edition
} else {
// UNWRAP-SAFETY: Only the root context has no outer expansion
let krate = db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap()).def.krate;
let krate =
db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap().into()).def.krate;
krate.data(db).edition
}
};
@ -160,9 +164,9 @@ impl DeclarativeMacroExpander {
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
let edition = ctx_edition(match id.file_id.repr() {
HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt,
HirFileIdRepr::FileId(file) => SyntaxContext::root(file.edition()),
let edition = ctx_edition(match id.file_id {
HirFileId::MacroFile(macro_file) => macro_file.lookup(db).ctxt,
HirFileId::FileId(file) => SyntaxContext::root(file.edition(db)),
});
Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
}

View file

@ -63,7 +63,7 @@ pub fn expand_eager_macro_input(
#[allow(deprecated)] // builtin eager macros are never derives
let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
db.parse_macro_expansion(arg_id);
let mut arg_map = ExpansionSpanMap::empty();
@ -73,7 +73,7 @@ pub fn expand_eager_macro_input(
&arg_exp_map,
&mut arg_map,
TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
InFile::new(arg_id.into(), arg_exp.syntax_node()),
krate,
call_site,
resolver,
@ -134,10 +134,8 @@ fn lazy_expand(
call_site,
);
eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
.map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
db.parse_macro_expansion(id).map(|parse| (InFile::new(id.into(), parse.0), parse.1))
}
fn eager_macro_recur(
@ -224,7 +222,7 @@ fn eager_macro_recur(
call_id,
);
let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
db.parse_macro_expansion(call_id);
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));

View file

@ -2,14 +2,11 @@
use std::borrow::Borrow;
use either::Either;
use span::{
AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId,
SyntaxContext,
};
use span::{AstIdNode, ErasedFileAstId, FileAstId, FileId, SyntaxContext};
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize};
use crate::{
MacroFileIdExt, MacroKind,
EditionedFileId, HirFileId, MacroCallId, MacroKind,
db::{self, ExpandDatabase},
map_node_range_up, map_node_range_up_rooted, span_for_offset,
};
@ -27,7 +24,7 @@ pub struct InFileWrapper<FileKind, T> {
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroCallId, T>;
pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@ -36,12 +33,13 @@ pub struct FilePositionWrapper<FileKind> {
pub offset: TextSize,
}
pub type HirFilePosition = FilePositionWrapper<HirFileId>;
pub type MacroFilePosition = FilePositionWrapper<MacroFileId>;
pub type MacroFilePosition = FilePositionWrapper<MacroCallId>;
pub type FilePosition = FilePositionWrapper<EditionedFileId>;
impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> {
fn from(value: FilePositionWrapper<EditionedFileId>) -> Self {
FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset }
impl FilePosition {
#[inline]
pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FilePositionWrapper<FileId> {
FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@ -50,12 +48,13 @@ pub struct FileRangeWrapper<FileKind> {
pub range: TextRange,
}
pub type HirFileRange = FileRangeWrapper<HirFileId>;
pub type MacroFileRange = FileRangeWrapper<MacroFileId>;
pub type MacroFileRange = FileRangeWrapper<MacroCallId>;
pub type FileRange = FileRangeWrapper<EditionedFileId>;
impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> {
fn from(value: FileRangeWrapper<EditionedFileId>) -> Self {
FileRangeWrapper { file_id: value.file_id.into(), range: value.range }
impl FileRange {
#[inline]
pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper<FileId> {
FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range }
}
}
@ -163,12 +162,10 @@ trait FileIdToSyntax: Copy {
impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
let file_id = base_db::EditionedFileId::new(db, self);
db.parse(file_id).syntax_node()
db.parse(self).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
impl FileIdToSyntax for MacroCallId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
@ -221,7 +218,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db)
.syntax()
.cloned()
@ -238,7 +235,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db)
.syntax()
.cloned()
@ -278,11 +275,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let file_id = match self.file_id {
HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.borrow().clone() });
}
HirFileIdRepr::MacroFile(m)
HirFileId::MacroFile(m)
if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
{
m
@ -296,11 +293,9 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
self.value.borrow().text_range(),
)?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let kind = self.kind();
let value = db
.parse(file_id)
.parse(editioned_file_id)
.syntax_node()
.covering_element(range)
.ancestors()
@ -332,9 +327,9 @@ impl InMacroFile<SyntaxToken> {
impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
match self.file_id {
HirFileId::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset(
db,
&db.expansion_span_map(mac_file),
@ -348,7 +343,7 @@ impl InFile<SyntaxToken> {
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
}
}
@ -356,11 +351,11 @@ impl InFile<SyntaxToken> {
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
match self.file_id {
HirFileId::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset(
db,
&db.expansion_span_map(mac_file),
@ -386,15 +381,15 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> (FileRange, SyntaxContext) {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition()))
match self.file_id {
HirFileId::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition(db)))
}
HirFileIdRepr::MacroFile(mac_file) => {
HirFileId::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
(loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
}
}
@ -403,13 +398,13 @@ impl InFile<TextRange> {
}
pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match self.file_id {
HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
}
}
@ -421,13 +416,13 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match self.file_id {
HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range_with_body(db)
}
}
@ -439,12 +434,12 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContext)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => Some((
match self.file_id {
HirFileId::FileId(file_id) => Some((
FileRange { file_id, range: self.value },
SyntaxContext::root(file_id.edition()),
SyntaxContext::root(file_id.edition(db)),
)),
HirFileIdRepr::MacroFile(mac_file) => {
HirFileId::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}
}
@ -455,11 +450,11 @@ impl<N: AstNode> InFile<N> {
pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let file_id = match self.file_id {
HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value });
}
HirFileIdRepr::MacroFile(m) => m,
HirFileId::MacroFile(m) => m,
};
if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
return None;
@ -471,10 +466,8 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().text_range(),
)?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let anc = db.parse(editioned_file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(editioned_file_id, value))
}
@ -482,9 +475,9 @@ impl<N: AstNode> InFile<N> {
impl<T> InFile<T> {
pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileIdRepr::MacroFile(_) => Err(self),
match self.file_id {
HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileId::MacroFile(_) => Err(self),
}
}
}

View file

@ -81,7 +81,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {

View file

@ -27,6 +27,7 @@ mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt;
use triomphe::Arc;
@ -35,10 +36,7 @@ use std::hash::Hash;
use base_db::Crate;
use either::Either;
use span::{
Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
SyntaxContext,
};
use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@ -62,8 +60,8 @@ pub use crate::{
prettify_macro_expansion_::prettify_macro_expansion,
};
pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, ValueResult};
pub use span::{HirFileId, MacroCallId, MacroFileId};
pub mod tt {
pub use span::Span;
@ -343,51 +341,34 @@ pub enum MacroCallKind {
},
}
pub trait HirFileIdExt {
fn edition(self, db: &dyn ExpandDatabase) -> Edition;
/// Returns the original file of this macro call hierarchy.
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// Returns the original file of this macro call hierarchy while going into the included file if
/// one of the calls comes from an `include!``.
fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
}
impl HirFileIdExt for HirFileId {
fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self.repr() {
HirFileIdRepr::FileId(file_id) => file_id.edition(),
HirFileIdRepr::MacroFile(m) => db.lookup_intern_macro_call(m.macro_call_id).def.edition,
impl HirFileId {
pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self {
HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(),
HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition,
}
}
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
pub fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
let mut file_id = self;
loop {
match file_id.repr() {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
match file_id {
HirFileId::FileId(id) => break id,
HirFileId::MacroFile(macro_call_id) => {
file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
}
}
}
}
fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
pub fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
loop {
match self.repr() {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file.macro_call_id);
match self {
HirFileId::FileId(id) => break id,
HirFileId::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file);
if loc.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
if let Ok(it) =
include_input_to_file_id(db, file.macro_call_id, &eager.arg)
{
if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) {
break it;
}
}
@ -398,23 +379,26 @@ impl HirFileIdExt for HirFileId {
}
}
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
pub fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?).to_node(db);
loop {
match call.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
match call.file_id {
HirFileId::FileId(file_id) => {
break Some(InRealFile { file_id, value: call.value });
}
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
HirFileId::MacroFile(macro_call_id) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
}
}
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
pub fn as_builtin_derive_attr_node(
&self,
db: &dyn ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
let attr = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
_ => return None,
@ -441,57 +425,34 @@ pub enum MacroKind {
ProcMacro,
}
pub trait MacroFileIdExt {
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId;
fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;
/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool;
}
impl MacroFileIdExt for MacroFileId {
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
impl MacroCallId {
pub fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self).to_node(db)
}
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
pub fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
let mut level = 0;
let mut macro_file = self;
loop {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
level += 1;
macro_file = match loc.kind.file_id().repr() {
HirFileIdRepr::FileId(_) => break level,
HirFileIdRepr::MacroFile(it) => it,
macro_file = match loc.kind.file_id() {
HirFileId::FileId(_) => break level,
HirFileId::MacroFile(it) => it,
};
}
}
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
db.lookup_intern_macro_call(self.macro_call_id).kind.file_id()
pub fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
db.lookup_intern_macro_call(self).kind.file_id()
}
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
pub fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
ExpansionInfo::new(db, self)
}
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind {
match db.lookup_intern_macro_call(self.macro_call_id).def.kind {
pub fn kind(self, db: &dyn ExpandDatabase) -> MacroKind {
match db.lookup_intern_macro_call(self).def.kind {
MacroDefKind::Declarative(..) => MacroKind::Declarative,
MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
MacroKind::DeclarativeBuiltIn
@ -504,33 +465,33 @@ impl MacroFileIdExt for MacroFileId {
}
}
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
pub fn is_include_macro(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self).def.is_include()
}
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
pub fn is_include_like_macro(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self).def.is_include_like()
}
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
pub fn is_env_or_option_env(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self).def.is_env_or_option_env()
}
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
pub fn is_eager(self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
pub fn eager_arg(self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
let loc = db.lookup_intern_macro_call(self);
match &loc.kind {
MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
_ => None,
}
}
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
pub fn is_derive_attr_pseudo_expansion(self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self);
loc.def.is_attribute_derive()
}
}
@ -728,11 +689,11 @@ impl MacroCallKind {
pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
HirFileIdRepr::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
match kind.file_id() {
HirFileId::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file).kind;
}
HirFileIdRepr::FileId(file_id) => break file_id,
HirFileId::FileId(file_id) => break file_id,
}
};
@ -753,11 +714,11 @@ impl MacroCallKind {
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
HirFileIdRepr::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
match kind.file_id() {
HirFileId::MacroFile(file) => {
kind = db.lookup_intern_macro_call(file).kind;
}
HirFileIdRepr::FileId(file_id) => break file_id,
HirFileId::FileId(file_id) => break file_id,
}
};
@ -898,7 +859,7 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
let file_id = span.anchor.file_id.into();
let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@ -921,9 +882,9 @@ impl ExpansionInfo {
}
}
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroCallId) -> ExpansionInfo {
let _p = tracing::info_span!("ExpansionInfo::new").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file);
let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id);
@ -955,9 +916,10 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
@ -980,12 +942,10 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((
FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
ctx,
))
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
@ -1004,8 +964,9 @@ pub fn map_node_range_up_aggregated(
);
}
for ((anchor, _), range) in &mut map {
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
@ -1018,12 +979,10 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
let anchor_offset = db
.ast_id_map(span.anchor.file_id.into())
.get_erased(span.anchor.ast_id)
.text_range()
.start();
(FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
(FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
}
/// In Rust, macros expand token trees to token trees. When we want to turn a
@ -1091,3 +1050,77 @@ impl ExpandTo {
}
intern::impl_internable!(ModPath, attrs::AttrInput);
#[salsa::interned(no_lifetime)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
}
impl From<span::MacroCallId> for MacroCallId {
#[inline]
fn from(value: span::MacroCallId) -> Self {
MacroCallId::from_id(value.0)
}
}
impl From<MacroCallId> for span::MacroCallId {
#[inline]
fn from(value: MacroCallId) -> span::MacroCallId {
span::MacroCallId(value.as_id())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum HirFileId {
FileId(EditionedFileId),
MacroFile(MacroCallId),
}
impl From<EditionedFileId> for HirFileId {
#[inline]
fn from(file_id: EditionedFileId) -> Self {
HirFileId::FileId(file_id)
}
}
impl From<MacroCallId> for HirFileId {
#[inline]
fn from(file_id: MacroCallId) -> Self {
HirFileId::MacroFile(file_id)
}
}
impl HirFileId {
#[inline]
pub fn macro_file(self) -> Option<MacroCallId> {
match self {
HirFileId::FileId(_) => None,
HirFileId::MacroFile(it) => Some(it),
}
}
#[inline]
pub fn is_macro(self) -> bool {
matches!(self, HirFileId::MacroFile(_))
}
#[inline]
pub fn file_id(self) -> Option<EditionedFileId> {
match self {
HirFileId::FileId(it) => Some(it),
HirFileId::MacroFile(_) => None,
}
}
}
impl PartialEq<EditionedFileId> for HirFileId {
fn eq(&self, &other: &EditionedFileId) -> bool {
*self == HirFileId::from(other)
}
}
impl PartialEq<HirFileId> for EditionedFileId {
fn eq(&self, &other: &HirFileId) -> bool {
other == HirFileId::from(*self)
}
}

View file

@ -278,7 +278,7 @@ fn convert_path(
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctx = span_for_range(segment.syntax().text_range());
if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
@ -353,7 +353,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> O
result_mark = Some(mark);
}
result_mark.map(|call| db.lookup_intern_macro_call(call).def.krate)
result_mark.map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
}
pub use crate::name as __name;

View file

@ -26,7 +26,7 @@ pub fn prettify_macro_expansion(
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let macro_call_id =
ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call = db.lookup_intern_macro_call(macro_call_id);
let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`.
// If not, if the target trait has the macro's crate as a dependency, using the dependency name

View file

@ -1,13 +1,13 @@
//! Span maps for real files and macro expansions.
use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContext};
use span::{Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
pub use span::RealSpanMap;
use crate::{attrs::collect_attrs, db::ExpandDatabase};
use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@ -61,9 +61,9 @@ impl SpanMap {
#[inline]
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
match file_id {
HirFileId::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileId::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
@ -81,15 +81,13 @@ impl SpanMapRef<'_> {
pub(crate) fn real_span_map(
db: &dyn ExpandDatabase,
editioned_file_id: EditionedFileId,
editioned_file_id: base_db::EditionedFileId,
) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(editioned_file_id.into());
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let tree = db.parse(file_id).tree();
let tree = db.parse(editioned_file_id).tree();
// This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// would mean we'd invalidate everything whenever we type. So instead we make the text ranges
// relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
@ -140,7 +138,7 @@ pub(crate) fn real_span_map(
});
Arc::new(RealSpanMap::from_file(
editioned_file_id,
editioned_file_id.editioned_file_id(db),
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))
@ -148,7 +146,7 @@ pub(crate) fn real_span_map(
pub(crate) fn expansion_span_map(
db: &dyn ExpandDatabase,
file_id: MacroFileId,
file_id: MacroCallId,
) -> Arc<ExpansionSpanMap> {
db.parse_macro_expansion(file_id).value.1
}