Replace ID-based TokenMap with proper relative text ranges / spans

This commit is contained in:
Lukas Wirth 2023-09-29 12:37:57 +02:00
parent f79439caed
commit 890eb17b4e
80 changed files with 1816 additions and 2046 deletions
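
The gist of the change, sketched: token trees no longer carry opaque `tt::TokenId`s that index into a side-table `TokenMap`; each token instead carries a span, i.e. a text range relative to an anchoring AST node, and placeholder ids (`tt::TokenId::unspecified()`) become placeholder spans (`tt::SpanData::DUMMY`). The shape below is reconstructed from the usages in the hunks, not a verbatim copy of `base_db::span`:

// Assumed shape, inferred from the call sites in this commit:
struct SpanAnchor { file_id: HirFileId, ast_id: ErasedFileAstId }
struct SpanData { range: TextRange, anchor: SpanAnchor }
// An absolute position is recovered by resolving the anchor through the
// AstIdMap and offsetting, roughly:
//   db.ast_id_map(anchor.file_id).get_raw(anchor.ast_id).text_range().start()
//     + span.range.start()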

View file

@ -12,11 +12,13 @@ use std::{
marker::PhantomData,
};
use la_arena::{Arena, Idx};
use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
pub use base_db::span::ErasedFileAstId;
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@ -62,8 +64,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@ -129,6 +129,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
// make sure to allocate the root node
if !should_alloc_id(node.kind()) {
res.alloc(node);
}
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
@ -155,6 +160,11 @@ impl AstIdMap {
res
}
/// The [`SyntaxNodePtr`] of the root node
pub fn root(&self) -> SyntaxNodePtr {
self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
}
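// Illustrative sketch (assumed indices): the root is index 0 because it is
// allocated first, and breadth-first order keeps ids stable under appends:
//
//   struct S;    // FileAstId 1
//   fn a() {}    // FileAstId 2
//   // appending `fn b() {}` allocates FileAstId 3; ids 1 and 2 are unchanged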
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@ -164,7 +174,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
pub fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
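A usage sketch of the `AstIdMap` pieces above (hypothetical caller; only items shown in this file are used):

// let map = db.ast_id_map(file_id);
// let root_ptr: SyntaxNodePtr = map.root();            // arena index 0
// let ptr: SyntaxNodePtr = map.get_raw(erased_ast_id); // `get_raw` is now public
// let anchor_offset = ptr.text_range().start();        // base offset for anchored spans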

View file

@ -1,7 +1,8 @@
//! Higher-level attributes based on TokenTree, with some shortcuts.
use std::{fmt, ops};
use base_db::CrateId;
use ::tt::Span;
use base_db::{span::SpanAnchor, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
@ -39,11 +40,16 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
pub fn new(
db: &dyn ExpandDatabase,
span_anchor: SpanAnchor,
owner: &dyn ast::HasAttrs,
hygiene: &Hygiene,
) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
attr.meta().and_then(|meta| Attr::from_src(db, span_anchor, meta, hygiene, id))
}
Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id,
@ -58,9 +64,13 @@ impl RawAttrs {
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
pub fn from_attrs_owner(
db: &dyn ExpandDatabase,
span_anchor: SpanAnchor,
owner: InFile<&dyn ast::HasAttrs>,
) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, owner.value, &hygiene)
Self::new(db, span_anchor, owner.value, &hygiene)
}
pub fn merge(&self, other: Self) -> Self {
@ -190,16 +200,17 @@ pub struct Attr {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
// FIXME: This loses the span
Literal(SmolStr),
/// `#[attr(subtree)]`
TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
AttrInput::TokenTree(tt) => tt.0.fmt(f),
AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@ -207,6 +218,7 @@ impl fmt::Display for AttrInput {
impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
span_anchor: SpanAnchor,
ast: ast::Meta,
hygiene: &Hygiene,
id: AttrId,
@ -219,8 +231,13 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
// FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
let offset =
db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
// FIXME: Spanmap
let tree =
syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default());
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
@ -233,10 +250,12 @@ impl Attr {
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
// FIXME: Unnecessary roundtrip tt -> ast -> tt
let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id)
// FIXME: we discard spans here!
Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id)
}
pub fn path(&self) -> &ModPath {
@ -256,7 +275,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@ -267,7 +286,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => Some(&tt.0),
AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
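
The tree-to-token-tree converter now threads the anchor through explicitly. An assumed signature, inferred from the call sites in this file and in the derive expanders below (hedged; the `&Default::default()` argument is taken to be an empty span map):

// fn syntax_node_to_token_tree(
//     node: &SyntaxNode,
//     anchor: SpanAnchor,       // anchor the produced spans point at
//     anchor_offset: TextSize,  // start offset subtracted from absolute ranges
//     map: &SpanMap,            // pre-existing spans to re-use (e.g. in macro files)
// ) -> tt::Subtree { ... }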

View file

@ -1,5 +1,7 @@
//! Builtin attributes.
use ::tt::Span;
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
@ -98,7 +100,7 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => attr_args,
_ => return ExpandResult::ok(tt::Subtree::empty()),
};
pseudo_derive_attr_expansion(tt, derives)
@ -112,7 +114,7 @@ pub fn pseudo_derive_attr_expansion(
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span: tt::SpanData::DUMMY,
}))
};

View file

@ -1,18 +1,20 @@
//! Builtin derives.
use ::tt::Ident;
use ::tt::Span;
use base_db::{CrateOrigin, LangCrateOrigin};
use itertools::izip;
use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
name::{AsName, Name},
tt::{self, TokenId},
tt, SpanMap,
};
use syntax::{
ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds},
TextSize,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
@ -29,7 +31,7 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
token_map: &TokenMap,
token_map: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
@ -71,7 +73,7 @@ enum VariantShape {
}
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
(0..n).map(|it| tt::Ident::new(format!("f{it}"), tt::SpanData::DUMMY))
}
impl VariantShape {
@ -117,7 +119,7 @@ impl VariantShape {
}
}
fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
fn from(tm: &SpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@ -189,8 +191,8 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match &adt {
fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@ -234,21 +236,44 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(it.syntax()).0
mbe::syntax_node_to_token_tree(
it.syntax(),
tm.span_for_range(it.syntax().first_token().unwrap().text_range())
.unwrap()
.anchor,
TextSize::from(0),
tm,
)
}
None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
}
ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
mbe::syntax_node_to_token_tree(
it.syntax(),
tm.span_for_range(it.syntax().first_token().unwrap().text_range())
.unwrap()
.anchor,
TextSize::from(0),
tm,
)
}),
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
.map(|ty| {
mbe::syntax_node_to_token_tree(
ty.syntax(),
tm.span_for_range(ty.syntax().first_token().unwrap().text_range())
.unwrap()
.anchor,
TextSize::from(0),
tm,
)
})
.unwrap_or_else(tt::Subtree::empty);
Some(ty)
} else {
@ -282,20 +307,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
.map(|it| {
mbe::syntax_node_to_token_tree(
it.syntax(),
tm.span_for_range(it.syntax().first_token().unwrap().text_range()).unwrap().anchor,
TextSize::from(0),
tm,
)
})
.collect();
let name_token = name_to_token(&tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
fn name_to_token(token_map: &SpanMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
let name_token_id =
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
let span = token_map.span_for_range(name.syntax().text_range()).unwrap();
let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
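For orientation, a minimal sketch of the `SpanMap` interface these expanders rely on, inferred from the `span_for_range` calls above and the `span_map` field sorted in `token_tree_to_syntax_node` further down (assumed, not a verbatim definition):

// pub struct SpanMap { pub span_map: Vec<(TextRange, SpanData)> }
// impl SpanMap {
//     /// The span recorded for `range`, if any.
//     pub fn span_for_range(&self, range: TextRange) -> Option<SpanData> { /* lookup */ }
// }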
@ -332,7 +363,7 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -393,7 +424,7 @@ fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
@ -403,16 +434,13 @@ fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
let star =
tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
return quote! {
fn clone(&self) -> Self {
#star self
@ -420,11 +448,8 @@ fn clone_expand(
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
let star =
tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
return quote! {
fn clone(&self) -> Self {
match #star self {}
@ -452,16 +477,14 @@ fn clone_expand(
}
/// This function exists since `quote! { => }` doesn't work.
fn fat_arrow() -> ::tt::Subtree<TokenId> {
let eq =
tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
fn fat_arrow() -> tt::Subtree {
let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY };
quote! { #eq> }
}
/// This function exists since `quote! { && }` doesn't work.
fn and_and() -> ::tt::Subtree<TokenId> {
let and =
tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
fn and_and() -> tt::Subtree {
let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY };
quote! { #and& }
}
@ -469,7 +492,7 @@ fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
@ -509,7 +532,7 @@ fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
@ -540,11 +563,8 @@ fn debug_expand(
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
let star =
tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
return quote! {
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
@ -590,7 +610,7 @@ fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
@ -599,11 +619,8 @@ fn hash_expand(
return quote! {};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
let star =
tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
return quote! {
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
@ -644,7 +661,7 @@ fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
@ -654,7 +671,7 @@ fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
@ -674,14 +691,14 @@ fn partial_eq_expand(
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
let t1 = Ident::new(format!("{}_self", it.text), it.span);
let t2 = Ident::new(format!("{}_other", it.text), it.span);
let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and();
quote!(#and_and #t1 .eq( #t2 ))
});
let first = {
let t1 = Ident::new(format!("{}_self", first.text), first.span);
let t2 = Ident::new(format!("{}_other", first.text), first.span);
let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
quote!(#t1 .eq( #t2 ))
};
quote!(#first ##rest)
@ -708,11 +725,11 @@ fn self_and_other_patterns(
name: &tt::Ident,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
let self_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_self", it.text), it.span);
let t = tt::Ident::new(format!("{}_self", it.text), it.span);
quote!(#t)
});
let other_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_other", it.text), it.span);
let t = tt::Ident::new(format!("{}_other", it.text), it.span);
quote!(#t)
});
(self_patterns, other_patterns)
@ -722,7 +739,7 @@ fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
@ -752,8 +769,8 @@ fn ord_expand(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
}
let fat_arrow = fat_arrow();
@ -784,7 +801,7 @@ fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
tm: &TokenMap,
tm: &SpanMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
@ -817,8 +834,8 @@ fn partial_ord_expand(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
}
let fat_arrow = fat_arrow();

View file

@ -1,17 +1,22 @@
//! Builtin macro
use base_db::{AnchoredPath, Edition, FileId};
use base_db::{
span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
};
use cfg::CfgExpr;
use either::Either;
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
MacroCallLoc,
db::ExpandDatabase,
name, quote,
tt::{self, Span},
EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@ -110,7 +115,7 @@ register_builtin! {
}
const DOLLAR_CRATE: tt::Ident =
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::SpanData::DUMMY };
fn module_path_expand(
_db: &dyn ExpandDatabase,
@ -131,7 +136,7 @@ fn line_expand(
delimiter: tt::Delimiter::unspecified(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: "0u32".into(),
span: tt::Span::UNSPECIFIED,
span: tt::SpanData::DUMMY,
}))],
})
}
@ -179,7 +184,7 @@ fn assert_expand(
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span: tt::SpanData::DUMMY,
}))],
};
let cond = cond.clone();
@ -446,7 +451,7 @@ fn concat_bytes_expand(
}
}
}
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::SpanData::DUMMY };
ExpandResult { value: quote!([#ident]), err }
}
@ -494,7 +499,7 @@ fn concat_idents_expand(
}
}
}
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
let ident = tt::Ident { text: ident.into(), span: tt::SpanData::DUMMY };
ExpandResult { value: quote!(#ident), err }
}
@ -533,15 +538,16 @@ fn include_expand(
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
match db.include_expand(arg_id) {
Ok((res, _)) => ExpandResult::ok(res.0.clone()),
Ok((res, _)) => ExpandResult::ok(res.as_ref().clone()),
Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
}
}
// FIXME: Check if this is still needed after the token map rewrite
pub(crate) fn include_arg_to_tt(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
) -> Result<(triomphe::Arc<tt::Subtree>, FileId), ExpandError> {
let loc = db.lookup_intern_macro_call(arg_id);
let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
@ -549,9 +555,12 @@ pub(crate) fn include_arg_to_tt(
let path = parse_string(&arg.0)?;
let file_id = relative_file(db, *arg_id, &path, false)?;
let (subtree, map) =
parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
Ok((triomphe::Arc::new((subtree, map)), file_id))
let subtree = parse_to_token_tree(
&db.file_text(file_id),
SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID },
)
.ok_or(mbe::ExpandError::ConversionError)?;
Ok((triomphe::Arc::new(subtree), file_id))
}
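Since the included file has no pre-existing span map, its whole text is anchored at the file root. A hedged usage sketch of the call above:

// let anchor = SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID };
// // yields None if the text does not lex into a token tree:
// let subtree: tt::Subtree = parse_to_token_tree(&db.file_text(file_id), anchor)?;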
fn include_bytes_expand(
@ -568,7 +577,7 @@ fn include_bytes_expand(
delimiter: tt::Delimiter::unspecified(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
span: tt::TokenId::unspecified(),
span: tt::SpanData::DUMMY,
}))],
};
ExpandResult::ok(res)

View file

@ -1,22 +1,25 @@
//! Defines database & queries for macro expansion.
use base_db::{salsa, CrateId, Edition, SourceDatabase};
use base_db::{
salsa,
span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
CrateId, Edition, SourceDatabase,
};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use mbe::{map_from_syntax_node, syntax_node_to_token_tree, ValueResult};
use syntax::{
ast::{self, HasAttrs, HasDocComments},
AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, TextSize, T,
};
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId,
};
/// Total limit on the number of tokens produced by any macro invocation.
@ -30,8 +33,7 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro,
pub def_site_token_map: mbe::TokenMap,
pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
}
impl DeclarativeMacroExpander {
@ -41,21 +43,14 @@ impl DeclarativeMacroExpander {
tt::Subtree::empty(),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(tt).map_err(Into::into),
None => self.mac.expand(&tt).map_err(Into::into),
}
}
pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
self.mac.map_id_down(token_id)
}
pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
self.mac.map_id_up(token_id)
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@ -69,31 +64,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
// FIXME: Get rid of these methods
impl TokenExpander {
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@ -109,7 +79,7 @@ pub trait ExpandDatabase: SourceDatabase {
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@ -118,23 +88,16 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId;
/// Lowers a syntactic macro call to a token tree representation.
#[salsa::transparent]
/// Lowers a syntactic macro call to a token tree representation. That's a firewall
/// query: only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg(
&self,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
>;
/// Extracts the syntax node corresponding to a macro call. That's a firewall
/// query: only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg_node(
&self,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@ -152,10 +115,7 @@ pub trait ExpandDatabase: SourceDatabase {
fn include_expand(
&self,
arg_id: MacroCallId,
) -> Result<
(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
ExpandError,
>;
) -> Result<(triomphe::Arc<tt::Subtree>, base_db::FileId), ExpandError>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@ -181,21 +141,19 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let token_range = token_to_map.text_range();
let file_id = loc.kind.file_id();
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
let _censor = censor_for_macro_input(&loc, speculative_args);
let mut tt = mbe::syntax_node_to_token_tree(
speculative_args,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
// we don't leak these spans into any query so it's fine to make them absolute
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
TextSize::new(0),
&Default::default(),
);
let (attr_arg, token_id) = match loc.kind {
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@ -210,48 +168,27 @@ pub fn expand_speculative(
}?;
match attr.token_tree() {
Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
tree.delimiter = tt::Delimiter::unspecified();
let mut tree = syntax_node_to_token_tree(
token_tree.syntax(),
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
TextSize::new(0),
&Default::default(),
);
tree.delimiter = tt::Delimiter::UNSPECIFIED;
let shift = mbe::Shift::new(&tt);
shift.shift_all(&mut tree);
let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let range = token_range.checked_sub(attr_input_start)?;
let token_id = shift.shift(map.token_by_range(range)?);
Some(token_id)
} else {
None
};
(Some(tree), token_id)
Some(tree)
}
_ => (None, None),
}
}
_ => (None, None),
};
let token_id = match token_id {
Some(token_id) => token_id,
// token wasn't inside an attribute input so it has to be in the general macro input
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
match loc.def.kind {
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
}
_ => token_id,
_ => None,
}
}
_ => None,
};
// Do the actual expansion; we need to directly expand the proc macro due to the attribute args.
// Otherwise the expand query will fetch the non-speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
let speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
tt.delimiter = tt::Delimiter::UNSPECIFIED;
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
@ -260,7 +197,17 @@ pub fn expand_speculative(
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
expander.expand(
db,
actual_macro_call,
&adt,
&map_from_syntax_node(
speculative_args,
// we don't leak these spans into any query so it's fine to make them absolute
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
TextSize::new(0),
),
)
}
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
@ -271,12 +218,14 @@ pub fn expand_speculative(
};
let expand_to = macro_expand_to(db, actual_macro_call);
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
.ranges_by_token(token_id, token_to_map.kind())
.ranges_with_span(tt::SpanData {
range: token_to_map.text_range(),
anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
})
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@ -293,7 +242,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@ -315,7 +264,7 @@ fn parse_or_expand_with_err(
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)> {
let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
@ -324,7 +273,7 @@ fn parse_macro_expansion(
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to);
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
@ -340,48 +289,119 @@ fn parse_macro_expansion_error(
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
> {
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
}
let ValueResult { value, err } = db.macro_arg_node(id);
let Some(arg) = value else {
return ValueResult { value: None, err };
) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>> {
let mismatched_delimiters = |arg: &SyntaxNode| {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
Some(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)]) as Box<[_]>))
} else {
None
}
};
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref())
.flatten()
{
ValueResult::ok(Some(Arc::new(arg.0.clone())))
} else {
let (parse, map) = match loc.kind.file_id().repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), Arc::new(Default::default()))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, map)
}
};
let root = parse.syntax_node();
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let (syntax, offset, ast_id) = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
match node.token_tree().map(|it| it.syntax().clone()) {
Some(tt) => {
if let Some(e) = mismatched_delimiters(&tt) {
return ValueResult::only_err(e);
}
(tt, offset, ast_id.value.erase())
}
None => {
return ValueResult::only_err(Arc::new(Box::new([
SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
])));
}
}
}
MacroCallKind::Derive { ast_id, .. } => {
let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone();
let offset = syntax_node.text_range().start();
(syntax_node, offset, ast_id.value.erase())
}
MacroCallKind::Attr { ast_id, .. } => {
let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone();
let offset = syntax_node.text_range().start();
(syntax_node, offset, ast_id.value.erase())
}
};
let censor = censor_for_macro_input(&loc, &syntax);
// let mut fixups = fixup::fixup_syntax(&node);
// fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
// let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
// &node,
// fixups.token_map,
// fixups.next_id,
// fixups.replace,
// fixups.append,
// );
let mut tt = mbe::syntax_node_to_token_tree_censored(
&syntax,
SpanAnchor { file_id: loc.kind.file_id(), ast_id },
offset,
&map,
censor,
);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::unspecified();
}
let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
match err {
Some(err) => ValueResult::new(val, err),
None => ValueResult::ok(val),
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::UNSPECIFIED;
}
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
[] => ValueResult::ok(Some(Arc::new(tt))),
errors => ValueResult::new(
Some(Arc::new(tt)),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok(Some(Arc::new(tt)))
}
}
}
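// Note (hedged): for all three call kinds above, the anchor is the macro
// call node itself (its erased AstId) and `offset` is that node's start, so
// spans inside the argument stay relative to the call they came from and are
// unaffected by edits elsewhere in the file.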
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec<SyntaxNode> {
// FIXME: handle `cfg_attr`
(|| {
let censor = match loc.kind {
@ -417,103 +437,56 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
fn macro_arg_node(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
let err = || -> Arc<Box<[_]>> {
Arc::new(Box::new([SyntaxError::new_at_offset(
"invalid macro call".to_owned(),
syntax::TextSize::from(0),
)]))
};
let loc = db.lookup_intern_macro_call(id);
let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
} else {
loc.kind
.arg(db)
.and_then(|arg| ast::TokenTree::cast(arg.value))
.map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
};
match res {
Some(res) if res.errors().is_empty() => res.syntax_node(),
Some(res) => {
return ValueResult::new(
Some(res.syntax_node().green().into()),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(res.errors().to_vec().into_boxed_slice()),
);
}
None => return ValueResult::only_err(err()),
}
} else {
match loc.kind.arg(db) {
Some(res) => res.value,
None => return ValueResult::only_err(err()),
}
};
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)])));
}
}
ValueResult::ok(Some(arg.green().into()))
}
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let (mac, def_site_token_map) = match id.to_node(db) {
let (root, map) = match id.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).syntax_node(), Arc::new(Default::default()))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse.syntax_node(), map)
}
};
let mac = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
macro_rules.syntax().text_range().start(),
&map,
);
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
(mac, def_site_token_map)
mac
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
ast::Macro::MacroDef(macro_def) => match macro_def.body() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
macro_def.syntax().text_range().start(),
&map,
);
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
(mac, def_site_token_map)
mac
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
};
Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
Arc::new(DeclarativeMacroExpander { mac })
}
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@ -536,25 +509,37 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
MacroDefKind::BuiltInDerive(expander, ..) => {
let arg = db.macro_arg_node(id).value.unwrap();
// FIXME: add firewall query for this?
let hir_file_id = loc.kind.file_id();
let (root, map) = match hir_file_id.repr() {
HirFileIdRepr::FileId(file_id) => (db.parse(file_id).syntax_node(), None),
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse.syntax_node(), Some(map))
}
};
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
let node = ast_id.to_ptr(db).to_node(&root);
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
// FIXME: we might need to remove the spans from the input to the derive macro here
let _censor = censor_for_macro_input(&loc, node.syntax());
let _t;
expander.expand(
db,
id,
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(node).unwrap();
let mut res = expander.expand(db, id, &adt, &tmap);
fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
res
match &map {
Some(map) => map,
None => {
_t = map_from_syntax_node(
node.syntax(),
SpanAnchor { file_id: hir_file_id, ast_id: ast_id.value.erase() },
node.syntax().text_range().start(),
);
&_t
}
},
)
}
_ => {
let ValueResult { value, err } = db.macro_arg(id);
@ -570,8 +555,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
let (arg, arg_tm, undo_info) = &*macro_arg;
let mut res = match loc.def.kind {
let arg = &*macro_arg;
match loc.def.kind {
MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
}
@ -583,11 +568,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
let mut arg = arg.clone();
fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
return ExpandResult {
value: Arc::new(arg),
value: Arc::new(arg.clone()),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@ -603,9 +585,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
_ => unreachable!(),
};
fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
res
}
}
};
@ -626,6 +606,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
// FIXME: Syntax fix ups
let loc = db.lookup_intern_macro_call(id);
let Some(macro_arg) = db.macro_arg(id).value else {
return ExpandResult {
@ -639,32 +620,24 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
MacroCallKind::Attr { attr_args, .. } => Some(&**attr_args),
_ => None,
};
let ExpandResult { value: mut tt, err } =
expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
let ExpandResult { value: tt, err } =
expander.expand(db, loc.def.krate, loc.krate, &macro_arg, attr_arg);
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
ExpandResult { value: Arc::new(tt), err }
}
@ -677,9 +650,10 @@ fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
}
fn token_tree_to_syntax_node(
db: &dyn ExpandDatabase,
tt: &tt::Subtree,
expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
) -> (Parse<SyntaxNode>, SpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@ -687,7 +661,18 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr,
};
mbe::token_tree_to_syntax_node(tt, entry_point)
let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point);
// now what the hell is going on here
tm.1.span_map.sort_by(|(_, a), (_, b)| {
a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| {
let map = db.ast_id_map(a.anchor.file_id);
map.get_raw(a.anchor.ast_id)
.text_range()
.start()
.cmp(&map.get_raw(b.anchor.ast_id).text_range().start())
})
});
tm
}
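// Note on the sort above (hedged): entries of the reverse span map are
// ordered by anchor file and then by the anchor node's offset within that
// file, grouping entries of the same anchor into contiguous, ordered runs;
// presumably `ranges_with_span` (see `expand_speculative`) relies on that.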
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {

View file

@ -18,8 +18,11 @@
//!
//!
//! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet};
use base_db::{
span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
CrateId,
};
use rustc_hash::FxHashMap;
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use triomphe::Arc;
@ -29,7 +32,7 @@ use crate::{
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind,
MacroCallLoc, MacroDefId, MacroDefKind, SpanMap,
};
pub fn expand_eager_macro_input(
@ -54,15 +57,15 @@ pub fn expand_eager_macro_input(
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
// we need this map here as the expansion of the eager input's fake file loses whitespace ...
let mut ws_mapping = FxHashMap::default();
if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
}));
}
// let mut ws_mapping = FxHashMap::default();
// if let Some((tm)) = db.macro_arg(arg_id).value.as_deref() {
// ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
// Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
// }));
// }
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
@ -75,49 +78,55 @@ pub fn expand_eager_macro_input(
};
let err = parse_err.or(err);
let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
let (mut subtree, expanded_eager_input_token_map) =
mbe::syntax_node_to_token_tree(&expanded_eager_input);
let mut subtree = mbe::syntax_node_to_token_tree(
&expanded_eager_input,
// is this right?
SpanAnchor { file_id: arg_id.as_file(), ast_id: ROOT_ERASED_FILE_AST_ID },
TextSize::new(0),
// FIXME: Spans! `eager_macro_recur` needs to fill out a span map for us
&Default::default(),
);
let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
let mut ids_used = FxHashSet::default();
let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
// The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
// so we need to remap them to the original input of the eager macro.
subtree.visit_ids(&mut |id| {
// Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
// let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
// let mut ids_used = FxHashSet::default();
// let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
// // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
// // so we need to remap them to the original input of the eager macro.
// subtree.visit_ids(&mut |id| {
// // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
if let Some(range) = expanded_eager_input_token_map
.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
{
// remap from expanded eager input to eager input expansion
if let Some(og_range) = mapping.get(&range) {
// remap from eager input expansion to original eager input
if let Some(&og_range) = ws_mapping.get(og_range) {
if let Some(og_token) = og_tmap.token_by_range(og_range) {
ids_used.insert(og_token);
return og_token;
}
}
}
}
tt::TokenId::UNSPECIFIED
});
og_tmap.filter(|id| ids_used.contains(&id));
og_tmap
} else {
Default::default()
};
subtree.delimiter = crate::tt::Delimiter::unspecified();
// if let Some(range) = expanded_eager_input_token_map
// .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
// {
// // remap from expanded eager input to eager input expansion
// if let Some(og_range) = mapping.get(&range) {
// // remap from eager input expansion to original eager input
// if let Some(&og_range) = ws_mapping.get(og_range) {
// if let Some(og_token) = og_tmap.token_by_range(og_range) {
// ids_used.insert(og_token);
// return og_token;
// }
// }
// }
// }
// tt::TokenId::UNSPECIFIED
// });
// og_tmap.filter(|id| ids_used.contains(&id));
// og_tmap
// } else {
// Default::default()
// };
subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED;
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
arg: Arc::new((subtree, og_tmap)),
arg: Arc::new((subtree,)),
arg_id,
error: err.clone(),
})),
@ -132,7 +141,7 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<SpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
@ -214,19 +223,19 @@ fn eager_macro_recur(
let ExpandResult { value, err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start =
apply_offset(call.syntax().text_range().start(), offset);
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
value
.1
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
// if let Some(tt) = call.token_tree() {
// let call_tt_start = tt.syntax().text_range().start();
// let call_start =
// apply_offset(call.syntax().text_range().start(), offset);
// if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
// mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
// value
// .1
// .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
// .map(|r| (r + call_start, range + call_tt_start))
// }));
// }
// }
ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
@ -241,13 +250,8 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
let ExpandResult { value: (parse, _tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
Some(db.decl_macro_expander(def.krate, ast_id))
} else {
None
};
// replace macro inside
let hygiene = Hygiene::new(db, parse.file_id);
@ -261,24 +265,29 @@ fn eager_macro_recur(
);
let err = err.or(error);
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_tt, arg_map, _)) = parse
.file_id
.macro_file()
.and_then(|id| db.macro_arg(id.macro_call_id).value)
.as_deref()
{
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
tm.first_range_by_token(
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
syntax::SyntaxKind::TOMBSTONE,
)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
// if let Some(tt) = call.token_tree() {
// let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
// Some(db.decl_macro_expander(def.krate, ast_id))
// } else {
// None
// };
// let call_tt_start = tt.syntax().text_range().start();
// let call_start = apply_offset(call.syntax().text_range().start(), offset);
// if let Some((_tt, arg_map, _)) = parse
// .file_id
// .macro_file()
// .and_then(|id| db.macro_arg(id.macro_call_id).value)
// .as_deref()
// {
// mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
// tm.first_range_by_token(
// decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
// syntax::SyntaxKind::TOMBSTONE,
// )
// .map(|r| (r + call_start, range + call_tt_start))
// }));
// }
// }
// FIXME: Do we need to re-use _m here?
ExpandResult { value: value.map(|(n, _m)| n), err }
}

View file

@ -5,18 +5,16 @@
use base_db::CrateId;
use db::TokenExpander;
use either::Either;
use mbe::Origin;
use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
AstNode, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use crate::{
db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap,
};
#[derive(Clone, Debug)]
@ -50,23 +48,25 @@ impl Hygiene {
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut token = path.syntax().first_token()?.text_range();
pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut _token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut current = &frames.0;
let mut _current = &frames.0;
loop {
let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
if origin == Origin::Def {
return if current.local_inner {
frames.root_crate(db, path.syntax())
} else {
None
};
}
current = current.call_site.as_ref()?;
token = mapped.value;
}
// FIXME: Hygiene ...
return None;
// loop {
// let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
// if origin == Origin::Def {
// return if current.local_inner {
// frames.root_crate(db, path.syntax())
// } else {
// None
// };
// }
// current = current.call_site.as_ref()?;
// token = mapped.value;
// }
}
}
@ -92,31 +92,33 @@ impl HygieneFrames {
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut _token = node.first_token()?.text_range();
let mut _result = self.0.krate;
let mut _current = self.0.clone();
while let Some((mapped, origin)) =
current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
{
result = current.krate;
return None;
let site = match origin {
Origin::Def => &current.def_site,
Origin::Call => &current.call_site,
};
// while let Some((mapped, origin)) =
// current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
// {
// result = current.krate;
let site = match site {
None => break,
Some(it) => it,
};
// let site = match origin {
// Origin::Def => &current.def_site,
// Origin::Call => &current.call_site,
// };
current = site.clone();
token = mapped.value;
}
// let site = match site {
// None => break,
// Some(it) => it,
// };
result
// current = site.clone();
// token = mapped.value;
// }
// result
}
}
@ -127,45 +129,18 @@ struct HygieneInfo {
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
macro_arg: Arc<crate::tt::Subtree>,
exp_map: Arc<SpanMap>,
}
impl HygieneInfo {
fn map_ident_up(
fn _map_ident_up(
&self,
db: &dyn ExpandDatabase,
token: TextRange,
) -> Option<(InFile<TextRange>, Origin)> {
let token_id = self.exp_map.token_by_range(token)?;
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
let (token_map, tt) = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
Some(unshifted) => {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
mbe::Origin::Call => {
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
}
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
},
};
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
Some((tt.with_value(range + tt.value), origin))
_db: &dyn ExpandDatabase,
_token: TextRange,
) -> Option<InFile<TextRange>> {
// self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range))
None
}
}
@ -197,18 +172,13 @@ fn make_hygiene_info(
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
});
HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,

View file

@ -18,21 +18,13 @@ pub mod quote;
pub mod eager;
pub mod mod_path;
pub mod attrs;
mod fixup;
// mod fixup;
use mbe::TokenMap;
pub use mbe::{Origin, ValueResult};
use ::tt::token_id as tt;
use triomphe::Arc;
use std::{fmt, hash::Hash, iter};
use base_db::{
impl_intern_key,
salsa::{self, InternId},
CrateId, FileId, FileRange, ProcMacroKind,
};
use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind};
use either::Either;
use syntax::{
algo::{self, skip_trivia_token},
@ -51,6 +43,25 @@ use crate::{
proc_macro::ProcMacroExpander,
};
pub use base_db::span::{HirFileId, MacroCallId, MacroFile};
pub use mbe::ValueResult;
pub type SpanMap = ::mbe::TokenMap<tt::SpanData>;
pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
pub mod tt {
pub use base_db::span::SpanData;
pub use tt::{DelimiterKind, Spacing, Span};
pub type Delimiter = ::tt::Delimiter<SpanData>;
pub type Subtree = ::tt::Subtree<SpanData>;
pub type Leaf = ::tt::Leaf<SpanData>;
pub type Literal = ::tt::Literal<SpanData>;
pub type Punct = ::tt::Punct<SpanData>;
pub type Ident = ::tt::Ident<SpanData>;
pub type TokenTree = ::tt::TokenTree<SpanData>;
}
pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
@ -86,42 +97,43 @@ impl fmt::Display for ExpandError {
}
}
/// The input to the analyzer is a set of files, where each file is identified by
/// a `FileId` and contains source code. However, another source of source code in
/// Rust is macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by the user) or a `MacroCallId` (source code produced by a macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of the file
/// containing the call plus the offset of the macro call in that file. Note that
/// this is a recursive definition! However, the size of `HirFileId` is
/// finite (because everything bottoms out at a real `FileId`) and small
/// (`MacroCallId` uses location interning; see
/// <https://en.wikipedia.org/wiki/String_interning> for details).
///
/// The two variants are encoded in a single u32, differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`; otherwise the remaining 31 bits represent a
/// `MacroCallId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct HirFileId(u32);
impl fmt::Debug for HirFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.repr().fmt(f)
}
}
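
A minimal sketch of the MSB-tag encoding described above, using plain `u32`s instead of the real newtypes (the constant name mirrors the one used further down; the helper functions are hypothetical):

const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

// A raw value with the MSB clear is a `FileId`; with the MSB set, the
// remaining 31 bits are a `MacroCallId`.
fn is_macro(raw: u32) -> bool {
    raw & MACRO_FILE_TAG_MASK != 0
}

// Ok(file_id) for source files, Err(macro_call_id) for macro expansions.
fn decode(raw: u32) -> Result<u32, u32> {
    if is_macro(raw) { Err(raw ^ MACRO_FILE_TAG_MASK) } else { Ok(raw) }
}
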
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile {
pub macro_call_id: MacroCallId,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroCallId(salsa::InternId);
impl_intern_key!(MacroCallId);
pub struct SyntaxContextId(base_db::salsa::InternId);
base_db::impl_intern_key!(SyntaxContextId);
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContext {
outer_expn: HirFileId,
outer_transparency: Transparency,
parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
opaque_and_semitransparent: SyntaxContextId,
/// Name of the crate to which `$crate` with this context would resolve.
dollar_crate_name: name::Name,
}
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label, or `$crate`, then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
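
As a concrete illustration of the semi-transparent case, a local binding introduced inside a `macro_rules!` expansion resolves at the definition site and is therefore invisible at the call site. A standalone sketch, not part of this crate:

macro_rules! declare {
    () => {
        let x = 1;
        let _ = x; // usable within the same expansion
    };
}

fn main() {
    declare!();
    // `x` carries def-site hygiene, so it is not in scope here;
    // uncommenting the next line fails to compile.
    // let _ = x;
}
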
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
@ -154,7 +166,7 @@ pub enum MacroDefKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
/// The expanded argument of the eager macro.
arg: Arc<(tt::Subtree, TokenMap)>,
arg: Arc<(tt::Subtree,)>,
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
@ -178,7 +190,7 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
attr_args: Arc<tt::Subtree>,
/// Syntactical index of the invoking `#[attribute]`.
///
/// Outer attributes are counted first, then inner attributes. This does not support
@ -187,34 +199,40 @@ pub enum MacroCallKind {
},
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFile),
}
impl From<FileId> for HirFileId {
fn from(FileId(id): FileId) -> Self {
assert!(id < Self::MAX_FILE_ID);
HirFileId(id)
}
}
impl From<MacroFile> for HirFileId {
fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
let id = id.as_u32();
assert!(id < Self::MAX_FILE_ID);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
pub trait HirFileIdExt {
/// For macro-expansion files, returns the original source file the
/// expansion originated from.
pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId;
fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>>;
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)>;
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>;
fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
-> Option<InFile<ast::Attr>>;
fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an attr macro
fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
}
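
These methods move from inherent impls to an extension trait because `HirFileId` is now defined in `base_db`, and this crate can only attach behavior to the foreign type through a trait. A minimal reproduction of the pattern, with hypothetical types:

// Defined in another crate (stand-in for `base_db`'s `HirFileId`):
pub struct RemoteId(pub u32);

// The consuming crate adds methods via a trait:
trait RemoteIdExt {
    fn is_even(&self) -> bool;
}

impl RemoteIdExt for RemoteId {
    fn is_even(&self) -> bool {
        self.0 % 2 == 0
    }
}

fn main() {
    assert!(RemoteId(4).is_even());
}
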
impl HirFileIdExt for HirFileId {
fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
let mut file_id = self;
loop {
match file_id.repr() {
@ -231,7 +249,7 @@ impl HirFileId {
}
}
pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
let mut level = 0;
let mut curr = self;
while let Some(macro_file) = curr.macro_file() {
@ -243,15 +261,13 @@ impl HirFileId {
level
}
/// If this is a macro call, returns the syntax node of the call.
pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
Some(loc.to_node(db))
}
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop {
match call.file_id.repr() {
@ -264,12 +280,12 @@ impl HirFileId {
}
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
let macro_file = self.macro_file()?;
ExpansionInfo::new(db, macro_file)
}
pub fn as_builtin_derive_attr_node(
fn as_builtin_derive_attr_node(
&self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
@ -282,7 +298,7 @@ impl HirFileId {
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
matches!(
@ -294,7 +310,7 @@ impl HirFileId {
}
}
pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
matches!(
@ -306,8 +322,7 @@ impl HirFileId {
}
}
/// Return whether this file is an include macro
pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
@ -316,7 +331,7 @@ impl HirFileId {
}
}
pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -326,8 +341,7 @@ impl HirFileId {
}
}
/// Return whether this file is an attr macro
pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -337,9 +351,7 @@ impl HirFileId {
}
}
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -348,38 +360,6 @@ impl HirFileId {
None => false,
}
}
#[inline]
pub fn is_macro(self) -> bool {
self.0 & Self::MACRO_FILE_TAG_MASK != 0
}
#[inline]
pub fn macro_file(self) -> Option<MacroFile> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => None,
_ => Some(MacroFile {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
#[inline]
pub fn file_id(self) -> Option<FileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => Some(FileId(self.0)),
_ => None,
}
}
fn repr(self) -> HirFileIdRepr {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => HirFileIdRepr::FileId(FileId(self.0)),
_ => HirFileIdRepr::MacroFile(MacroFile {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
}
impl MacroDefId {
@ -587,16 +567,6 @@ impl MacroCallKind {
}
}
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFile { macro_call_id: self }.into()
}
pub fn as_macro_file(self) -> MacroFile {
MacroFile { macro_call_id: self }
}
}
/// `ExpansionInfo` mainly describes how to map text ranges between the source and the expanded macro.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
@ -607,11 +577,8 @@ pub struct ExpansionInfo {
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: TokenExpander,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
/// A shift built from `macro_arg`'s subtree, relevant for attributes, as the item is the macro arg
/// and as such we need to shift tokens if they are part of an attribute's input instead of their item.
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
macro_arg: Arc<tt::Subtree>,
exp_map: Arc<SpanMap>,
}
impl ExpansionInfo {
@ -640,69 +607,33 @@ impl ExpansionInfo {
pub fn map_token_down(
&self,
db: &dyn db::ExpandDatabase,
item: Option<ast::Item>,
token: InFile<&SyntaxToken>,
// FIXME: use this for range mapping, so that we can resolve inline format args
_relative_token_offset: Option<TextSize>,
) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
assert_eq!(token.file_id, self.arg.file_id);
let token_id_in_attr_input = if let Some(item) = item {
// check if we are mapping down in an attribute input
// this is a special case as attributes can have two inputs
let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
let token_range = token.value.text_range();
match &loc.kind {
MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let attr = item
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?;
match attr.token_tree() {
Some(token_tree)
if token_tree.syntax().text_range().contains_range(token_range) =>
{
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let relative_range =
token.value.text_range().checked_sub(attr_input_start)?;
// shift by the item's tree's max id
let token_id = attr_args.1.token_by_range(relative_range)?;
let token_id = if loc.def.is_attribute_derive() {
// we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
token_id
} else {
self.macro_arg_shift.shift(token_id)
};
Some(token_id)
}
_ => None,
}
}
_ => None,
}
let span_map = &self.exp_map.span_map;
let (start, end) = if span_map
.first()
.map_or(false, |(_, span)| span.anchor.file_id == token.file_id)
{
(0, span_map.partition_point(|a| a.1.anchor.file_id == token.file_id))
} else {
None
let start = span_map.partition_point(|a| a.1.anchor.file_id != token.file_id);
(
start,
start + span_map[start..].partition_point(|a| a.1.anchor.file_id == token.file_id),
)
};
let token_id = match token_id_in_attr_input {
Some(token_id) => token_id,
// the token is not inside an attribute's input, so do the lookup in the macro_arg as usual
None => {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(relative_range)?;
// conditionally shift the id by a declarative macro definition
self.macro_def.map_id_down(token_id)
}
};
let tokens = self
.exp_map
.ranges_by_token(token_id, token.value.kind())
let token_text_range = token.value.text_range();
let ast_id_map = db.ast_id_map(token.file_id);
let tokens = span_map[start..end]
.iter()
.filter_map(move |(range, span)| {
let offset = ast_id_map.get_raw(span.anchor.ast_id).text_range().start();
let abs_range = span.range + offset;
token_text_range.eq(&abs_range).then_some(*range)
})
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
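
The bounds computation above relies on all entries anchored in the same file being contiguous in `span_map`; `partition_point` (a binary search over a partitioned predicate) then finds the start and end of that block. A self-contained sketch with hypothetical `(range, file)` pairs standing in for the real entries:

fn file_subrange(entries: &[(u32, u32)], file: u32) -> std::ops::Range<usize> {
    // In this simplified layout, all entries whose file differs come first.
    let start = entries.partition_point(|&(_, f)| f != file);
    let end = start + entries[start..].partition_point(|&(_, f)| f == file);
    start..end
}

fn main() {
    let entries = [(0, 7), (1, 7), (2, 9), (3, 9), (4, 9)];
    assert_eq!(file_subrange(&entries, 9), 2..5);
}
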
@ -713,60 +644,18 @@ impl ExpansionInfo {
&self,
db: &dyn db::ExpandDatabase,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
assert_eq!(token.file_id, self.expanded.file_id.into());
// Fetch the id through its text range,
let token_id = self.exp_map.token_by_range(token.value.text_range())?;
// conditionally unshifting the id to accommodate the macro-rules def site
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
// Special case: map tokens from `include!` expansions to the included file
if loc.def.is_include() {
if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
let source = db.parse(file_id);
let token = source.syntax_node().covering_element(range).into_token()?;
return Some((InFile::new(file_id.into(), token), Origin::Call));
}
}
// Attributes are a bit special for us: they have two inputs, the input token tree and the annotated item.
let (token_map, tt) = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
if loc.def.is_attribute_derive() {
(&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
} else {
// try unshifting the token id; if unshifting fails, the token resides in the non-item attribute input
// note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
match self.macro_arg_shift.unshift(token_id) {
Some(unshifted) => {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
}
None => (&self.macro_arg.1, self.arg.clone()),
}
}
}
_ => match origin {
mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, tt.syntax().cloned())
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
},
};
let range = token_map.first_range_by_token(token_id, token.value.kind())?;
let token =
tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
Some((tt.with_value(token), origin))
) -> Option<InFile<SyntaxToken>> {
self.exp_map.span_for_range(token.value.text_range()).and_then(|span| {
let anchor =
db.ast_id_map(span.anchor.file_id).get_raw(span.anchor.ast_id).text_range().start();
InFile::new(
span.anchor.file_id,
db.parse_or_expand(span.anchor.file_id)
.covering_element(span.range + anchor)
.into_token(),
)
.transpose()
})
}
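
With spans carrying anchor-relative ranges, the up-mapping reduces to a single offset addition: adding the anchor node's absolute start recovers the absolute file range. In simplified form, with hypothetical fields standing in for `TextRange` and `SpanAnchor`:

struct Span {
    rel_start: u32,    // range relative to the anchor node
    rel_end: u32,
    anchor_start: u32, // absolute start of the anchor node in the file
}

fn to_absolute(span: &Span) -> (u32, u32) {
    (span.anchor_start + span.rel_start, span.anchor_start + span.rel_end)
}
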
fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
@ -779,11 +668,7 @@ impl ExpansionInfo {
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
});
let def = loc.def.ast_id().left().and_then(|id| {
@ -814,7 +699,6 @@ impl ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
@ -1018,7 +902,7 @@ impl InFile<&SyntaxNode> {
impl InFile<SyntaxToken> {
pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
let expansion = self.file_id.expansion_info(db)?;
expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
expansion.map_token_up(db, self.as_ref())
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
@ -1067,6 +951,7 @@ impl<T> From<InMacroFile<T>> for InFile<T> {
}
}
// FIXME: Get rid of this
fn ascend_node_border_tokens(
db: &dyn db::ExpandDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
@ -1090,13 +975,13 @@ fn ascend_call_token(
token: InFile<SyntaxToken>,
) -> Option<InFile<SyntaxToken>> {
let mut mapping = expansion.map_token_up(db, token.as_ref())?;
while let (mapped, Origin::Call) = mapping {
match mapped.file_id.expansion_info(db) {
Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
None => return Some(mapped),
loop {
match mapping.file_id.expansion_info(db) {
Some(info) => mapping = info.map_token_up(db, mapping.as_ref())?,
None => return Some(mapping),
}
}
None
}
impl<N: AstNode> InFile<N> {

View file

@ -18,8 +18,8 @@ macro_rules! __quote {
crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
open: crate::tt::TokenId::unspecified(),
close: crate::tt::TokenId::unspecified(),
open: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
close: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
},
token_trees: $crate::quote::IntoTt::to_tokens(children),
}
@ -32,7 +32,7 @@ macro_rules! __quote {
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
span: crate::tt::TokenId::unspecified(),
span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
}).into()
]
}
@ -44,12 +44,12 @@ macro_rules! __quote {
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
span: crate::tt::TokenId::unspecified(),
span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
span: crate::tt::TokenId::unspecified(),
span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
}).into()
]
}
@ -89,7 +89,7 @@ macro_rules! __quote {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
text: stringify!($tt).into(),
span: crate::tt::TokenId::unspecified(),
span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
}).into()
}]
};
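
The replacement spans use fully qualified syntax (`<Type as Trait>::CONST`) because `DUMMY` is an associated constant on the `Span` trait; the qualified form disambiguates when several traits in scope could define the name. A tiny standalone illustration, not this crate's actual trait definition:

trait Span: Sized {
    const DUMMY: Self;
}

#[derive(Clone, Copy, Debug)]
struct SpanData(u32);

impl Span for SpanData {
    const DUMMY: Self = SpanData(0);
}

fn main() {
    let _span = <SpanData as Span>::DUMMY;
}
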
@ -195,20 +195,22 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
u32 => self { crate::tt::Literal{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
usize => self { crate::tt::Literal{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
i32 => self { crate::tt::Literal{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
bool => self { crate::tt::Ident{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
crate::tt::Leaf => self { self };
crate::tt::Literal => self { self };
crate::tt::Ident => self { self };
crate::tt::Punct => self { self };
&str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
&str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY}};
String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY}}
}
#[cfg(test)]
mod tests {
use expect_test::expect;
#[test]
fn test_quote_delimiters() {
assert_eq!(quote!({}).to_string(), "{}");
@ -231,7 +233,10 @@ mod tests {
}
fn mk_ident(name: &str) -> crate::tt::Ident {
crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
crate::tt::Ident {
text: name.into(),
span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
}
}
#[test]
@ -241,7 +246,9 @@ mod tests {
let quoted = quote!(#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295");
expect![[r#"
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }"#]].assert_eq(&t);
}
#[test]
@ -273,8 +280,8 @@ mod tests {
let list = crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
open: crate::tt::TokenId::unspecified(),
close: crate::tt::TokenId::unspecified(),
open: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
close: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
},
token_trees: fields.collect(),
};