mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-29 05:15:04 +00:00

Merge commit '37f84c101b' into sync-from-ra

This commit is contained in:
  parent 6502421771
  commit 4704881b64

311 changed files with 13700 additions and 9110 deletions
@@ -18,47 +18,89 @@ use rustc_hash::FxHasher;
 use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
 
 /// `AstId` points to an AST node in a specific file.
-pub struct FileAstId<N: AstNode> {
+pub struct FileAstId<N: AstIdNode> {
     raw: ErasedFileAstId,
     covariant: PhantomData<fn() -> N>,
 }
 
-impl<N: AstNode> Clone for FileAstId<N> {
+impl<N: AstIdNode> Clone for FileAstId<N> {
     fn clone(&self) -> FileAstId<N> {
         *self
     }
 }
-impl<N: AstNode> Copy for FileAstId<N> {}
+impl<N: AstIdNode> Copy for FileAstId<N> {}
 
-impl<N: AstNode> PartialEq for FileAstId<N> {
+impl<N: AstIdNode> PartialEq for FileAstId<N> {
     fn eq(&self, other: &Self) -> bool {
         self.raw == other.raw
     }
 }
-impl<N: AstNode> Eq for FileAstId<N> {}
-impl<N: AstNode> Hash for FileAstId<N> {
+impl<N: AstIdNode> Eq for FileAstId<N> {}
+impl<N: AstIdNode> Hash for FileAstId<N> {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         self.raw.hash(hasher);
     }
 }
 
-impl<N: AstNode> fmt::Debug for FileAstId<N> {
+impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
     }
 }
 
-impl<N: AstNode> FileAstId<N> {
+impl<N: AstIdNode> FileAstId<N> {
     // Can't make this a From implementation because of coherence
-    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+    pub fn upcast<M: AstIdNode>(self) -> FileAstId<M>
     where
         N: Into<M>,
     {
         FileAstId { raw: self.raw, covariant: PhantomData }
     }
+
+    pub fn erase(self) -> ErasedFileAstId {
+        self.raw
+    }
 }
 
-type ErasedFileAstId = Idx<SyntaxNodePtr>;
+pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
 
+pub trait AstIdNode: AstNode {}
+macro_rules! register_ast_id_node {
+    (impl AstIdNode for $($ident:ident),+ ) => {
+        $(
+            impl AstIdNode for ast::$ident {}
+        )+
+        fn should_alloc_id(kind: syntax::SyntaxKind) -> bool {
+            $(
+                ast::$ident::can_cast(kind)
+            )||+
+        }
+    };
+}
+register_ast_id_node! {
+    impl AstIdNode for
+    Item,
+        Adt,
+            Enum,
+            Struct,
+            Union,
+        Const,
+        ExternBlock,
+        ExternCrate,
+        Fn,
+        Impl,
+        Macro,
+            MacroDef,
+            MacroRules,
+        MacroCall,
+        Module,
+        Static,
+        Trait,
+        TraitAlias,
+        TypeAlias,
+        Use,
+    AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+}
 
 /// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
 #[derive(Default)]
@@ -92,14 +134,7 @@ impl AstIdMap {
         // change parent's id. This means that, say, adding a new function to a
         // trait does not change ids of top-level items, which helps caching.
         bdfs(node, |it| {
-            let kind = it.kind();
-            if ast::Item::can_cast(kind)
-                || ast::BlockExpr::can_cast(kind)
-                || ast::Variant::can_cast(kind)
-                || ast::RecordField::can_cast(kind)
-                || ast::TupleField::can_cast(kind)
-                || ast::ConstArg::can_cast(kind)
-            {
+            if should_alloc_id(it.kind()) {
                 res.alloc(&it);
                 true
             } else {
@@ -120,15 +155,19 @@ impl AstIdMap {
         res
     }
 
-    pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+    pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, covariant: PhantomData }
     }
 
-    pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+    pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
         AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
    }

+    pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+        self.arena[id].clone()
+    }
+
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
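The `AstIdMap` hunks above replace a hand-maintained chain of `can_cast` checks with a `should_alloc_id` predicate that `register_ast_id_node!` generates from the same list of types that receives the `AstIdNode` impls, so the allocation filter and the trait impls can no longer drift apart. Below is a minimal, self-contained sketch of that registration pattern; the `SyntaxKind` and `AstNode` definitions are simplified stand-ins, not the real `syntax` crate items.

// Standalone sketch of the registration pattern; stand-in types throughout.
#[derive(Clone, Copy)]
enum SyntaxKind { Fn, Struct, Whitespace }

trait AstNode {
    fn can_cast(kind: SyntaxKind) -> bool;
}

// Marker trait: only node types registered below receive stable ast ids.
trait AstIdNode: AstNode {}

macro_rules! register_ast_id_node {
    (impl AstIdNode for $($ident:ident),+ ) => {
        $( impl AstIdNode for $ident {} )+
        // One generated predicate keeps the id-allocation filter in sync
        // with the set of `AstIdNode` impls.
        fn should_alloc_id(kind: SyntaxKind) -> bool {
            $( $ident::can_cast(kind) )||+
        }
    };
}

struct FnDef;
struct StructDef;
impl AstNode for FnDef {
    fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, SyntaxKind::Fn) }
}
impl AstNode for StructDef {
    fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, SyntaxKind::Struct) }
}

register_ast_id_node!(impl AstIdNode for FnDef, StructDef);

fn main() {
    assert!(should_alloc_id(SyntaxKind::Fn));
    assert!(!should_alloc_id(SyntaxKind::Whitespace));
}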
@@ -35,7 +35,7 @@ macro_rules! register_builtin {
 
 impl BuiltinAttrExpander {
     pub fn is_derive(self) -> bool {
-        matches!(self, BuiltinAttrExpander::Derive)
+        matches!(self, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst)
     }
     pub fn is_test(self) -> bool {
         matches!(self, BuiltinAttrExpander::Test)
@@ -50,6 +50,8 @@ register_builtin! {
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,
     (derive, Derive) => derive_attr_expand,
+    // derive const is equivalent to derive for our purposes.
+    (derive_const, DeriveConst) => derive_attr_expand,
     (global_allocator, GlobalAllocator) => dummy_attr_expand,
     (test, Test) => dummy_attr_expand,
     (test_case, TestCase) => dummy_attr_expand
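The change above makes `derive_const` behave like `derive` during attribute expansion by widening the `matches!` predicate to an or-pattern. A tiny standalone sketch of the idiom, with stand-in variants:

// Stand-in sketch: `matches!` with an or-pattern covers both derive flavours.
enum BuiltinAttrExpander { Derive, DeriveConst, Test }

fn is_derive(e: &BuiltinAttrExpander) -> bool {
    matches!(e, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst)
}

fn main() {
    assert!(is_derive(&BuiltinAttrExpander::DeriveConst));
    assert!(!is_derive(&BuiltinAttrExpander::Test));
}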
@@ -12,9 +12,7 @@ use crate::{
     name::{AsName, Name},
     tt::{self, TokenId},
 };
-use syntax::ast::{
-    self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
-};
+use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
 
 use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
 
@@ -30,12 +28,13 @@ macro_rules! register_builtin {
             &self,
             db: &dyn ExpandDatabase,
             id: MacroCallId,
-            tt: &tt::Subtree,
+            tt: &ast::Adt,
+            token_map: &TokenMap,
         ) -> ExpandResult<tt::Subtree> {
             let expander = match *self {
                 $( BuiltinDeriveExpander::$trait => $expand, )*
             };
-            expander(db, id, tt)
+            expander(db, id, tt, token_map)
         }
 
         fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -72,12 +71,12 @@ enum VariantShape {
 }
 
 fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
-    (0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified()))
+    (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
 }
 
 impl VariantShape {
     fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
-        self.as_pattern_map(path, |x| quote!(#x))
+        self.as_pattern_map(path, |it| quote!(#it))
     }
 
     fn field_names(&self) -> Vec<tt::Ident> {
@@ -95,17 +94,17 @@ impl VariantShape {
     ) -> tt::Subtree {
         match self {
             VariantShape::Struct(fields) => {
-                let fields = fields.iter().map(|x| {
-                    let mapped = field_map(x);
-                    quote! { #x : #mapped , }
+                let fields = fields.iter().map(|it| {
+                    let mapped = field_map(it);
+                    quote! { #it : #mapped , }
                 });
                 quote! {
                     #path { ##fields }
                 }
             }
             &VariantShape::Tuple(n) => {
-                let fields = tuple_field_iterator(n).map(|x| {
-                    let mapped = field_map(&x);
+                let fields = tuple_field_iterator(n).map(|it| {
+                    let mapped = field_map(&it);
                     quote! {
                         #mapped ,
                     }
@@ -118,16 +117,16 @@ impl VariantShape {
         }
     }
 
-    fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> {
+    fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
         let r = match value {
             None => VariantShape::Unit,
-            Some(FieldList::RecordFieldList(x)) => VariantShape::Struct(
-                x.fields()
-                    .map(|x| x.name())
-                    .map(|x| name_to_token(token_map, x))
+            Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
+                it.fields()
+                    .map(|it| it.name())
+                    .map(|it| name_to_token(tm, it))
                     .collect::<Result<_, _>>()?,
             ),
-            Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()),
+            Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
         };
         Ok(r)
     }
@@ -141,7 +140,7 @@ enum AdtShape {
 
 impl AdtShape {
     fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
-        self.as_pattern_map(name, |x| quote!(#x))
+        self.as_pattern_map(name, |it| quote!(#it))
     }
 
     fn field_names(&self) -> Vec<Vec<tt::Ident>> {
@@ -190,32 +189,19 @@ struct BasicAdtInfo {
     associated_types: Vec<tt::Subtree>,
 }
 
-fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
-    let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
-        debug!("derive node didn't parse");
-        ExpandError::other("invalid item definition")
-    })?;
-    let item = macro_items.items().next().ok_or_else(|| {
-        debug!("no module item parsed");
-        ExpandError::other("no item found")
-    })?;
-    let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| {
-        debug!("expected adt, found: {:?}", item);
-        ExpandError::other("expected struct, enum or union")
-    })?;
+fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
     let (name, generic_param_list, shape) = match &adt {
         ast::Adt::Struct(it) => (
             it.name(),
             it.generic_param_list(),
-            AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?),
+            AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
         ),
         ast::Adt::Enum(it) => {
             let default_variant = it
                 .variant_list()
                 .into_iter()
-                .flat_map(|x| x.variants())
-                .position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+                .flat_map(|it| it.variants())
+                .position(|it| it.attrs().any(|it| it.simple_name() == Some("default".into())));
             (
                 it.name(),
                 it.generic_param_list(),
@@ -224,11 +210,11 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
                 variants: it
                     .variant_list()
                     .into_iter()
-                    .flat_map(|x| x.variants())
-                    .map(|x| {
+                    .flat_map(|it| it.variants())
+                    .map(|it| {
                         Ok((
-                            name_to_token(&token_map, x.name())?,
-                            VariantShape::from(x.field_list(), &token_map)?,
+                            name_to_token(tm, it.name())?,
+                            VariantShape::from(tm, it.field_list())?,
                         ))
                     })
                     .collect::<Result<_, ExpandError>>()?,
@@ -246,16 +232,16 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
         let name = {
             let this = param.name();
             match this {
-                Some(x) => {
-                    param_type_set.insert(x.as_name());
-                    mbe::syntax_node_to_token_tree(x.syntax()).0
+                Some(it) => {
+                    param_type_set.insert(it.as_name());
+                    mbe::syntax_node_to_token_tree(it.syntax()).0
                 }
                 None => tt::Subtree::empty(),
             }
         };
         let bounds = match &param {
-            ast::TypeOrConstParam::Type(x) => {
-                x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+            ast::TypeOrConstParam::Type(it) => {
+                it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
             }
             ast::TypeOrConstParam::Const(_) => None,
         };
@@ -296,9 +282,9 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
             let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
             param_type_set.contains(&name).then_some(p)
         })
-        .map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+        .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
         .collect();
-    let name_token = name_to_token(&token_map, name)?;
+    let name_token = name_to_token(&tm, name)?;
     Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
 }
 
@@ -345,11 +331,12 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
 /// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
 /// therefore does not get bound by the derived trait.
 fn expand_simple_derive(
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
     trait_path: tt::Subtree,
     make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
 ) -> ExpandResult<tt::Subtree> {
-    let info = match parse_adt(tt) {
+    let info = match parse_adt(tm, tt) {
         Ok(info) => info,
         Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
     };
@@ -373,10 +360,10 @@ fn expand_simple_derive(
         })
         .unzip();
 
-    where_block.extend(info.associated_types.iter().map(|x| {
-        let x = x.clone();
+    where_block.extend(info.associated_types.iter().map(|it| {
+        let it = it.clone();
         let bound = trait_path.clone();
-        quote! { #x : #bound , }
+        quote! { #it : #bound , }
     }));
 
     let name = info.name;
@@ -405,19 +392,21 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
 fn copy_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::marker::Copy }, |_| quote! {})
+    expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
 }
 
 fn clone_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::clone::Clone }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             let star = tt::Punct {
                 char: '*',
@@ -444,7 +433,7 @@ fn clone_expand(
         }
         let name = &adt.name;
         let patterns = adt.shape.as_pattern(name);
-        let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() });
+        let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
         let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
             let fat_arrow = fat_arrow();
             quote! {
@@ -479,10 +468,11 @@ fn and_and() -> ::tt::Subtree<TokenId> {
 fn default_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::default::Default }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
         let body = match &adt.shape {
             AdtShape::Struct(fields) => {
                 let name = &adt.name;
@@ -518,16 +508,17 @@ fn default_expand(
 fn debug_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
         let for_variant = |name: String, v: &VariantShape| match v {
             VariantShape::Struct(fields) => {
-                let for_fields = fields.iter().map(|x| {
-                    let x_string = x.to_string();
+                let for_fields = fields.iter().map(|it| {
+                    let x_string = it.to_string();
                     quote! {
-                        .field(#x_string, & #x)
+                        .field(#x_string, & #it)
                     }
                 });
                 quote! {
@@ -535,9 +526,9 @@ fn debug_expand(
                 }
             }
             VariantShape::Tuple(n) => {
-                let for_fields = tuple_field_iterator(*n).map(|x| {
+                let for_fields = tuple_field_iterator(*n).map(|it| {
                     quote! {
-                        .field( & #x)
+                        .field( & #it)
                     }
                 });
                 quote! {
@@ -598,10 +589,11 @@ fn debug_expand(
 fn hash_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::hash::Hash }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
             return quote! {};
@@ -621,7 +613,7 @@ fn hash_expand(
         let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
             |(pat, names)| {
                 let expr = {
-                    let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
+                    let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
                     quote! { {
                         ##it
                     } }
@@ -632,9 +624,14 @@ fn hash_expand(
                 }
            },
        );
+        let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
+            quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+        } else {
+            quote! {}
+        };
         quote! {
             fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
-                #krate::mem::discriminant(self).hash(ra_expand_state);
+                #check_discriminant
                 match self {
                     ##arms
                 }
@@ -646,19 +643,21 @@ fn hash_expand(
 fn eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::Eq }, |_| quote! {})
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
 }
 
 fn partial_eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
             return quote! {};
@@ -674,9 +673,9 @@ fn partial_eq_expand(
                     quote!(true)
                 }
                 [first, rest @ ..] => {
-                    let rest = rest.iter().map(|x| {
-                        let t1 = Ident::new(format!("{}_self", x.text), x.span);
-                        let t2 = Ident::new(format!("{}_other", x.text), x.span);
+                    let rest = rest.iter().map(|it| {
+                        let t1 = Ident::new(format!("{}_self", it.text), it.span);
+                        let t2 = Ident::new(format!("{}_other", it.text), it.span);
                         let and_and = and_and();
                         quote!(#and_and #t1 .eq( #t2 ))
                     });
@@ -708,12 +707,12 @@ fn self_and_other_patterns(
     adt: &BasicAdtInfo,
     name: &tt::Ident,
 ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
-    let self_patterns = adt.shape.as_pattern_map(name, |x| {
-        let t = Ident::new(format!("{}_self", x.text), x.span);
+    let self_patterns = adt.shape.as_pattern_map(name, |it| {
+        let t = Ident::new(format!("{}_self", it.text), it.span);
         quote!(#t)
     });
-    let other_patterns = adt.shape.as_pattern_map(name, |x| {
-        let t = Ident::new(format!("{}_other", x.text), x.span);
+    let other_patterns = adt.shape.as_pattern_map(name, |it| {
+        let t = Ident::new(format!("{}_other", it.text), it.span);
         quote!(#t)
     });
     (self_patterns, other_patterns)
@@ -722,10 +721,11 @@ fn self_and_other_patterns(
 fn ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::Ord }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
         fn compare(
             krate: &tt::TokenTree,
             left: tt::Subtree,
@@ -747,9 +747,6 @@ fn ord_expand(
             // FIXME: Return expand error here
             return quote!();
         }
-        let left = quote!(#krate::intrinsics::discriminant_value(self));
-        let right = quote!(#krate::intrinsics::discriminant_value(other));
-
         let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
         let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
             |(pat1, pat2, fields)| {
@@ -764,17 +761,17 @@ fn ord_expand(
             },
         );
         let fat_arrow = fat_arrow();
-        let body = compare(
-            krate,
-            left,
-            right,
-            quote! {
-                match (self, other) {
-                    ##arms
-                    _unused #fat_arrow #krate::cmp::Ordering::Equal
-                }
-            },
-        );
+        let mut body = quote! {
+            match (self, other) {
+                ##arms
+                _unused #fat_arrow #krate::cmp::Ordering::Equal
+            }
+        };
+        if matches!(&adt.shape, AdtShape::Enum { .. }) {
+            let left = quote!(#krate::intrinsics::discriminant_value(self));
+            let right = quote!(#krate::intrinsics::discriminant_value(other));
+            body = compare(krate, left, right, body);
+        }
         quote! {
             fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
                 #body
@@ -786,10 +783,11 @@ fn ord_expand(
 fn partial_ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
         fn compare(
             krate: &tt::TokenTree,
             left: tt::Subtree,
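The derive expanders above now receive the already-parsed `ast::Adt` together with its `TokenMap` instead of re-parsing a `tt::Subtree`, and they all funnel through `expand_simple_derive`, which builds per-variant patterns and expressions from a `VariantShape`. Below is a simplified, self-contained sketch of that shape-driven construction; strings stand in for the real token trees and `quote!` output.

// Simplified sketch of the shape-driven construction; `String` stands in for
// the real `tt::Subtree` tokens produced by `quote!`.
#[allow(dead_code)]
enum VariantShape {
    Struct(Vec<String>),
    Tuple(usize),
    Unit,
}

impl VariantShape {
    fn as_pattern_map(&self, path: &str, field_map: impl Fn(&str) -> String) -> String {
        match self {
            VariantShape::Struct(fields) => {
                let inner: Vec<String> =
                    fields.iter().map(|it| format!("{it}: {}", field_map(it))).collect();
                format!("{path} {{ {} }}", inner.join(", "))
            }
            VariantShape::Tuple(n) => {
                // Tuple fields get synthetic names f0, f1, ... like
                // `tuple_field_iterator` in the diff above.
                let inner: Vec<String> =
                    (0..*n).map(|i| field_map(&format!("f{i}"))).collect();
                format!("{path}({})", inner.join(", "))
            }
            VariantShape::Unit => path.to_string(),
        }
    }
}

fn main() {
    let shape = VariantShape::Struct(vec!["x".into(), "y".into()]);
    // Mirrors `as_pattern_map(name, |it| quote! { #it .clone() })` from
    // `clone_expand` above.
    assert_eq!(
        shape.as_pattern_map("Point", |it| format!("{it}.clone()")),
        "Point { x: x.clone(), y: y.clone() }"
    );
    let pair = VariantShape::Tuple(2);
    assert_eq!(pair.as_pattern_map("Pair", |it| it.to_string()), "Pair(f0, f1)");
}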
@@ -339,7 +339,7 @@ fn format_args_expand_general(
                 parts.push(mem::take(&mut last_part));
                 let arg_tree = if argument.is_empty() {
                     match args.next() {
-                        Some(x) => x,
+                        Some(it) => it,
                         None => {
                             err = Some(mbe::ExpandError::NoMatchingRule.into());
                             tt::Subtree::empty()
@@ -361,7 +361,7 @@ fn format_args_expand_general(
                     quote!(::core::fmt::Display::fmt)
                 }
             };
-            arg_tts.push(quote! { ::core::fmt::Argument::new(&(#arg_tree), #formatter), });
+            arg_tts.push(quote! { ::core::fmt::ArgumentV1::new(&(#arg_tree), #formatter), });
         }
         '}' => {
             if format_iter.peek() == Some(&'}') {
@@ -378,11 +378,11 @@ fn format_args_expand_general(
     if !last_part.is_empty() {
         parts.push(last_part);
     }
-    let part_tts = parts.into_iter().map(|x| {
+    let part_tts = parts.into_iter().map(|it| {
         let text = if let Some(raw) = &raw_sharps {
-            format!("r{raw}\"{}\"{raw}", x).into()
+            format!("r{raw}\"{}\"{raw}", it).into()
         } else {
-            format!("\"{}\"", x).into()
+            format!("\"{}\"", it).into()
         };
         let l = tt::Literal { span: tt::TokenId::unspecified(), text };
         quote!(#l ,)
@@ -574,7 +574,7 @@ fn concat_bytes_expand(
             syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
             syntax::SyntaxKind::BYTE_STRING => {
                 let components = unquote_byte_string(lit).unwrap_or_default();
-                components.into_iter().for_each(|x| bytes.push(x.to_string()));
+                components.into_iter().for_each(|it| bytes.push(it.to_string()));
             }
             _ => {
                 err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
@@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt(
     arg_id: MacroCallId,
 ) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
     let loc = db.lookup_intern_macro_call(arg_id);
-    let Some(EagerCallInfo { arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+    let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
         panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
     };
     let path = parse_string(&arg.0)?;
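In `include_arg_to_tt` above, the `let ... else` destructuring is loosened so `arg_id` is bound unconditionally rather than requiring `Some(..)`, apparently because `arg_id` is no longer optional in `EagerCallInfo` after this commit. A small standalone sketch of the `let ... else` shape used there, with stand-in types:

// Small sketch of the `let ... else` destructuring used by
// `include_arg_to_tt`; the types here are stand-ins.
struct EagerCallInfo {
    arg: String,
    arg_id: u32,
}

fn include_arg(eager: Option<&EagerCallInfo>) -> (String, u32) {
    // `let ... else` must diverge on mismatch (here: panic), which keeps the
    // happy path unindented, exactly as in the diff above.
    let Some(EagerCallInfo { arg, arg_id }) = eager else {
        panic!("include_arg_to_tt called on non include macro call");
    };
    (arg.clone(), *arg_id)
}

fn main() {
    let info = EagerCallInfo { arg: "\"foo.rs\"".to_owned(), arg_id: 1 };
    assert_eq!(include_arg(Some(&info)), ("\"foo.rs\"".to_owned(), 1));
}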
@@ -1,9 +1,9 @@
 //! Defines database & queries for macro expansion.
 
-use base_db::{salsa, Edition, SourceDatabase};
+use base_db::{salsa, CrateId, Edition, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},
@@ -13,7 +13,7 @@ use triomphe::Arc;
 
 use crate::{
     ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
-    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
+    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
     BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
     ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
     MacroDefKind, MacroFile, ProcMacroExpander,
@@ -27,62 +27,68 @@ use crate::{
 /// Actual max for `analysis-stats .` at some point: 30672.
 static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
 
 #[derive(Debug, Clone, Eq, PartialEq)]
+/// Old-style `macro_rules` or the new macros 2.0
+pub struct DeclarativeMacroExpander {
+    pub mac: mbe::DeclarativeMacro,
+    pub def_site_token_map: mbe::TokenMap,
+}
+
+impl DeclarativeMacroExpander {
+    pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self.mac.expand(tt).map_err(Into::into),
+        }
+    }
+
+    pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
+        self.mac.map_id_down(token_id)
+    }
+
+    pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+        self.mac.map_id_up(token_id)
+    }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
-    /// Old-style `macro_rules` or the new macros 2.0
-    DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
     /// Stuff like `line!` and `file!`.
-    Builtin(BuiltinFnLikeExpander),
+    BuiltIn(BuiltinFnLikeExpander),
     /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
-    BuiltinEager(EagerExpander),
+    BuiltInEager(EagerExpander),
     /// `global_allocator` and such.
-    BuiltinAttr(BuiltinAttrExpander),
+    BuiltInAttr(BuiltinAttrExpander),
     /// `derive(Copy)` and such.
-    BuiltinDerive(BuiltinDeriveExpander),
+    BuiltInDerive(BuiltinDeriveExpander),
     /// The thing we love the most here in rust-analyzer -- procedural macros.
     ProcMacro(ProcMacroExpander),
 }
 
+// FIXME: Get rid of these methods
 impl TokenExpander {
-    fn expand(
-        &self,
-        db: &dyn ExpandDatabase,
-        id: MacroCallId,
-        tt: &tt::Subtree,
-    ) -> ExpandResult<tt::Subtree> {
-        match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
-            TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
-            TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
-            TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
-            TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
-            TokenExpander::ProcMacro(_) => {
-                // We store the result in salsa db to prevent non-deterministic behavior in
-                // some proc-macro implementation
-                // See #4315 for details
-                db.expand_proc_macro(id)
-            }
-        }
-    }
-
     pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
-            TokenExpander::Builtin(..)
-            | TokenExpander::BuiltinEager(..)
-            | TokenExpander::BuiltinAttr(..)
-            | TokenExpander::BuiltinDerive(..)
+            TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
+            TokenExpander::BuiltIn(..)
+            | TokenExpander::BuiltInEager(..)
+            | TokenExpander::BuiltInAttr(..)
+            | TokenExpander::BuiltInDerive(..)
             | TokenExpander::ProcMacro(..) => id,
         }
     }
 
     pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
-            TokenExpander::Builtin(..)
-            | TokenExpander::BuiltinEager(..)
-            | TokenExpander::BuiltinAttr(..)
-            | TokenExpander::BuiltinDerive(..)
+            TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
+            TokenExpander::BuiltIn(..)
+            | TokenExpander::BuiltInEager(..)
+            | TokenExpander::BuiltInAttr(..)
+            | TokenExpander::BuiltInDerive(..)
             | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
         }
     }
@@ -118,14 +124,26 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+    ) -> ValueResult<
+        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+        Arc<Box<[SyntaxError]>>,
+    >;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
-    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
-    /// Gets the expander for this macro. This compiles declarative macros, and
-    /// just fetches procedural ones.
-    fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+    fn macro_arg_node(
+        &self,
+        id: MacroCallId,
+    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+    /// Fetches the expander for this macro.
+    #[salsa::transparent]
+    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+    /// Fetches (and compiles) the expander of this decl macro.
+    fn decl_macro_expander(
+        &self,
+        def_crate: CrateId,
+        id: AstId<ast::Macro>,
+    ) -> Arc<DeclarativeMacroExpander>;
 
     /// Expand macro call to a token tree.
     // This query is LRU cached
@@ -141,8 +159,8 @@ pub trait ExpandDatabase: SourceDatabase {
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way.
-    /// @edwin0cheng heroically debugged this once!
-    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
+    /// @edwin0cheng heroically debugged this once! See #4315 for details
+    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
     fn parse_macro_expansion_error(
         &self,
@@ -163,7 +181,6 @@ pub fn expand_speculative(
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
-    let macro_def = db.macro_def(loc.def).ok()?;
     let token_range = token_to_map.text_range();
 
     // Build the subtree and token mapping for the speculative args
@@ -221,7 +238,12 @@ pub fn expand_speculative(
         None => {
             let range = token_range.checked_sub(speculative_args.text_range().start())?;
             let token_id = spec_args_tmap.token_by_range(range)?;
-            macro_def.map_id_down(token_id)
+            match loc.def.kind {
+                MacroDefKind::Declarative(it) => {
+                    db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+                }
+                _ => token_id,
+            }
         }
     };
 
@@ -235,7 +257,17 @@ pub fn expand_speculative(
         MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
             pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
         }
-        _ => macro_def.expand(db, actual_macro_call, &tt),
+        MacroDefKind::BuiltInDerive(expander, ..) => {
+            // this cast is a bit sus, can we avoid losing the typedness here?
+            let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
+            expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+        }
+        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
+        MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
+        MacroDefKind::BuiltInEager(it, _) => {
+            it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+        }
+        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
     };
 
     let expand_to = macro_expand_to(db, actual_macro_call);
@@ -297,17 +329,31 @@ fn parse_macro_expansion(
     ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
 }
 
+fn parse_macro_expansion_error(
+    db: &dyn ExpandDatabase,
+    macro_call_id: MacroCallId,
+) -> ExpandResult<Box<[SyntaxError]>> {
+    db.parse_macro_expansion(MacroFile { macro_call_id })
+        .map(|it| it.0.errors().to_vec().into_boxed_slice())
+}
+
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> ValueResult<
+    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+    Arc<Box<[SyntaxError]>>,
+> {
     let loc = db.lookup_intern_macro_call(id);
 
-    if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
-        return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+    if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
+        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
     }
 
-    let arg = db.macro_arg_text(id)?;
+    let ValueResult { value, err } = db.macro_arg_node(id);
+    let Some(arg) = value else {
+        return ValueResult { value: None, err };
+    };
 
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
@@ -325,9 +371,16 @@ fn macro_arg(
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = tt::Delimiter::unspecified();
     }
-    Some(Arc::new((tt, tmap, fixups.undo_info)))
+    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
+    match err {
+        Some(err) => ValueResult::new(val, err),
+        None => ValueResult::ok(val),
+    }
 }
 
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
 /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
 /// - attributes expect the invoking attribute to be stripped
 fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
     // FIXME: handle `cfg_attr`
     (|| {
@@ -364,9 +417,44 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
         .unwrap_or_default()
 }
 
-fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_node(
+    db: &dyn ExpandDatabase,
+    id: MacroCallId,
+) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
+    let err = || -> Arc<Box<[_]>> {
+        Arc::new(Box::new([SyntaxError::new_at_offset(
+            "invalid macro call".to_owned(),
+            syntax::TextSize::from(0),
+        )]))
+    };
     let loc = db.lookup_intern_macro_call(id);
-    let arg = loc.kind.arg(db)?;
+    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
+        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
+        } else {
+            loc.kind
+                .arg(db)
+                .and_then(|arg| ast::TokenTree::cast(arg.value))
+                .map(|tt| tt.reparse_as_expr().to_syntax())
+        };
+
+        match res {
+            Some(res) if res.errors().is_empty() => res.syntax_node(),
+            Some(res) => {
+                return ValueResult::new(
+                    Some(res.syntax_node().green().into()),
+                    // Box::<[_]>::from(res.errors()), not stable yet
+                    Arc::new(res.errors().to_vec().into_boxed_slice()),
+                );
+            }
+            None => return ValueResult::only_err(err()),
+        }
+    } else {
+        match loc.kind.arg(db) {
+            Some(res) => res.value,
+            None => return ValueResult::only_err(err()),
+        }
+    };
     if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
@@ -381,101 +469,146 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
             // Some day, we'll have explicit recursion counters for all
             // recursive things, at which point this code might be removed.
             cov_mark::hit!(issue9358_bad_macro_stack_overflow);
-            return None;
+            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
+                "unbalanced token tree".to_owned(),
+                arg.text_range(),
+            )])));
        }
    }
-    if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
-        Some(
-            mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
-                .0
-                .syntax_node()
-                .green()
-                .into(),
-        )
-    } else {
-        Some(arg.green().into())
-    }
+    ValueResult::ok(Some(arg.green().into()))
 }
 
-fn macro_def(
+fn decl_macro_expander(
     db: &dyn ExpandDatabase,
-    id: MacroDefId,
-) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+    def_crate: CrateId,
+    id: AstId<ast::Macro>,
+) -> Arc<DeclarativeMacroExpander> {
+    let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
+    let (mac, def_site_token_map) = match id.to_node(db) {
+        ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
+            Some(arg) => {
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+                (mac, def_site_token_map)
+            }
+            None => (
+                mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+                Default::default(),
+            ),
+        },
+        ast::Macro::MacroDef(macro_def) => match macro_def.body() {
+            Some(arg) => {
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+                (mac, def_site_token_map)
+            }
+            None => (
+                mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+                Default::default(),
+            ),
+        },
+    };
+    Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+}
+
+fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => {
-            let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021;
-            let (mac, def_site_token_map) = match ast_id.to_node(db) {
-                ast::Macro::MacroRules(macro_rules) => {
-                    let arg = macro_rules
-                        .token_tree()
-                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?;
-                    (mac, def_site_token_map)
-                }
-                ast::Macro::MacroDef(macro_def) => {
-                    let arg = macro_def
-                        .body()
-                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?;
-                    (mac, def_site_token_map)
-                }
-            };
-            Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
+            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
         }
-        MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
-        MacroDefKind::BuiltInAttr(expander, _) => {
-            Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
-        }
-        MacroDefKind::BuiltInDerive(expander, _) => {
-            Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
-        }
-        MacroDefKind::BuiltInEager(expander, ..) => {
-            Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
-        }
-        MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
+        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
     }
 }
 
 fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let _p = profile::span("macro_expand");
     let loc = db.lookup_intern_macro_call(id);
     if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
         // This is an input expansion for an eager macro. These are already pre-expanded
         return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
     }
-    let expander = match db.macro_def(loc.def) {
-        Ok(it) => it,
-        // FIXME: We should make sure to enforce a variant that invalid macro
-        // definitions do not get expanders that could reach this call path!
-        Err(err) => {
-            return ExpandResult {
-                value: Arc::new(tt::Subtree {
-                    delimiter: tt::Delimiter::UNSPECIFIED,
-                    token_trees: vec![],
-                }),
-                err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
-            }
+    let ExpandResult { value: tt, mut err } = match loc.def.kind {
+        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+        MacroDefKind::BuiltInDerive(expander, ..) => {
+            let arg = db.macro_arg_node(id).value.unwrap();
+
+            let node = SyntaxNode::new_root(arg);
+            let censor = censor_for_macro_input(&loc, &node);
+            let mut fixups = fixup::fixup_syntax(&node);
+            fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+            let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
+                &node,
+                fixups.token_map,
+                fixups.next_id,
+                fixups.replace,
+                fixups.append,
+            );
+
+            // this cast is a bit sus, can we avoid losing the typedness here?
+            let adt = ast::Adt::cast(node).unwrap();
+            let mut res = expander.expand(db, id, &adt, &tmap);
+            fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
+            res
+        }
+        _ => {
+            let ValueResult { value, err } = db.macro_arg(id);
+            let Some(macro_arg) = value else {
+                return ExpandResult {
+                    value: Arc::new(tt::Subtree {
+                        delimiter: tt::Delimiter::UNSPECIFIED,
+                        token_trees: Vec::new(),
+                    }),
+                    // FIXME: We should make sure to enforce an invariant that invalid macro
+                    // calls do not reach this call path!
+                    err: Some(ExpandError::other("invalid token tree")),
+                };
+            };
+
+            let (arg, arg_tm, undo_info) = &*macro_arg;
+            let mut res = match loc.def.kind {
+                MacroDefKind::Declarative(id) => {
+                    db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+                }
+                MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+                // This might look a bit odd, but we do not expand the inputs to eager macros here.
+                // Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
+                // That kind of expansion uses the ast id map of an eager macros input though which goes through
+                // the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
+                // will end up going through here again, whereas we want to just want to inspect the raw input.
+                // As such we just return the input subtree here.
+                MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
+                    let mut arg = arg.clone();
+                    fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
+
+                    return ExpandResult {
+                        value: Arc::new(arg),
+                        err: err.map(|err| {
+                            let mut buf = String::new();
+                            for err in &**err {
+                                use std::fmt::Write;
+                                _ = write!(buf, "{}, ", err);
+                            }
+                            buf.pop();
+                            buf.pop();
+                            ExpandError::other(buf)
+                        }),
+                    };
+                }
+                MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+                MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+                _ => unreachable!(),
+            };
+            fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
            res
        }
    };
-    let Some(macro_arg) = db.macro_arg(id) else {
-        return ExpandResult {
-            value: Arc::new(
-                tt::Subtree {
-                    delimiter: tt::Delimiter::UNSPECIFIED,
-                    token_trees: Vec::new(),
-                },
-            ),
-            // FIXME: We should make sure to enforce a variant that invalid macro
-            // calls do not reach this call path!
-            err: Some(ExpandError::other(
-                "invalid token tree"
-            )),
-        };
-    };
-    let (arg_tt, arg_tm, undo_info) = &*macro_arg;
-    let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt);
 
     if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
         // FIXME: We should report both errors!
@@ -483,48 +616,29 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
     }
 
     // Set a hard limit for the expanded tt
-    let count = tt.count();
-    if TOKEN_LIMIT.check(count).is_err() {
-        return ExpandResult {
-            value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::UNSPECIFIED,
-                token_trees: vec![],
-            }),
-            err: Some(ExpandError::other(format!(
-                "macro invocation exceeds token limit: produced {} tokens, limit is {}",
-                count,
-                TOKEN_LIMIT.inner(),
-            ))),
-        };
+    if let Err(value) = check_tt_count(&tt) {
+        return value;
     }
 
-    fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
-
     ExpandResult { value: Arc::new(tt), err }
 }
 
-fn parse_macro_expansion_error(
-    db: &dyn ExpandDatabase,
-    macro_call_id: MacroCallId,
-) -> ExpandResult<Box<[SyntaxError]>> {
-    db.parse_macro_expansion(MacroFile { macro_call_id })
-        .map(|it| it.0.errors().to_vec().into_boxed_slice())
-}
-
-fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
+fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let loc = db.lookup_intern_macro_call(id);
-    let Some(macro_arg) = db.macro_arg(id) else {
+    let Some(macro_arg) = db.macro_arg(id).value else {
         return ExpandResult {
-            value: tt::Subtree {
+            value: Arc::new(tt::Subtree {
                 delimiter: tt::Delimiter::UNSPECIFIED,
                 token_trees: Vec::new(),
-            },
-            err: Some(ExpandError::other(
-                "invalid token tree"
-            )),
+            }),
+            // FIXME: We should make sure to enforce an invariant that invalid macro
+            // calls do not reach this call path!
+            err: Some(ExpandError::other("invalid token tree")),
        };
    };
 
+    let (arg_tt, arg_tm, undo_info) = &*macro_arg;
+
     let expander = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => expander,
         _ => unreachable!(),
@@ -533,13 +647,23 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<t
     let attr_arg = match &loc.kind {
         MacroCallKind::Attr { attr_args, .. } => {
             let mut attr_args = attr_args.0.clone();
-            mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
+            mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
             Some(attr_args)
         }
         _ => None,
     };
 
-    expander.expand(db, loc.def.krate, loc.krate, &macro_arg.0, attr_arg.as_ref())
+    let ExpandResult { value: mut tt, err } =
+        expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+
+    // Set a hard limit for the expanded tt
+    if let Err(value) = check_tt_count(&tt) {
+        return value;
+    }
+
+    fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
+
+    ExpandResult { value: Arc::new(tt), err }
 }
 
 fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
@@ -563,3 +687,22 @@ fn token_tree_to_syntax_node(
     };
     mbe::token_tree_to_syntax_node(tt, entry_point)
 }
+
+fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
+    let count = tt.count();
+    if TOKEN_LIMIT.check(count).is_err() {
+        Err(ExpandResult {
+            value: Arc::new(tt::Subtree {
+                delimiter: tt::Delimiter::UNSPECIFIED,
+                token_trees: vec![],
+            }),
+            err: Some(ExpandError::other(format!(
+                "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+                count,
+                TOKEN_LIMIT.inner(),
+            ))),
+        })
+    } else {
+        Ok(())
+    }
+}
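The central change in the hunks above is that `macro_arg` and the new `macro_arg_node` query return a value-plus-errors pair instead of a bare `Option`, so a syntactically broken macro call can still yield a best-effort token tree alongside its diagnostics; `check_tt_count` then factors the token-limit guard out of `macro_expand` and `expand_proc_macro`. Below is a minimal sketch of that `ValueResult` shape; it mirrors `mbe::ValueResult` in spirit only, not its exact API.

// Minimal sketch of the `ValueResult` shape: a best-effort value plus an
// optional error, so callers can keep a degraded value while still surfacing
// diagnostics.
struct ValueResult<V, E> {
    value: V,
    err: Option<E>,
}

impl<V, E> ValueResult<V, E> {
    fn ok(value: V) -> Self {
        Self { value, err: None }
    }
    fn new(value: V, err: E) -> Self {
        Self { value, err: Some(err) }
    }
}

impl<V: Default, E> ValueResult<V, E> {
    fn only_err(err: E) -> Self {
        Self { value: V::default(), err: Some(err) }
    }
}

fn main() {
    // A clean parse: a value and no error.
    let clean: ValueResult<Option<&str>, String> = ValueResult::ok(Some("tt"));
    assert!(clean.err.is_none());

    // A broken-but-recoverable parse: both a value and an error survive.
    let partial = ValueResult::new(Some("best-effort tt"), "unbalanced token tree".to_string());
    assert!(partial.value.is_some() && partial.err.is_some());

    // A hopeless parse: only an error.
    let failed: ValueResult<Option<&str>, String> =
        ValueResult::only_err("invalid macro call".to_string());
    assert!(failed.value.is_none());
}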
@ -19,7 +19,8 @@
|
|||
//!
|
||||
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
|
||||
use base_db::CrateId;
|
||||
use syntax::{ted, Parse, SyntaxNode};
|
||||
use rustc_hash::FxHashMap;
|
||||
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
|
||||
use triomphe::Arc;
|
||||
|
||||
use crate::{
|
||||
|
@ -38,19 +39,8 @@ pub fn expand_eager_macro_input(
|
|||
def: MacroDefId,
|
||||
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
|
||||
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
|
||||
assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
|
||||
let token_tree = macro_call.value.token_tree();
|
||||
|
||||
let Some(token_tree) = token_tree else {
|
||||
return Ok(ExpandResult { value: None, err:
|
||||
Some(ExpandError::other(
|
||||
"invalid token tree"
|
||||
)),
|
||||
});
|
||||
};
|
||||
let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
|
||||
|
||||
let ast_map = db.ast_id_map(macro_call.file_id);
|
||||
// the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
|
||||
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(¯o_call.value));
|
||||
let expand_to = ExpandTo::from_call_site(¯o_call.value);
|
||||
|
||||
|
@ -61,41 +51,69 @@ pub fn expand_eager_macro_input(
|
|||
let arg_id = db.intern_macro_call(MacroCallLoc {
|
||||
def,
|
||||
krate,
|
||||
eager: Some(Box::new(EagerCallInfo {
|
||||
arg: Arc::new((parsed_args, arg_token_map)),
|
||||
arg_id: None,
|
||||
error: None,
|
||||
})),
|
||||
eager: None,
|
||||
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
|
||||
});
|
||||
let arg_as_expr = match db.macro_arg_text(arg_id) {
|
||||
Some(it) => it,
|
||||
None => {
|
||||
return Ok(ExpandResult {
|
||||
value: None,
|
||||
err: Some(ExpandError::other("invalid token tree")),
|
||||
})
|
||||
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
|
||||
db.parse_macro_expansion(arg_id.as_macro_file());
|
||||
// we need this map here as the expansion of the eager input fake file loses whitespace ...
|
||||
let mut ws_mapping = FxHashMap::default();
|
||||
if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
|
||||
ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
|
||||
Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
|
||||
}));
|
||||
}
|
||||
|
||||
let ExpandResult { value: expanded_eager_input, err } = {
|
||||
eager_macro_recur(
|
||||
db,
|
||||
&Hygiene::new(db, macro_call.file_id),
|
||||
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
|
||||
krate,
|
||||
resolver,
|
||||
)?
|
||||
};
|
||||
let err = parse_err.or(err);
|
||||
|
||||
let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
|
||||
return Ok(ExpandResult { value: None, err });
|
||||
};
|
||||
|
||||
let og_tmap = mbe::syntax_node_to_token_map(
|
||||
macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(),
|
||||
);
|
||||
|
||||
let (mut subtree, expanded_eager_input_token_map) =
|
||||
mbe::syntax_node_to_token_tree(&expanded_eager_input);
|
||||
|
||||
// The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
|
||||
// so we need to remap them to the original input of the eager macro.
|
||||
subtree.visit_ids(&|id| {
|
||||
// Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
|
||||
|
||||
if let Some(range) =
|
||||
expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
|
||||
{
|
||||
// remap from expanded eager input to eager input expansion
|
||||
if let Some(og_range) = mapping.get(&range) {
|
||||
// remap from eager input expansion to original eager input
|
||||
if let Some(&og_range) = ws_mapping.get(og_range) {
|
||||
if let Some(og_token) = og_tmap.token_by_range(og_range) {
|
||||
return og_token;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
|
||||
db,
|
||||
&Hygiene::new(db, macro_call.file_id),
|
||||
InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
|
||||
krate,
|
||||
resolver,
|
||||
)?;
|
||||
let Some(expanded_eager_input) = expanded_eager_input else {
|
||||
return Ok(ExpandResult { value: None, err })
|
||||
};
|
||||
let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
|
||||
tt::TokenId::UNSPECIFIED
|
||||
});
|
||||
subtree.delimiter = crate::tt::Delimiter::unspecified();
|
||||
|
||||
let loc = MacroCallLoc {
|
||||
def,
|
||||
krate,
|
||||
eager: Some(Box::new(EagerCallInfo {
|
||||
arg: Arc::new((subtree, token_map)),
|
||||
arg_id: Some(arg_id),
|
||||
arg: Arc::new((subtree, og_tmap)),
|
||||
arg_id,
|
||||
error: err.clone(),
|
||||
})),
|
||||
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
|
||||
|

@@ -109,19 +127,16 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);

let expand_to = ExpandTo::from_call_site(&macro_call.value);
let id = def.as_lazy_macro(
db,
krate,
MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
);

let ast_id = macro_call.with_value(ast_id);
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let macro_file = id.as_macro_file();

db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
db.parse_macro_expansion(macro_file)
.map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
}

fn eager_macro_recur(

@@ -130,18 +145,43 @@ fn eager_macro_recur(
curr: InFile<SyntaxNode>,
krate: CrateId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
) -> Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> {
let original = curr.value.clone_for_update();
let mut mapping = FxHashMap::default();

let children = original.descendants().filter_map(ast::MacroCall::cast);
let mut replacements = Vec::new();

// Note: We only report a single error inside of eager expansions
let mut error = None;
let mut offset = 0i32;
let apply_offset = |it: TextSize, offset: i32| {
TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
};
let mut children = original.preorder_with_tokens();

// Collect replacement
for child in children {
let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
while let Some(child) = children.next() {
let WalkEvent::Enter(child) = child else { continue };
let call = match child {
syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
Some(it) => {
children.skip_subtree();
it
}
None => continue,
},
syntax::NodeOrToken::Token(t) => {
mapping.insert(
TextRange::new(
apply_offset(t.text_range().start(), offset),
apply_offset(t.text_range().end(), offset),
),
t.text_range(),
);
continue;
}
};
let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
None => {
error = Some(ExpandError::other("malformed macro invocation"));

@@ -153,7 +193,7 @@ fn eager_macro_recur(
let ExpandResult { value, err } = match expand_eager_macro_input(
db,
krate,
curr.with_value(child.clone()),
curr.with_value(call.clone()),
def,
macro_resolver,
) {

@@ -161,9 +201,22 @@ fn eager_macro_recur(
Err(err) => return Err(err),
};
match value {
Some(call) => {
Some(call_id) => {
let ExpandResult { value, err: err2 } =
db.parse_macro_expansion(call.as_macro_file());
db.parse_macro_expansion(call_id.as_macro_file());

let call_tt_start =
call.token_tree().unwrap().syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
value
.1
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
.map(|r| (r + call_start, range + call_tt_start))
}));
};

ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
err: err.or(err2),

@@ -177,36 +230,61 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value, err } =
lazy_expand(db, &def, curr.with_value(child.clone()), krate);
let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
Some(db.decl_macro_expander(def.krate, ast_id))
} else {
None
};

// replace macro inside
let hygiene = Hygiene::new(db, value.file_id);
let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
// FIXME: We discard parse errors here
value.map(|it| it.syntax_node()),
parse.as_ref().map(|it| it.syntax_node()),
krate,
macro_resolver,
)?;
let err = err.or(error);
ExpandResult { value, err }

let call_tt_start = call.token_tree().unwrap().syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_tt, arg_map, _)) = parse
.file_id
.macro_file()
.and_then(|id| db.macro_arg(id.macro_call_id).value)
.as_deref()
{
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
tm.first_range_by_token(
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
syntax::SyntaxKind::TOMBSTONE,
)
.map(|r| (r + call_start, range + call_tt_start))
}));
};
// FIXME: Do we need to re-use _m here?
ExpandResult { value: value.map(|(n, _m)| n), err }
}
};
if err.is_some() {
error = err;
}
// check if the whole original syntax is replaced
if child.syntax() == &original {
return Ok(ExpandResult { value, err: error });
if call.syntax() == &original {
return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
}

if let Some(insert) = value {
replacements.push((child, insert));
offset += u32::from(insert.text_range().len()) as i32
- u32::from(call.syntax().text_range().len()) as i32;
replacements.push((call, insert));
}
}

replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
Ok(ExpandResult { value: Some(original), err: error })
Ok(ExpandResult { value: Some((original, mapping)), err: error })
}
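
The offset bookkeeping above exists because each in-place replacement changes the length of `original`, so every position recorded afterwards must be shifted by the accumulated size delta. A small runnable sketch of the same arithmetic, with bare integers in place of TextSize (illustration only):

// Mirror of `offset += new_len - old_len` plus `apply_offset` from the
// function above: positions recorded after a replacement are shifted by
// the running delta between expansion and call-site lengths.
fn apply_offset(pos: u32, offset: i32) -> u32 {
    u32::try_from(offset + pos as i32).unwrap_or_default()
}

fn main() {
    let mut offset = 0i32;
    // A 5-byte macro call is replaced by a 2-byte expansion.
    offset += 2 - 5;
    assert_eq!(apply_offset(20, offset), 17);
    // A 4-byte call is then replaced by a 10-byte expansion.
    offset += 10 - 4;
    assert_eq!(apply_offset(20, offset), 23);
}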

@@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups {
/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
original: Vec<Subtree>,
original: Box<[Subtree]>,
}

const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);

@@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
replace,
token_map,
next_id,
undo_info: SyntaxFixupUndoInfo { original },
undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
}
}

@@ -472,13 +472,13 @@ fn foo () {match __ra_fixup {}}
check(
r#"
fn foo() {
match x {
match it {

}
}
"#,
expect![[r#"
fn foo () {match x {}}
fn foo () {match it {}}
"#]],
)
}

@@ -547,11 +547,11 @@ fn foo () {a . __ra_fixup ; bar () ;}
check(
r#"
fn foo() {
let x = a
let it = a
}
"#,
expect![[r#"
fn foo () {let x = a ;}
fn foo () {let it = a ;}
"#]],
)
}

@@ -561,11 +561,11 @@ fn foo () {let x = a ;}
check(
r#"
fn foo() {
let x = a.
let it = a.
}
"#,
expect![[r#"
fn foo () {let x = a . __ra_fixup ;}
fn foo () {let it = a . __ra_fixup ;}
"#]],
)
}
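
The switch from Vec<Subtree> to Box<[Subtree]> trades growability for a smaller handle: a boxed slice is a pointer plus a length, while a Vec also carries a capacity and may over-allocate. A quick self-contained illustration of the difference and of the `into_boxed_slice` conversion used in `fixup_syntax`:

use std::mem::size_of;

fn main() {
    // Layout on current rustc targets: Box<[T]> is (ptr, len),
    // Vec<T> is (ptr, cap, len).
    assert_eq!(size_of::<Box<[u8]>>(), 2 * size_of::<usize>());
    assert_eq!(size_of::<Vec<u8>>(), 3 * size_of::<usize>());

    // `into_boxed_slice` shrinks the allocation to fit and drops the
    // capacity field.
    let v = vec![1u8, 2, 3];
    let b: Box<[u8]> = v.into_boxed_slice();
    assert_eq!(b.len(), 3);
}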

@@ -126,7 +126,7 @@ struct HygieneInfo {
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,

macro_def: Arc<TokenExpander>,
macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,

@@ -149,19 +149,15 @@ impl HygieneInfo {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
None => (
&self.macro_arg.1,
InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
),
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
mbe::Origin::Call => (
&self.macro_arg.1,
InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
),
mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
(def_site_token_map, *tt)
mbe::Origin::Call => {
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
}
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},

@@ -198,9 +194,9 @@ fn make_hygiene_info(
_ => None,
});

let macro_def = db.macro_def(loc.def).ok()?;
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),

@@ -37,11 +37,11 @@ use either::Either;
use syntax::{
algo::{self, skip_trivia_token},
ast::{self, AstNode, HasDocComments},
Direction, SyntaxNode, SyntaxToken,
AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
};

use crate::{
ast_id_map::FileAstId,
ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,

@@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId);
pub struct MacroCallLoc {
pub def: MacroDefId,
pub(crate) krate: CrateId,
/// Some if `def` is a builtin eager macro.
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
eager: Option<Box<EagerCallInfo>>,
pub kind: MacroCallKind,
}

@@ -152,11 +153,10 @@ pub enum MacroDefKind {

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
/// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
/// The expanded argument of the eager macro.
arg: Arc<(tt::Subtree, TokenMap)>,
/// call id of the eager macro's input file. If this is none, macro call containing this call info
/// is an eager macro's input, otherwise it is its output.
arg_id: Option<MacroCallId>,
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
}

@@ -221,11 +221,7 @@ impl HirFileId {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
let is_include_expansion = loc.def.is_include()
&& matches!(
loc.eager.as_deref(),
Some(EagerCallInfo { arg_id: Some(_), .. })
);
let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
Some(Ok((_, file))) => file.into(),
_ => loc.kind.file_id(),

@@ -270,57 +266,13 @@ impl HirFileId {
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);

let arg_tt = loc.kind.arg(db)?;

let macro_def = db.macro_def(loc.def).ok()?;
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
});

let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => {
return None
}
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});

Some(ExpansionInfo {
expanded: InFile::new(self, parse.syntax_node()),
arg: InFile::new(loc.kind.file_id(), arg_tt),
attr_input_or_mac_def,
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
})
ExpansionInfo::new(db, macro_file)
}

/// Indicate it is macro file generated for builtin derive
pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> {
pub fn as_builtin_derive_attr_node(
&self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {

@@ -333,8 +285,22 @@ impl HirFileId {
pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
matches!(
db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
)
}
None => false,
}
}

pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
matches!(
db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
MacroDefKind::BuiltInDerive(..)
)
}
None => false,
}

@@ -344,8 +310,7 @@ impl HirFileId {
pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
loc.def.is_include()
db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
}
_ => false,
}

@@ -355,7 +320,7 @@ impl HirFileId {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
_ => false,
}

@@ -536,9 +501,9 @@ impl MacroCallKind {
};

let range = match kind {
MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
MacroCallKind::Derive { ast_id, .. } => ast_id.to_ptr(db).text_range(),
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).text_range(),
};

FileRange { range, file_id }

@@ -588,13 +553,18 @@ impl MacroCallKind {
FileRange { range, file_id }
}

fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
Some(ast_id.to_node(db).token_tree()?.syntax().clone())
MacroCallKind::FnLike { ast_id, .. } => ast_id
.to_in_file_node(db)
.map(|it| Some(it.token_tree()?.syntax().clone()))
.transpose(),
MacroCallKind::Derive { ast_id, .. } => {
Some(ast_id.to_in_file_node(db).syntax().cloned())
}
MacroCallKind::Attr { ast_id, .. } => {
Some(ast_id.to_in_file_node(db).syntax().cloned())
}
MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
}
}
}

@@ -612,13 +582,13 @@ impl MacroCallId {
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
expanded: InFile<SyntaxNode>,
expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
arg: InFile<SyntaxNode>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,

macro_def: Arc<TokenExpander>,
macro_def: TokenExpander,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
/// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
/// and as such we need to shift tokens if they are part of an attributes input instead of their item.

@@ -628,7 +598,7 @@ pub struct ExpansionInfo {

impl ExpansionInfo {
pub fn expanded(&self) -> InFile<SyntaxNode> {
self.expanded.clone()
self.expanded.clone().into()
}

pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {

@@ -659,7 +629,7 @@ impl ExpansionInfo {
let token_id_in_attr_input = if let Some(item) = item {
// check if we are mapping down in an attribute input
// this is a special case as attributes can have two inputs
let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);

let token_range = token.value.text_range();

@@ -705,7 +675,7 @@ impl ExpansionInfo {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(relative_range)?;
// conditionally shift the id by a declaratives macro definition
// conditionally shift the id by a declarative macro definition
self.macro_def.map_id_down(token_id)
}
};

@@ -715,7 +685,7 @@ impl ExpansionInfo {
.ranges_by_token(token_id, token.value.kind())
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());

Some(tokens.map(move |token| self.expanded.with_value(token)))
Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
}

/// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.

@@ -724,18 +694,17 @@ impl ExpansionInfo {
db: &dyn db::ExpandDatabase,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
assert_eq!(token.file_id, self.expanded.file_id.into());
// Fetch the id through its text range,
let token_id = self.exp_map.token_by_range(token.value.text_range())?;
// conditionally unshifting the id to accommodate for macro-rules def site
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);

let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);

// Special case: map tokens from `include!` expansions to the included file
if loc.def.is_include()
&& matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
{
if loc.def.is_include() {
if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
let source = db.parse(file_id);

@@ -765,9 +734,9 @@ impl ExpansionInfo {
}
_ => match origin {
mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
(TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
(def_site_token_map, tt.syntax().cloned())
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, tt.syntax().cloned())
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},

@@ -779,6 +748,58 @@ impl ExpansionInfo {
tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
Some((tt.with_value(token), origin))
}

fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);

let arg_tt = loc.kind.arg(db)?;

let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };

let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
});

let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
return None
}
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});

Some(ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
})
}
}

/// `AstId` points to an AST node in any file.

@@ -786,10 +807,26 @@ impl ExpansionInfo {
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = InFile<FileAstId<N>>;

impl<N: AstNode> AstId<N> {
impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
let root = db.parse_or_expand(self.file_id);
db.ast_id_map(self.file_id).get(self.value).to_node(&root)
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
db.ast_id_map(self.file_id).get(self.value)
}
}

pub type ErasedAstId = InFile<ErasedFileAstId>;

impl ErasedAstId {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_raw(self.value)
}
}

@@ -850,7 +887,7 @@ impl<L, R> InFile<Either<L, R>> {
}
}

impl<'a> InFile<&'a SyntaxNode> {
impl InFile<&SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,

@@ -1011,6 +1048,18 @@ impl InFile<SyntaxToken> {
}
}

#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InMacroFile<T> {
pub file_id: MacroFile,
pub value: T,
}

impl<T> From<InMacroFile<T>> for InFile<T> {
fn from(macro_file: InMacroFile<T>) -> Self {
InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
}
}

fn ascend_node_border_tokens(
db: &dyn db::ExpandDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
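
InMacroFile is the statically-typed sibling of InFile: its file id is known to be a macro file, so callers skip the fallible `macro_file()?` step, and the From impl above lets it decay to the general form where needed. A stripped-down, self-contained sketch of the pattern, with toy ids standing in for the interned HirFileId/MacroFile:

// Toy ids; the real HirFileId/MacroFile are interned database ids.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MacroFile(u32);

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum HirFileId {
    File(u32),
    Macro(MacroFile),
}

impl From<MacroFile> for HirFileId {
    fn from(m: MacroFile) -> Self {
        HirFileId::Macro(m)
    }
}

#[derive(Debug, PartialEq, Eq)]
struct InFile<T> {
    file_id: HirFileId,
    value: T,
}

// The wrapper encodes "lives in a macro expansion" in the type.
struct InMacroFile<T> {
    file_id: MacroFile,
    value: T,
}

impl<T> From<InMacroFile<T>> for InFile<T> {
    fn from(m: InMacroFile<T>) -> Self {
        InFile { file_id: m.file_id.into(), value: m.value }
    }
}

fn main() {
    let tok: InFile<&str> = InMacroFile { file_id: MacroFile(0), value: "tok" }.into();
    assert_eq!(tok, InFile { file_id: HirFileId::Macro(MacroFile(0)), value: "tok" });
}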

@@ -126,7 +126,7 @@ struct Display<'a> {
path: &'a ModPath,
}

impl<'a> fmt::Display for Display<'a> {
impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path, f, true)
}

@@ -137,7 +137,7 @@ struct UnescapedDisplay<'a> {
path: &'a UnescapedModPath<'a>,
}

impl<'a> fmt::Display for UnescapedDisplay<'a> {
impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path.0, f, false)
}
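
Several impls in this commit drop a named lifetime parameter in favor of the anonymous `'_`, the idiomatic spelling when the lifetime is never referred to inside the block. A minimal runnable example of the rewritten shape:

use std::fmt;

struct Display<'a> {
    text: &'a str,
}

// Before: impl<'a> fmt::Display for Display<'a> { ... }
// After: the lifetime is unused in the body, so `'_` suffices.
impl fmt::Display for Display<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.text)
    }
}

fn main() {
    println!("{}", Display { text: "hello" });
}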

@@ -24,7 +24,7 @@ enum Repr {
TupleField(usize),
}

impl<'a> UnescapedName<'a> {
impl UnescapedName<'_> {
/// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over
/// [`ToString::to_string`] if possible as this conversion is cheaper in the general case.
pub fn to_smol_str(&self) -> SmolStr {

@@ -40,7 +40,7 @@ impl<'a> UnescapedName<'a> {
}
}

pub fn display(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ {
_ = db;
UnescapedDisplay { name: self }
}

@@ -96,6 +96,15 @@ impl Name {
Name::new_inline("[missing name]")
}

/// Returns true if this is a fake name for things missing in the source code. See
/// [`missing()`][Self::missing] for details.
///
/// Use this method instead of comparing with `Self::missing()`, as missing names
/// (ideally) should have `gensym` semantics.
pub fn is_missing(&self) -> bool {
self == &Name::missing()
}

/// Generates a new name which is only equal to itself, by incrementing a counter. Due
/// to its implementation, it should not be used in things that salsa considers, like
/// type names or field names, and it should only be used in names of local variables
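
The counter-based scheme this doc comment describes is the usual way to get gensym-like names that are only ever equal to themselves. A self-contained sketch of the idea; the function name and name format below are illustrative, not rust-analyzer's actual implementation:

use std::sync::atomic::{AtomicU32, Ordering};

static COUNTER: AtomicU32 = AtomicU32::new(0);

// Every call yields a fresh name, so two generated names never compare
// equal (hypothetical helper, for illustration).
fn generate_new_name() -> String {
    let n = COUNTER.fetch_add(1, Ordering::Relaxed);
    format!("__gensym_{n}")
}

fn main() {
    assert_ne!(generate_new_name(), generate_new_name());
}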

@@ -162,7 +171,7 @@ struct Display<'a> {
name: &'a Name,
}

impl<'a> fmt::Display for Display<'a> {
impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.name.0 {
Repr::Text(text) => fmt::Display::fmt(&text, f),

@@ -175,7 +184,7 @@ struct UnescapedDisplay<'a> {
name: &'a UnescapedName<'a>,
}

impl<'a> fmt::Display for UnescapedDisplay<'a> {
impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.name.0 .0 {
Repr::Text(text) => {

@@ -282,8 +291,10 @@ pub mod known {
alloc,
iter,
ops,
fmt,
future,
result,
string,
boxed,
option,
prelude,

@@ -311,6 +322,7 @@ pub mod known {
RangeToInclusive,
RangeTo,
Range,
String,
Neg,
Not,
None,

@@ -321,6 +333,7 @@ pub mod known {
iter_mut,
len,
is_empty,
as_str,
new,
// Builtin macros
asm,

@@ -334,6 +347,7 @@ pub mod known {
core_panic,
env,
file,
format,
format_args_nl,
format_args,
global_asm,

@@ -365,6 +379,7 @@ pub mod known {
cfg_eval,
crate_type,
derive,
derive_const,
global_allocator,
no_core,
no_std,