mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 21:05:02 +00:00

Merge commit '457b966b17' into sync-from-ra

This commit is contained in:
parent 5285df4f6c
commit f532576ac5

263 changed files with 9788 additions and 6258 deletions

@@ -12,11 +12,40 @@ use std::{
     marker::PhantomData,
 };

-use la_arena::{Arena, Idx};
+use la_arena::{Arena, Idx, RawIdx};
 use profile::Count;
 use rustc_hash::FxHasher;
 use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};

 use crate::db;

+pub use base_db::span::ErasedFileAstId;
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = crate::InFile<FileAstId<N>>;
+
+impl<N: AstIdNode> AstId<N> {
+    pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
+        self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+    }
+    pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
+        crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+    }
+    pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+        db.ast_id_map(self.file_id).get(self.value)
+    }
+}
+
+pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+    pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+        db.ast_id_map(self.file_id).get_erased(self.value)
+    }
+}
+
 /// `AstId` points to an AST node in a specific file.
 pub struct FileAstId<N: AstIdNode> {
     raw: ErasedFileAstId,

@@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
     }
 }

-pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
 pub trait AstIdNode: AstNode {}
 macro_rules! register_ast_id_node {
     (impl AstIdNode for $($ident:ident),+ ) => {

@@ -129,6 +156,11 @@ impl AstIdMap {
     pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap::default();
+
+        // make sure to allocate the root node
+        if !should_alloc_id(node.kind()) {
+            res.alloc(node);
+        }
         // By walking the tree in breadth-first order we make sure that parents
         // get lower ids then children. That is, adding a new child does not
         // change parent's id. This means that, say, adding a new function to a

@@ -136,9 +168,9 @@ impl AstIdMap {
         bdfs(node, |it| {
             if should_alloc_id(it.kind()) {
                 res.alloc(&it);
-                true
+                TreeOrder::BreadthFirst
             } else {
-                false
+                TreeOrder::DepthFirst
             }
         });
         res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());

@@ -155,6 +187,11 @@ impl AstIdMap {
         res
     }

+    /// The [`AstId`] of the root node
+    pub fn root(&self) -> SyntaxNodePtr {
+        self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
+    }
+
     pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, covariant: PhantomData }

@@ -164,7 +201,7 @@ impl AstIdMap {
         AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
     }

-    pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+    pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
         self.arena[id].clone()
     }

@@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
     hasher.finish()
 }

+#[derive(Copy, Clone, PartialEq, Eq)]
+enum TreeOrder {
+    BreadthFirst,
+    DepthFirst,
+}
+
 /// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
 /// order? It is a mix of breadth-first and depth first orders. Nodes for which
-/// `f` returns true are visited breadth-first, all the other nodes are explored
-/// depth-first.
+/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
+/// [`TreeOrder::DepthFirst`].
 ///
 /// In other words, the size of the bfs queue is bound by the number of "true"
 /// nodes.
-fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
+fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
     let mut curr_layer = vec![node.clone()];
     let mut next_layer = vec![];
     while !curr_layer.is_empty() {

@@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
         while let Some(event) = preorder.next() {
             match event {
                 syntax::WalkEvent::Enter(node) => {
-                    if f(node.clone()) {
+                    if f(node.clone()) == TreeOrder::BreadthFirst {
                         next_layer.extend(node.children());
                         preorder.skip_subtree();
                     }

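For illustration, the `bool` to `TreeOrder` change above is easier to see in isolation. Below is a minimal, self-contained sketch of the same traversal policy over a toy tree; `Node` and its `children` field are invented stand-ins, not rust-analyzer's `SyntaxNode` API, and only the `TreeOrder` control flow mirrors the diff.

#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
    BreadthFirst,
    DepthFirst,
}

#[derive(Clone)]
struct Node {
    children: Vec<Node>,
}

// Mixed walk: a node answering BreadthFirst has its children deferred to the
// next BFS layer; a node answering DepthFirst has its subtree explored on the
// spot. The BFS queue is therefore bounded by the number of BreadthFirst nodes.
fn bdfs(root: &Node, mut f: impl FnMut(&Node) -> TreeOrder) {
    let mut curr_layer = vec![root.clone()];
    let mut next_layer = Vec::new();
    while !curr_layer.is_empty() {
        for node in curr_layer.drain(..) {
            // Depth-first preorder over this node's subtree...
            let mut stack = vec![node];
            while let Some(n) = stack.pop() {
                if f(&n) == TreeOrder::BreadthFirst {
                    // ...except BreadthFirst subtrees are cut off here and
                    // their children queued (the skip_subtree analogue).
                    next_layer.extend(n.children.iter().cloned());
                } else {
                    stack.extend(n.children.iter().cloned());
                }
            }
        }
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}

fn main() {
    // A root with two leaf children; every node is visited exactly once.
    let tree = Node { children: vec![Node { children: vec![] }, Node { children: vec![] }] };
    let mut visited = 0;
    bdfs(&tree, |_| {
        visited += 1;
        TreeOrder::BreadthFirst
    });
    assert_eq!(visited, 3);
}
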
@@ -1,19 +1,19 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
 use std::{fmt, ops};

-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
 use cfg::CfgExpr;
 use either::Either;
 use intern::Interned;
 use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
-use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
 use triomphe::Arc;

 use crate::{
     db::ExpandDatabase,
-    hygiene::Hygiene,
     mod_path::ModPath,
+    span::SpanMapRef,
     tt::{self, Subtree},
     InFile,
 };

@@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs {
 impl RawAttrs {
     pub const EMPTY: Self = Self { entries: None };

-    pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
-        let entries = collect_attrs(owner)
-            .filter_map(|(id, attr)| match attr {
-                Either::Left(attr) => {
-                    attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
-                }
-                Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
-                    id,
-                    input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
-                    path: Interned::new(ModPath::from(crate::name!(doc))),
-                }),
-            })
-            .collect::<Vec<_>>();
-        // FIXME: use `Arc::from_iter` when it becomes available
-        let entries: Arc<[Attr]> = Arc::from(entries);
+    pub fn new(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+    ) -> Self {
+        let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
+            Either::Left(attr) => {
+                attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+            }
+            Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+                id,
+                input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+                path: Interned::new(ModPath::from(crate::name!(doc))),
+                ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+            }),
+        });
+        let entries: Arc<[Attr]> = Arc::from_iter(entries);

         Self { entries: if entries.is_empty() { None } else { Some(entries) } }
     }

-    pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
-        let hygiene = Hygiene::new(db, owner.file_id);
-        Self::new(db, owner.value, &hygiene)
+    pub fn from_attrs_owner(
+        db: &dyn ExpandDatabase,
+        owner: InFile<&dyn ast::HasAttrs>,
+        span_map: SpanMapRef<'_>,
+    ) -> Self {
+        Self::new(db, owner.value, span_map)
     }

     pub fn merge(&self, other: Self) -> Self {

@@ -71,19 +76,13 @@ impl RawAttrs {
             (Some(a), Some(b)) => {
                 let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
                 Self {
-                    entries: Some(Arc::from(
-                        a.iter()
-                            .cloned()
-                            .chain(b.iter().map(|it| {
-                                let mut it = it.clone();
-                                it.id.id = it.id.ast_index() as u32 + last_ast_index
-                                    | (it.id.cfg_attr_index().unwrap_or(0) as u32)
-                                        << AttrId::AST_INDEX_BITS;
-                                it
-                            }))
-                            // FIXME: use `Arc::from_iter` when it becomes available
-                            .collect::<Vec<_>>(),
-                    )),
+                    entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
+                        let mut it = it.clone();
+                        it.id.id = it.id.ast_index() as u32 + last_ast_index
+                            | (it.id.cfg_attr_index().unwrap_or(0) as u32)
+                                << AttrId::AST_INDEX_BITS;
+                        it
+                    })))),
                 }
             }
         }

@@ -100,51 +99,43 @@ impl RawAttrs {
         }

         let crate_graph = db.crate_graph();
-        let new_attrs = Arc::from(
-            self.iter()
-                .flat_map(|attr| -> SmallVec<[_; 1]> {
-                    let is_cfg_attr =
-                        attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
-                    if !is_cfg_attr {
-                        return smallvec![attr.clone()];
-                    }
+        let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
+            let is_cfg_attr =
+                attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+            if !is_cfg_attr {
+                return smallvec![attr.clone()];
+            }

-                    let subtree = match attr.token_tree_value() {
-                        Some(it) => it,
-                        _ => return smallvec![attr.clone()],
-                    };
+            let subtree = match attr.token_tree_value() {
+                Some(it) => it,
+                _ => return smallvec![attr.clone()],
+            };

-                    let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-                        Some(it) => it,
-                        None => return smallvec![attr.clone()],
-                    };
-                    let index = attr.id;
-                    let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
-                        |(idx, attr)| {
-                            let tree = Subtree {
-                                delimiter: tt::Delimiter::unspecified(),
-                                token_trees: attr.to_vec(),
-                            };
-                            // FIXME hygiene
-                            let hygiene = Hygiene::new_unhygienic();
-                            Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
-                        },
-                    );
+            let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+                Some(it) => it,
+                None => return smallvec![attr.clone()],
+            };
+            let index = attr.id;
+            let attrs =
+                parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
+                    let tree = Subtree {
+                        delimiter: tt::Delimiter::dummy_invisible(),
+                        token_trees: attr.to_vec(),
+                    };
+                    Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
+                });

-                    let cfg_options = &crate_graph[krate].cfg_options;
-                    let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
-                    let cfg = CfgExpr::parse(&cfg);
-                    if cfg_options.check(&cfg) == Some(false) {
-                        smallvec![]
-                    } else {
-                        cov_mark::hit!(cfg_attr_active);
+            let cfg_options = &crate_graph[krate].cfg_options;
+            let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+            let cfg = CfgExpr::parse(&cfg);
+            if cfg_options.check(&cfg) == Some(false) {
+                smallvec![]
+            } else {
+                cov_mark::hit!(cfg_attr_active);

-                        attrs.collect()
-                    }
-                })
-                // FIXME: use `Arc::from_iter` when it becomes available
-                .collect::<Vec<_>>(),
-        );
+                attrs.collect()
+            }
+        }));

         RawAttrs { entries: Some(new_attrs) }
     }

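For illustration, a recurring refactor in this commit replaces `.collect::<Vec<_>>()` plus `Arc::from(...)` with a direct `Arc::from_iter(...)`; the deleted FIXME comments say exactly that this was pending availability of `from_iter`. A minimal sketch of the pattern, using `std::sync::Arc` only as a stand-in for the `triomphe::Arc` the code actually uses (both expose `FromIterator` for `Arc<[T]>`):

use std::sync::Arc;

fn main() {
    let entries = ["cfg", "derive", "doc"];

    // Before: materialize a Vec, then copy it into the shared slice.
    let via_vec: Arc<[String]> =
        Arc::from(entries.iter().map(|s| s.to_string()).collect::<Vec<_>>());

    // After: collect straight into Arc<[T]>, skipping the intermediate Vec.
    let via_iter: Arc<[String]> = Arc::from_iter(entries.iter().map(|s| s.to_string()));

    assert_eq!(via_vec, via_iter);
}
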
@@ -185,21 +176,23 @@ pub struct Attr {
     pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Interned<AttrInput>>,
+    pub ctxt: SyntaxContextId,
 }

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum AttrInput {
     /// `#[attr = "string"]`
+    // FIXME: This is losing span
     Literal(SmolStr),
     /// `#[attr(subtree)]`
-    TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
+    TokenTree(Box<tt::Subtree>),
 }

 impl fmt::Display for AttrInput {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
-            AttrInput::TokenTree(tt) => tt.0.fmt(f),
+            AttrInput::TokenTree(tt) => tt.fmt(f),
         }
     }
 }

@@ -208,10 +201,10 @@ impl Attr {
     fn from_src(
         db: &dyn ExpandDatabase,
         ast: ast::Meta,
-        hygiene: &Hygiene,
+        span_map: SpanMapRef<'_>,
         id: AttrId,
     ) -> Option<Attr> {
-        let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
+        let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
         let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
             let value = match lit.kind() {
                 ast::LiteralKind::String(string) => string.value()?.into(),

@@ -219,24 +212,20 @@ impl Attr {
             };
             Some(Interned::new(AttrInput::Literal(value)))
         } else if let Some(tt) = ast.token_tree() {
-            let (tree, map) = syntax_node_to_token_tree(tt.syntax());
-            Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
+            let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
+            Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
         } else {
             None
         };
-        Some(Attr { id, path, input })
+        Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
     }

-    fn from_tt(
-        db: &dyn ExpandDatabase,
-        tt: &tt::Subtree,
-        hygiene: &Hygiene,
-        id: AttrId,
-    ) -> Option<Attr> {
-        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+    fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
+        // FIXME: Unecessary roundtrip tt -> ast -> tt
+        let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
         let ast = ast::Meta::cast(parse.syntax_node())?;

-        Self::from_src(db, ast, hygiene, id)
+        Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
     }

     pub fn path(&self) -> &ModPath {

@@ -256,7 +245,7 @@ impl Attr {
     /// #[path(ident)]
     pub fn single_ident_value(&self) -> Option<&tt::Ident> {
         match self.input.as_deref()? {
-            AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
+            AttrInput::TokenTree(tt) => match &*tt.token_trees {
                 [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
                 _ => None,
             },

@@ -267,7 +256,7 @@ impl Attr {
     /// #[path TokenTree]
     pub fn token_tree_value(&self) -> Option<&Subtree> {
         match self.input.as_deref()? {
-            AttrInput::TokenTree(tt) => Some(&tt.0),
+            AttrInput::TokenTree(tt) => Some(tt),
             _ => None,
         }
     }

@@ -276,8 +265,7 @@ impl Attr {
     pub fn parse_path_comma_token_tree<'a>(
         &'a self,
         db: &'a dyn ExpandDatabase,
-        hygiene: &'a Hygiene,
-    ) -> Option<impl Iterator<Item = ModPath> + 'a> {
+    ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
         let args = self.token_tree_value()?;

         if args.delimiter.kind != DelimiterKind::Parenthesis {

@@ -290,12 +278,13 @@ impl Attr {
             if tts.is_empty() {
                 return None;
             }
-            // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
+            // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
+            // here or maybe just parse a mod path from a token tree directly
             let subtree = tt::Subtree {
-                delimiter: tt::Delimiter::unspecified(),
-                token_trees: tts.into_iter().cloned().collect(),
+                delimiter: tt::Delimiter::dummy_invisible(),
+                token_trees: tts.to_vec(),
             };
-            let (parse, _) =
+            let (parse, span_map) =
                 mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
             let meta = ast::Meta::cast(parse.syntax_node())?;
             // Only simple paths are allowed.

@@ -304,7 +293,11 @@ impl Attr {
                 return None;
             }
             let path = meta.path()?;
-            ModPath::from_src(db, path, hygiene)
+            let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
+            Some((
+                ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
+                call_site,
+            ))
         });

         Some(paths)

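For illustration, the `AttrInput::TokenTree` change above (dropping the `mbe::TokenMap` from the boxed payload) works because spans now live on each token rather than in a side table. A simplified model of that shift; every type below is an invented stand-in for illustration, not rust-analyzer's real definition:

#[derive(Debug, Clone, Copy)]
struct Span {
    range: (u32, u32), // text range in the anchor file
    ctx: u32,          // syntax context (hygiene) id
}

#[derive(Debug, Clone)]
enum TokenTree {
    // The span is stored inline on the token itself.
    Ident { text: String, span: Span },
    // ... punct, literal, subtree omitted
}

// Before: spans recovered through a lookup table riding along with the tree.
#[allow(dead_code)]
struct OldAttrInput {
    tree: Vec<TokenTree>,
    map: Vec<(u32, (u32, u32))>, // token id -> text range side table
}

// After: the tree alone is self-describing, so the side table disappears.
struct NewAttrInput {
    tree: Vec<TokenTree>,
}

fn main() {
    let tok = TokenTree::Ident { text: "derive".into(), span: Span { range: (0, 6), ctx: 0 } };
    let input = NewAttrInput { tree: vec![tok] };
    println!("{:?}", input.tree);
}
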
@@ -1,16 +1,22 @@
 //! Builtin attributes.

+use base_db::{
+    span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+    FileId,
+};
+use syntax::{TextRange, TextSize};
+
 use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};

 macro_rules! register_builtin {
-    ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
+    ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
         #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
         pub enum BuiltinAttrExpander {
             $($variant),*
         }

         impl BuiltinAttrExpander {
-            pub fn expand(
+            pub fn $expand_fn(
                 &self,
                 db: &dyn ExpandDatabase,
                 id: MacroCallId,

@@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
     }
 }

-register_builtin! {
+register_builtin! { expand:
     (bench, Bench) => dummy_attr_expand,
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,

@@ -77,9 +83,8 @@ fn dummy_attr_expand(
 ///
 /// As such, we expand `#[derive(Foo, bar::Bar)]` into
 /// ```
-/// #[Foo]
-/// #[bar::Bar]
-/// ();
+/// #![Foo]
+/// #![bar::Bar]
 /// ```
 /// which allows fallback path resolution in hir::Semantics to properly identify our derives.
 /// Since we do not expand the attribute in nameres though, we keep the original item.

@@ -98,21 +103,31 @@ fn derive_attr_expand(
 ) -> ExpandResult<tt::Subtree> {
     let loc = db.lookup_intern_macro_call(id);
     let derives = match &loc.kind {
-        MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
-        _ => return ExpandResult::ok(tt::Subtree::empty()),
+        MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
+            attr_args
+        }
+        _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
     };
-    pseudo_derive_attr_expansion(tt, derives)
+    pseudo_derive_attr_expansion(tt, derives, loc.call_site)
 }

 pub fn pseudo_derive_attr_expansion(
     tt: &tt::Subtree,
     args: &tt::Subtree,
+    call_site: SyntaxContextId,
 ) -> ExpandResult<tt::Subtree> {
     let mk_leaf = |char| {
         tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
             char,
             spacing: tt::Spacing::Alone,
-            span: tt::TokenId::unspecified(),
+            span: tt::SpanData {
+                range: TextRange::empty(TextSize::new(0)),
+                anchor: base_db::span::SpanAnchor {
+                    file_id: FileId::BOGUS,
+                    ast_id: ROOT_ERASED_FILE_AST_ID,
+                },
+                ctx: call_site,
+            },
         }))
     };

@@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
         .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
     {
         token_trees.push(mk_leaf('#'));
+        token_trees.push(mk_leaf('!'));
         token_trees.push(mk_leaf('['));
         token_trees.extend(tt.iter().cloned());
         token_trees.push(mk_leaf(']'));
     }
-    token_trees.push(mk_leaf('('));
-    token_trees.push(mk_leaf(')'));
-    token_trees.push(mk_leaf(';'));
     ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
 }

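For illustration, per the updated doc comment, the pseudo-derive expansion now emits inner attributes (`#![Foo]`) and drops the trailing `();`. A string-level toy of the splitting loop in `pseudo_derive_attr_expansion`; the real code operates on token trees, so everything here is a simplified assumption made for readability:

fn pseudo_derive(args: &str) -> String {
    let mut out = String::new();
    // Split the `#[derive(Foo, bar::Bar)]` argument list on commas,
    // emitting one inner attribute per derive path.
    for derive in args.split(',').map(str::trim).filter(|s| !s.is_empty()) {
        out.push_str("#![");
        out.push_str(derive);
        out.push_str("]\n");
    }
    out
}

fn main() {
    // `#[derive(Foo, bar::Bar)]` expands (for name-resolution purposes) to:
    //   #![Foo]
    //   #![bar::Bar]
    print!("{}", pseudo_derive("Foo, bar::Bar"));
}
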
@@ -1,16 +1,16 @@
 //! Builtin derives.

-use ::tt::Ident;
-use base_db::{CrateOrigin, LangCrateOrigin};
+use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
 use itertools::izip;
-use mbe::TokenMap;
 use rustc_hash::FxHashSet;
 use stdx::never;
 use tracing::debug;

 use crate::{
+    hygiene::span_with_def_site_ctxt,
     name::{AsName, Name},
-    tt::{self, TokenId},
+    span::SpanMapRef,
+    tt,
 };
 use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};

@@ -29,12 +29,15 @@ macro_rules! register_builtin {
             db: &dyn ExpandDatabase,
             id: MacroCallId,
             tt: &ast::Adt,
-            token_map: &TokenMap,
+            token_map: SpanMapRef<'_>,
         ) -> ExpandResult<tt::Subtree> {
             let expander = match *self {
                 $( BuiltinDeriveExpander::$trait => $expand, )*
             };
-            expander(db, id, tt, token_map)
+
+            let span = db.lookup_intern_macro_call(id).span(db);
+            let span = span_with_def_site_ctxt(db, span, id);
+            expander(db, id, span, tt, token_map)
         }

         fn find_by_name(name: &name::Name) -> Option<Self> {

@@ -70,19 +73,19 @@ enum VariantShape {
     Unit,
 }

-fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
-    (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
+fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
+    (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
 }

 impl VariantShape {
-    fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
-        self.as_pattern_map(path, |it| quote!(#it))
+    fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
+        self.as_pattern_map(path, span, |it| quote!(span => #it))
     }

-    fn field_names(&self) -> Vec<tt::Ident> {
+    fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
         match self {
             VariantShape::Struct(s) => s.clone(),
-            VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
+            VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
             VariantShape::Unit => vec![],
         }
     }

@@ -90,26 +93,27 @@ impl VariantShape {
     fn as_pattern_map(
         &self,
         path: tt::Subtree,
+        span: SpanData,
         field_map: impl Fn(&tt::Ident) -> tt::Subtree,
     ) -> tt::Subtree {
         match self {
             VariantShape::Struct(fields) => {
                 let fields = fields.iter().map(|it| {
                     let mapped = field_map(it);
-                    quote! { #it : #mapped , }
+                    quote! {span => #it : #mapped , }
                 });
-                quote! {
+                quote! {span =>
                     #path { ##fields }
                 }
             }
             &VariantShape::Tuple(n) => {
-                let fields = tuple_field_iterator(n).map(|it| {
+                let fields = tuple_field_iterator(span, n).map(|it| {
                     let mapped = field_map(&it);
-                    quote! {
+                    quote! {span =>
                         #mapped ,
                     }
                 });
-                quote! {
+                quote! {span =>
                     #path ( ##fields )
                 }
             }

@@ -117,7 +121,7 @@ impl VariantShape {
         }
     }

-    fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
+    fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
         let r = match value {
             None => VariantShape::Unit,
             Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(

@@ -139,17 +143,17 @@ enum AdtShape {
 }

 impl AdtShape {
-    fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
-        self.as_pattern_map(name, |it| quote!(#it))
+    fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
+        self.as_pattern_map(name, |it| quote!(span =>#it), span)
     }

-    fn field_names(&self) -> Vec<Vec<tt::Ident>> {
+    fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
         match self {
             AdtShape::Struct(s) => {
-                vec![s.field_names()]
+                vec![s.field_names(span)]
             }
             AdtShape::Enum { variants, .. } => {
-                variants.iter().map(|(_, fields)| fields.field_names()).collect()
+                variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
             }
             AdtShape::Union => {
                 never!("using fields of union in derive is always wrong");

@@ -162,18 +166,21 @@ impl AdtShape {
         &self,
         name: &tt::Ident,
         field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+        span: SpanData,
     ) -> Vec<tt::Subtree> {
         match self {
             AdtShape::Struct(s) => {
-                vec![s.as_pattern_map(quote! { #name }, field_map)]
+                vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
             }
             AdtShape::Enum { variants, .. } => variants
                 .iter()
-                .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
+                .map(|(v, fields)| {
+                    fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
+                })
                 .collect(),
             AdtShape::Union => {
                 never!("pattern matching on union is always wrong");
-                vec![quote! { un }]
+                vec![quote! {span => un }]
             }
         }
     }

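For illustration, every `quote! { ... }` in this file gains a `span =>` prefix: the macro now receives an explicit span value and stamps it on each token it produces, instead of using an internal dummy token id. A toy reduction of that calling convention; rust-analyzer's real `quote!` builds `tt` token trees, so the macro and types below are invented stand-ins:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Span(u32);

#[derive(Debug)]
struct Token {
    text: String,
    span: Span, // every emitted token carries the caller-provided span
}

macro_rules! toy_quote {
    ($span:expr => $($tok:ident)*) => {
        vec![$(Token { text: stringify!($tok).to_string(), span: $span }),*]
    };
}

fn main() {
    let call_site = Span(42);
    // Before: `quote! { marker Copy }` with an internal dummy span.
    // After: the span is threaded through explicitly.
    let tokens = toy_quote!(call_site => marker Copy Clone);
    assert!(tokens.iter().all(|t| t.span == call_site));
    println!("{tokens:?}");
}
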
@@ -189,8 +196,12 @@ struct BasicAdtInfo {
     associated_types: Vec<tt::Subtree>,
 }

-fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
-    let (name, generic_param_list, shape) = match &adt {
+fn parse_adt(
+    tm: SpanMapRef<'_>,
+    adt: &ast::Adt,
+    call_site: SpanData,
+) -> Result<BasicAdtInfo, ExpandError> {
+    let (name, generic_param_list, shape) = match adt {
         ast::Adt::Struct(it) => (
             it.name(),
             it.generic_param_list(),

@@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
             match this {
                 Some(it) => {
                     param_type_set.insert(it.as_name());
-                    mbe::syntax_node_to_token_tree(it.syntax()).0
+                    mbe::syntax_node_to_token_tree(it.syntax(), tm)
                 }
-                None => tt::Subtree::empty(),
+                None => {
+                    tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+                }
             }
         };
         let bounds = match &param {
             ast::TypeOrConstParam::Type(it) => {
-                it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+                it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
             }
             ast::TypeOrConstParam::Const(_) => None,
         };
         let ty = if let ast::TypeOrConstParam::Const(param) = param {
             let ty = param
                 .ty()
-                .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
-                .unwrap_or_else(tt::Subtree::empty);
+                .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
+                .unwrap_or_else(|| {
+                    tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+                });
             Some(ty)
         } else {
             None

@@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
             let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
             param_type_set.contains(&name).then_some(p)
         })
-        .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+        .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
         .collect();
-    let name_token = name_to_token(&tm, name)?;
+    let name_token = name_to_token(tm, name)?;
     Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
 }

-fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
+fn name_to_token(
+    token_map: SpanMapRef<'_>,
+    name: Option<ast::Name>,
+) -> Result<tt::Ident, ExpandError> {
     let name = name.ok_or_else(|| {
         debug!("parsed item has no name");
         ExpandError::other("missing name")
     })?;
-    let name_token_id =
-        token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
-    let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
+    let span = token_map.span_for_range(name.syntax().text_range());
+    let name_token = tt::Ident { span, text: name.text().into() };
     Ok(name_token)
 }

@@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
 /// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
 /// therefore does not get bound by the derived trait.
 fn expand_simple_derive(
+    // FIXME: use
+    invoc_span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
     trait_path: tt::Subtree,
     make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
 ) -> ExpandResult<tt::Subtree> {
-    let info = match parse_adt(tm, tt) {
+    let info = match parse_adt(tm, tt, invoc_span) {
         Ok(info) => info,
-        Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+        Err(e) => {
+            return ExpandResult::new(
+                tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
+                e,
+            )
+        }
     };
     let trait_body = make_trait_body(&info);
     let mut where_block = vec![];

@@ -349,13 +373,13 @@ fn expand_simple_derive(
             let ident_ = ident.clone();
             if let Some(b) = bound {
                 let ident = ident.clone();
-                where_block.push(quote! { #ident : #b , });
+                where_block.push(quote! {invoc_span => #ident : #b , });
             }
             if let Some(ty) = param_ty {
-                (quote! { const #ident : #ty , }, quote! { #ident_ , })
+                (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
             } else {
                 let bound = trait_path.clone();
-                (quote! { #ident : #bound , }, quote! { #ident_ , })
+                (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
             }
         })
         .unzip();

@@ -363,17 +387,17 @@ fn expand_simple_derive(
     where_block.extend(info.associated_types.iter().map(|it| {
         let it = it.clone();
         let bound = trait_path.clone();
-        quote! { #it : #bound , }
+        quote! {invoc_span => #it : #bound , }
    }));

     let name = info.name;
-    let expanded = quote! {
+    let expanded = quote! {invoc_span =>
         impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
     };
     ExpandResult::ok(expanded)
 }

-fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
+fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
     // FIXME: make hygiene works for builtin derive macro
     // such that $crate can be used here.
     let cg = db.crate_graph();

@@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree

     let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
         cov_mark::hit!(test_copy_expand_in_core);
-        quote! { crate }
+        quote! {span => crate }
     } else {
-        quote! { core }
+        quote! {span => core }
     };

     tt.token_trees[0].clone()

@@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
 fn copy_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
+    let krate = find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
 }

 fn clone_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
+    let krate = find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
-            let star = tt::Punct {
-                char: '*',
-                spacing: ::tt::Spacing::Alone,
-                span: tt::TokenId::unspecified(),
-            };
-            return quote! {
+            let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+            return quote! {span =>
                 fn clone(&self) -> Self {
                     #star self
                 }
             };
         }
         if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
-            let star = tt::Punct {
-                char: '*',
-                spacing: ::tt::Spacing::Alone,
-                span: tt::TokenId::unspecified(),
-            };
-            return quote! {
+            let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+            return quote! {span =>
                 fn clone(&self) -> Self {
                     match #star self {}
                 }
             };
         }
         let name = &adt.name;
-        let patterns = adt.shape.as_pattern(name);
-        let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
+        let patterns = adt.shape.as_pattern(span, name);
+        let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
         let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
-            let fat_arrow = fat_arrow();
-            quote! {
+            let fat_arrow = fat_arrow(span);
+            quote! {span =>
                 #pat #fat_arrow #expr,
             }
         });

-        quote! {
+        quote! {span =>
             fn clone(&self) -> Self {
                 match self {
                     ##arms

@@ -451,53 +469,56 @@ fn clone_expand(
     })
 }

-/// This function exists since `quote! { => }` doesn't work.
-fn fat_arrow() -> ::tt::Subtree<TokenId> {
-    let eq =
-        tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
-    quote! { #eq> }
+/// This function exists since `quote! {span => => }` doesn't work.
+fn fat_arrow(span: SpanData) -> tt::Subtree {
+    let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
+    quote! {span => #eq> }
 }

-/// This function exists since `quote! { && }` doesn't work.
-fn and_and() -> ::tt::Subtree<TokenId> {
-    let and =
-        tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
-    quote! { #and& }
+/// This function exists since `quote! {span => && }` doesn't work.
+fn and_and(span: SpanData) -> tt::Subtree {
+    let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
+    quote! {span => #and& }
 }

 fn default_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
+    let krate = &find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
         let body = match &adt.shape {
             AdtShape::Struct(fields) => {
                 let name = &adt.name;
-                fields
-                    .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
+                fields.as_pattern_map(
+                    quote!(span =>#name),
+                    span,
+                    |_| quote!(span =>#krate::default::Default::default()),
+                )
             }
             AdtShape::Enum { default_variant, variants } => {
                 if let Some(d) = default_variant {
                     let (name, fields) = &variants[*d];
                     let adt_name = &adt.name;
                     fields.as_pattern_map(
-                        quote!(#adt_name :: #name),
-                        |_| quote!(#krate::default::Default::default()),
+                        quote!(span =>#adt_name :: #name),
+                        span,
+                        |_| quote!(span =>#krate::default::Default::default()),
                    )
                 } else {
                     // FIXME: Return expand error here
-                    quote!()
+                    quote!(span =>)
                 }
             }
             AdtShape::Union => {
                 // FIXME: Return expand error here
-                quote!()
+                quote!(span =>)
             }
         };
-        quote! {
+        quote! {span =>
             fn default() -> Self {
                 #body
             }

@@ -508,44 +529,41 @@ fn default_expand(
 fn debug_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
+    let krate = &find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
         let for_variant = |name: String, v: &VariantShape| match v {
             VariantShape::Struct(fields) => {
                 let for_fields = fields.iter().map(|it| {
                     let x_string = it.to_string();
-                    quote! {
+                    quote! {span =>
                         .field(#x_string, & #it)
                     }
                 });
-                quote! {
+                quote! {span =>
                     f.debug_struct(#name) ##for_fields .finish()
                 }
             }
             VariantShape::Tuple(n) => {
-                let for_fields = tuple_field_iterator(*n).map(|it| {
-                    quote! {
+                let for_fields = tuple_field_iterator(span, *n).map(|it| {
+                    quote! {span =>
                         .field( & #it)
                     }
                 });
-                quote! {
+                quote! {span =>
                     f.debug_tuple(#name) ##for_fields .finish()
                 }
             }
-            VariantShape::Unit => quote! {
+            VariantShape::Unit => quote! {span =>
                 f.write_str(#name)
             },
         };
         if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
-            let star = tt::Punct {
-                char: '*',
-                spacing: ::tt::Spacing::Alone,
-                span: tt::TokenId::unspecified(),
-            };
-            return quote! {
+            let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+            return quote! {span =>
                 fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
                     match #star self {}
                 }

@@ -553,20 +571,20 @@ fn debug_expand(
         }
         let arms = match &adt.shape {
             AdtShape::Struct(fields) => {
-                let fat_arrow = fat_arrow();
+                let fat_arrow = fat_arrow(span);
                 let name = &adt.name;
-                let pat = fields.as_pattern(quote!(#name));
+                let pat = fields.as_pattern(quote!(span =>#name), span);
                 let expr = for_variant(name.to_string(), fields);
-                vec![quote! { #pat #fat_arrow #expr }]
+                vec![quote! {span => #pat #fat_arrow #expr }]
             }
             AdtShape::Enum { variants, .. } => variants
                 .iter()
                 .map(|(name, v)| {
-                    let fat_arrow = fat_arrow();
+                    let fat_arrow = fat_arrow(span);
                     let adt_name = &adt.name;
-                    let pat = v.as_pattern(quote!(#adt_name :: #name));
+                    let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
                     let expr = for_variant(name.to_string(), v);
-                    quote! {
+                    quote! {span =>
                         #pat #fat_arrow #expr ,
                     }
                 })

@@ -576,7 +594,7 @@ fn debug_expand(
                 vec![]
             }
         };
-        quote! {
+        quote! {span =>
             fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
                 match self {
                     ##arms

@@ -589,47 +607,46 @@ fn debug_expand(
 fn hash_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
+    let krate = &find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
-            return quote! {};
+            return quote! {span =>};
         }
         if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
-            let star = tt::Punct {
-                char: '*',
-                spacing: ::tt::Spacing::Alone,
-                span: tt::TokenId::unspecified(),
-            };
-            return quote! {
+            let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+            return quote! {span =>
                 fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
                     match #star self {}
                 }
             };
         }
-        let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
-            |(pat, names)| {
-                let expr = {
-                    let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
-                    quote! { {
-                        ##it
-                    } }
-                };
-                let fat_arrow = fat_arrow();
-                quote! {
-                    #pat #fat_arrow #expr ,
-                }
-            },
-        );
+        let arms =
+            adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
+                |(pat, names)| {
+                    let expr = {
+                        let it =
+                            names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
+                        quote! {span => {
+                            ##it
+                        } }
+                    };
+                    let fat_arrow = fat_arrow(span);
+                    quote! {span =>
+                        #pat #fat_arrow #expr ,
+                    }
+                },
+            );
         let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
-            quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+            quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
         } else {
-            quote! {}
+            quote! {span =>}
        };
-        quote! {
+        quote! {span =>
             fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
                 #check_discriminant
                 match self {

@@ -643,56 +660,58 @@ fn hash_expand(
 fn eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
+    let krate = find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
 }

 fn partial_eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
+    let krate = find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
-            return quote! {};
+            return quote! {span =>};
         }
         let name = &adt.name;

-        let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
-        let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+        let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
+        let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
             |(pat1, pat2, names)| {
-                let fat_arrow = fat_arrow();
+                let fat_arrow = fat_arrow(span);
                 let body = match &*names {
                     [] => {
-                        quote!(true)
+                        quote!(span =>true)
                     }
                     [first, rest @ ..] => {
                         let rest = rest.iter().map(|it| {
-                            let t1 = Ident::new(format!("{}_self", it.text), it.span);
-                            let t2 = Ident::new(format!("{}_other", it.text), it.span);
-                            let and_and = and_and();
-                            quote!(#and_and #t1 .eq( #t2 ))
+                            let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
+                            let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
+                            let and_and = and_and(span);
+                            quote!(span =>#and_and #t1 .eq( #t2 ))
                         });
                         let first = {
-                            let t1 = Ident::new(format!("{}_self", first.text), first.span);
-                            let t2 = Ident::new(format!("{}_other", first.text), first.span);
-                            quote!(#t1 .eq( #t2 ))
+                            let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
+                            let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
+                            quote!(span =>#t1 .eq( #t2 ))
                         };
-                        quote!(#first ##rest)
+                        quote!(span =>#first ##rest)
                     }
                 };
-                quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+                quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
             },
         );

-        let fat_arrow = fat_arrow();
-        quote! {
+        let fat_arrow = fat_arrow(span);
+        quote! {span =>
             fn eq(&self, other: &Self) -> bool {
                 match (self, other) {
                     ##arms

@@ -706,35 +725,46 @@ fn partial_eq_expand(
 fn self_and_other_patterns(
     adt: &BasicAdtInfo,
     name: &tt::Ident,
+    span: SpanData,
 ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
-    let self_patterns = adt.shape.as_pattern_map(name, |it| {
-        let t = Ident::new(format!("{}_self", it.text), it.span);
-        quote!(#t)
-    });
-    let other_patterns = adt.shape.as_pattern_map(name, |it| {
-        let t = Ident::new(format!("{}_other", it.text), it.span);
-        quote!(#t)
-    });
+    let self_patterns = adt.shape.as_pattern_map(
+        name,
+        |it| {
+            let t = tt::Ident::new(format!("{}_self", it.text), it.span);
+            quote!(span =>#t)
+        },
+        span,
+    );
+    let other_patterns = adt.shape.as_pattern_map(
+        name,
+        |it| {
+            let t = tt::Ident::new(format!("{}_other", it.text), it.span);
+            quote!(span =>#t)
+        },
+        span,
+    );
     (self_patterns, other_patterns)
 }

 fn ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
+    let krate = &find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
         fn compare(
             krate: &tt::TokenTree,
             left: tt::Subtree,
             right: tt::Subtree,
             rest: tt::Subtree,
+            span: SpanData,
         ) -> tt::Subtree {
-            let fat_arrow1 = fat_arrow();
-            let fat_arrow2 = fat_arrow();
-            quote! {
+            let fat_arrow1 = fat_arrow(span);
+            let fat_arrow2 = fat_arrow(span);
+            quote! {span =>
                 match #left.cmp(&#right) {
                     #krate::cmp::Ordering::Equal #fat_arrow1 {
                         #rest

@@ -745,34 +775,34 @@ fn ord_expand(
         }
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
-            return quote!();
+            return quote!(span =>);
         }
-        let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
-        let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+        let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+        let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
             |(pat1, pat2, fields)| {
-                let mut body = quote!(#krate::cmp::Ordering::Equal);
+                let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
                 for f in fields.into_iter().rev() {
-                    let t1 = Ident::new(format!("{}_self", f.text), f.span);
-                    let t2 = Ident::new(format!("{}_other", f.text), f.span);
-                    body = compare(krate, quote!(#t1), quote!(#t2), body);
+                    let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+                    let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+                    body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
                }
-                let fat_arrow = fat_arrow();
-                quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+                let fat_arrow = fat_arrow(span);
+                quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
            },
        );
-        let fat_arrow = fat_arrow();
-        let mut body = quote! {
+        let fat_arrow = fat_arrow(span);
+        let mut body = quote! {span =>
            match (self, other) {
                ##arms
                _unused #fat_arrow #krate::cmp::Ordering::Equal
            }
        };
        if matches!(&adt.shape, AdtShape::Enum { .. }) {
-            let left = quote!(#krate::intrinsics::discriminant_value(self));
-            let right = quote!(#krate::intrinsics::discriminant_value(other));
-            body = compare(krate, left, right, body);
+            let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+            let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
+            body = compare(krate, left, right, body, span);
        }
-        quote! {
+        quote! {span =>
            fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
                #body
            }

@@ -783,20 +813,22 @@ fn ord_expand(
 fn partial_ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
+    span: SpanData,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: SpanMapRef<'_>,
 ) -> ExpandResult<tt::Subtree> {
-    let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
+    let krate = &find_builtin_crate(db, id, span);
+    expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
         fn compare(
             krate: &tt::TokenTree,
             left: tt::Subtree,
             right: tt::Subtree,
             rest: tt::Subtree,
+            span: SpanData,
         ) -> tt::Subtree {
-            let fat_arrow1 = fat_arrow();
-            let fat_arrow2 = fat_arrow();
-            quote! {
+            let fat_arrow1 = fat_arrow(span);
+            let fat_arrow2 = fat_arrow(span);
+            quote! {span =>
                 match #left.partial_cmp(&#right) {
                     #krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
                         #rest

@@ -807,37 +839,39 @@ fn partial_ord_expand(
         }
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
-            return quote!();
+            return quote!(span =>);
         }
-        let left = quote!(#krate::intrinsics::discriminant_value(self));
-        let right = quote!(#krate::intrinsics::discriminant_value(other));
+        let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+        let right = quote!(span =>#krate::intrinsics::discriminant_value(other));

-        let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
-        let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+        let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+        let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
             |(pat1, pat2, fields)| {
-                let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
+                let mut body =
+                    quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
                 for f in fields.into_iter().rev() {
-                    let t1 = Ident::new(format!("{}_self", f.text), f.span);
-                    let t2 = Ident::new(format!("{}_other", f.text), f.span);
-                    body = compare(krate, quote!(#t1), quote!(#t2), body);
+                    let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+                    let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+                    body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
                }
-                let fat_arrow = fat_arrow();
-                quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+                let fat_arrow = fat_arrow(span);
+                quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
            },
        );
-        let fat_arrow = fat_arrow();
+        let fat_arrow = fat_arrow(span);
        let body = compare(
            krate,
            left,
            right,
-            quote! {
+            quote! {span =>
                match (self, other) {
                    ##arms
                    _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
                }
            },
+            span,
        );
-        quote! {
+        quote! {span =>
            fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
                #body
            }

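For illustration, the smallest recurring change in the derive file is that every token helper now takes the invocation span, for example `fat_arrow()` becoming `fat_arrow(span)`. A self-contained sketch of that threading with simplified stand-in types (`Span` and `Punct` here are invented, not rust-analyzer's `tt` definitions):

#[derive(Debug, Clone, Copy)]
struct Span(u32);

#[derive(Debug)]
struct Punct {
    ch: char,
    span: Span,
}

// Before: a helper like this used a process-wide dummy span.
// After: the caller's span is stamped on both puncts of `=>`.
fn fat_arrow(span: Span) -> Vec<Punct> {
    vec![Punct { ch: '=', span }, Punct { ch: '>', span }]
}

fn main() {
    let call_site = Span(7);
    // Each expander calls `fat_arrow(span)` once per match arm it emits.
    println!("{:?}", fat_arrow(call_site));
}
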
@ -1,17 +1,24 @@
|
|||
//! Builtin macro
|
||||
|
||||
use base_db::{AnchoredPath, Edition, FileId};
|
||||
use base_db::{
|
||||
span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
|
||||
AnchoredPath, Edition, FileId,
|
||||
};
|
||||
use cfg::CfgExpr;
|
||||
use either::Either;
|
||||
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
|
||||
use itertools::Itertools;
|
||||
use mbe::{parse_exprs_with_sep, parse_to_token_tree};
|
||||
use syntax::{
|
||||
ast::{self, AstToken},
|
||||
SmolStr,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
|
||||
MacroCallLoc,
|
||||
db::ExpandDatabase,
|
||||
hygiene::span_with_def_site_ctxt,
|
||||
name, quote,
|
||||
tt::{self, DelimSpan},
|
||||
ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
|
||||
};
|
||||
|
||||
macro_rules! register_builtin {
|
||||
|
@ -36,7 +43,10 @@ macro_rules! register_builtin {
|
|||
let expander = match *self {
|
||||
$( BuiltinFnLikeExpander::$kind => $expand, )*
|
||||
};
|
||||
expander(db, id, tt)
|
||||
|
||||
let span = db.lookup_intern_macro_call(id).span(db);
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
expander(db, id, tt, span)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -44,13 +54,16 @@ macro_rules! register_builtin {
|
|||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
arg_id: MacroCallId,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let expander = match *self {
|
||||
$( EagerExpander::$e_kind => $e_expand, )*
|
||||
};
|
||||
expander(db, arg_id, tt)
|
||||
|
||||
let span = db.lookup_intern_macro_call(id).span(db);
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
expander(db, id, tt, span)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -109,29 +122,44 @@ register_builtin! {
|
|||
(option_env, OptionEnv) => option_env_expand
|
||||
}
|
||||
|
||||
const DOLLAR_CRATE: tt::Ident =
|
||||
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
|
||||
fn mk_pound(span: SpanData) -> tt::Subtree {
|
||||
crate::quote::IntoTt::to_subtree(
|
||||
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
|
||||
char: '#',
|
||||
spacing: crate::tt::Spacing::Alone,
|
||||
span: span,
|
||||
})
|
||||
.into()],
|
||||
span,
|
||||
)
|
||||
}
|
||||
|
||||
fn module_path_expand(
|
||||
_db: &dyn ExpandDatabase,
|
||||
_id: MacroCallId,
|
||||
_tt: &tt::Subtree,
|
||||
span: SpanData,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
// Just return a dummy result.
|
||||
ExpandResult::ok(quote! { "module::path" })
|
||||
ExpandResult::ok(quote! {span =>
|
||||
"module::path"
|
||||
})
|
||||
}

fn line_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    // dummy implementation for type-checking purposes
    // Note that `line!` and `column!` will never be implemented properly, as they are by definition
    // not incremental
    ExpandResult::ok(tt::Subtree {
        delimiter: tt::Delimiter::unspecified(),
        delimiter: tt::Delimiter::dummy_invisible(),
        token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
            text: "0u32".into(),
            span: tt::Span::UNSPECIFIED,
            span,
        }))],
    })
}

@@ -140,26 +168,29 @@ fn log_syntax_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    ExpandResult::ok(quote! {})
    ExpandResult::ok(quote! {span =>})
}

fn trace_macros_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    ExpandResult::ok(quote! {})
    ExpandResult::ok(quote! {span =>})
}

fn stringify_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let pretty = ::tt::pretty(&tt.token_trees);

    let expanded = quote! {
    let expanded = quote! {span =>
        #pretty
    };

@@ -170,27 +201,29 @@ fn assert_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let args = parse_exprs_with_sep(tt, ',');
    let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
    let expanded = match &*args {
        [cond, panic_args @ ..] => {
            let comma = tt::Subtree {
                delimiter: tt::Delimiter::unspecified(),
                delimiter: tt::Delimiter::dummy_invisible(),
                token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
                    char: ',',
                    spacing: tt::Spacing::Alone,
                    span: tt::TokenId::unspecified(),
                    span,
                }))],
            };
            let cond = cond.clone();
            let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
            quote! {{
            quote! {span =>{
                if !(#cond) {
                    #DOLLAR_CRATE::panic!(##panic_args);
                    #dollar_crate::panic!(##panic_args);
                }
            }}
        }
        [] => quote! {{}},
        [] => quote! {span =>{}},
    };

    ExpandResult::ok(expanded)
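
(Editor's note: to make the `assert_expand` change concrete, a hypothetical call such as `assert!(cond, "msg")` would now expand roughly to the block below, with `$crate` spelled as a plain ident that carries the call-site span instead of the old `DOLLAR_CRATE` constant:)

    {
        if !(cond) {
            $crate::panic!("msg");
        }
    }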
@@ -200,12 +233,13 @@ fn file_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    // FIXME: RA purposefully lacks knowledge of absolute file names
    // so just return "".
    let file_name = "";

    let expanded = quote! {
    let expanded = quote! {span =>
        #file_name
    };

@@ -216,16 +250,18 @@ fn format_args_expand(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    format_args_expand_general(db, id, tt, "")
    format_args_expand_general(db, id, tt, "", span)
}

fn format_args_nl_expand(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    format_args_expand_general(db, id, tt, "\\n")
    format_args_expand_general(db, id, tt, "\\n", span)
}

fn format_args_expand_general(

@@ -234,11 +270,12 @@ fn format_args_expand_general(
    tt: &tt::Subtree,
    // FIXME: Make use of this so that mir interpretation works properly
    _end_string: &str,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let pound = quote! {@PUNCT '#'};
    let pound = mk_pound(span);
    let mut tt = tt.clone();
    tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
    return ExpandResult::ok(quote! {
    return ExpandResult::ok(quote! {span =>
        builtin #pound format_args #tt
    });
}

@@ -247,25 +284,25 @@ fn asm_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    // We expand all assembly snippets to `format_args!` invocations to get format syntax
    // highlighting for them.

    let mut literals = Vec::new();
    for tt in tt.token_trees.chunks(2) {
        match tt {
            [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
            | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
            {
                let krate = DOLLAR_CRATE.clone();
                literals.push(quote!(#krate::format_args!(#lit);));
                let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
                literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
            }
            _ => break,
        }
    }

    let pound = quote! {@PUNCT '#'};
    let expanded = quote! {
    let pound = mk_pound(span);
    let expanded = quote! {span =>
        builtin #pound asm (
            {##literals}
        )

@@ -277,20 +314,22 @@ fn global_asm_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    // Expand to nothing (at item-level)
    ExpandResult::ok(quote! {})
    ExpandResult::ok(quote! {span =>})
}

fn cfg_expand(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let loc = db.lookup_intern_macro_call(id);
    let expr = CfgExpr::parse(tt);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
    let expanded = if enabled { quote!(true) } else { quote!(false) };
    let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
    ExpandResult::ok(expanded)
}

@@ -298,13 +337,15 @@ fn panic_expand(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
    let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
    // Expand to a macro call `$crate::panic::panic_{edition}`
    let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
        quote!(#DOLLAR_CRATE::panic::panic_2021!)
        quote!(span =>#dollar_crate::panic::panic_2021!)
    } else {
        quote!(#DOLLAR_CRATE::panic::panic_2015!)
        quote!(span =>#dollar_crate::panic::panic_2015!)
    };

    // Pass the original arguments

@@ -316,13 +357,15 @@ fn unreachable_expand(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
    // Expand to a macro call `$crate::panic::unreachable_{edition}`
    let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
    let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
        quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
        quote!(span =>#dollar_crate::panic::unreachable_2021!)
    } else {
        quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
        quote!(span =>#dollar_crate::panic::unreachable_2015!)
    };

    // Pass the original arguments

@@ -352,6 +395,7 @@ fn compile_error_expand(
    _db: &dyn ExpandDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let err = match &*tt.token_trees {
        [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {

@@ -361,13 +405,14 @@ fn compile_error_expand(
        _ => ExpandError::other("`compile_error!` argument must be a string"),
    };

    ExpandResult { value: quote! {}, err: Some(err) }
    ExpandResult { value: quote! {span =>}, err: Some(err) }
}

fn concat_expand(
    _db: &dyn ExpandDatabase,
    _arg_id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let mut err = None;
    let mut text = String::new();

@@ -407,13 +452,14 @@ fn concat_expand(
            }
        }
    }
    ExpandResult { value: quote!(#text), err }
    ExpandResult { value: quote!(span =>#text), err }
}

fn concat_bytes_expand(
    _db: &dyn ExpandDatabase,
    _arg_id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let mut bytes = Vec::new();
    let mut err = None;

@@ -446,8 +492,25 @@ fn concat_bytes_expand(
            }
        }
    }
    let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
    ExpandResult { value: quote!([#ident]), err }
    let value = tt::Subtree {
        delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
        token_trees: {
            Itertools::intersperse_with(
                bytes.into_iter().map(|it| {
                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
                }),
                || {
                    tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
                        char: ',',
                        spacing: tt::Spacing::Alone,
                        span,
                    }))
                },
            )
            .collect()
        },
    };
    ExpandResult { value, err }
}
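
(Editor's note: the rewritten `concat_bytes_expand` builds the bracketed list directly as token trees instead of faking one big identifier; conceptually, for collected bytes `["1", "2", "3"]` the produced subtree is the sketch below, every token reusing the call-site span:)

    // rendered output: [1, 2, 3]
    // as a Bracket-delimited subtree:
    //   Literal("1"), Punct(','), Literal("2"), Punct(','), Literal("3")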

fn concat_bytes_expand_subtree(

@@ -480,6 +543,7 @@ fn concat_idents_expand(
    _db: &dyn ExpandDatabase,
    _arg_id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let mut err = None;
    let mut ident = String::new();

@@ -494,8 +558,9 @@ fn concat_idents_expand(
            }
        }
    }
    let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
    ExpandResult { value: quote!(#ident), err }
    // FIXME merge spans
    let ident = tt::Ident { text: ident.into(), span };
    ExpandResult { value: quote!(span =>#ident), err }
}

fn relative_file(

@@ -530,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
    db: &dyn ExpandDatabase,
    arg_id: MacroCallId,
    _tt: &tt::Subtree,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    match db.include_expand(arg_id) {
        Ok((res, _)) => ExpandResult::ok(res.0.clone()),
        Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
    let file_id = match include_input_to_file_id(db, arg_id, tt) {
        Ok(it) => it,
        Err(e) => {
            return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
        }
    };
    match parse_to_token_tree(
        SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
        SyntaxContextId::ROOT,
        &db.file_text(file_id),
    ) {
        Some(it) => ExpandResult::ok(it),
        None => ExpandResult::new(
            tt::Subtree::empty(DelimSpan { open: span, close: span }),
            ExpandError::other("failed to parse included file"),
        ),
    }
}

pub(crate) fn include_arg_to_tt(
pub fn include_input_to_file_id(
    db: &dyn ExpandDatabase,
    arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
    let loc = db.lookup_intern_macro_call(arg_id);
    let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
        panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
    };
    let path = parse_string(&arg.0)?;
    let file_id = relative_file(db, *arg_id, &path, false)?;

    let (subtree, map) =
        parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
    Ok((triomphe::Arc::new((subtree, map)), file_id))
    arg: &tt::Subtree,
) -> Result<FileId, ExpandError> {
    relative_file(db, arg_id, &parse_string(arg)?, false)
}
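
(Editor's note: a sketch of how the two halves now fit together — `include_input_to_file_id` only resolves the path, and `include_expand` itself re-parses the file with spans anchored to that file's root; `arg_tt` is a hypothetical name for the `include!` argument subtree:)

    let file_id = include_input_to_file_id(db, arg_id, arg_tt)?;
    // spans of the included tokens point into the included file itself
    let tt = parse_to_token_tree(
        SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
        SyntaxContextId::ROOT,
        &db.file_text(file_id),
    );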

fn include_bytes_expand(
    _db: &dyn ExpandDatabase,
    _arg_id: MacroCallId,
    tt: &tt::Subtree,
    _tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    if let Err(e) = parse_string(tt) {
        return ExpandResult::new(tt::Subtree::empty(), e);
    }

    // FIXME: actually read the file here if the user asked for macro expansion
    let res = tt::Subtree {
        delimiter: tt::Delimiter::unspecified(),
        delimiter: tt::Delimiter::dummy_invisible(),
        token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
            text: r#"b"""#.into(),
            span: tt::TokenId::unspecified(),
            span,
        }))],
    };
    ExpandResult::ok(res)

@@ -578,10 +646,13 @@ fn include_str_expand(
    db: &dyn ExpandDatabase,
    arg_id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let path = match parse_string(tt) {
        Ok(it) => it,
        Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
        Err(e) => {
            return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
        }
    };

    // FIXME: we're not able to read excluded files (which is most of them because

@@ -591,14 +662,14 @@ fn include_str_expand(
    let file_id = match relative_file(db, arg_id, &path, true) {
        Ok(file_id) => file_id,
        Err(_) => {
            return ExpandResult::ok(quote!(""));
            return ExpandResult::ok(quote!(span =>""));
        }
    };

    let text = db.file_text(file_id);
    let text = &*text;

    ExpandResult::ok(quote!(#text))
    ExpandResult::ok(quote!(span =>#text))
}

fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {

@@ -610,10 +681,13 @@ fn env_expand(
    db: &dyn ExpandDatabase,
    arg_id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let key = match parse_string(tt) {
        Ok(it) => it,
        Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
        Err(e) => {
            return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
        }
    };

    let mut err = None;

@@ -630,7 +704,7 @@ fn env_expand(
        // `include!("foo.rs"), which might go to infinite loop
        "UNRESOLVED_ENV_VAR".to_string()
    });
    let expanded = quote! { #s };
    let expanded = quote! {span => #s };

    ExpandResult { value: expanded, err }
}

@@ -639,15 +713,18 @@ fn option_env_expand(
    db: &dyn ExpandDatabase,
    arg_id: MacroCallId,
    tt: &tt::Subtree,
    span: SpanData,
) -> ExpandResult<tt::Subtree> {
    let key = match parse_string(tt) {
        Ok(it) => it,
        Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
        Err(e) => {
            return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
        }
    };
    // FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
    let expanded = match get_env_inner(db, arg_id, &key) {
        None => quote! { ::core::option::Option::None::<&str> },
        Some(s) => quote! { ::core::option::Option::Some(#s) },
        None => quote! {span => ::core::option::Option::None::<&str> },
        Some(s) => quote! {span => ::core::option::Option::Some(#s) },
    };

    ExpandResult::ok(expanded)

@@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.

use base_db::{salsa, CrateId, Edition, SourceDatabase};
use base_db::{
    salsa::{self, debug::DebugQueryTable},
    span::SyntaxContextId,
    CrateId, Edition, FileId, SourceDatabase,
};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
    ast::{self, HasAttrs, HasDocComments},
    AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
    AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;

use crate::{
    ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
    BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
    ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
    MacroDefKind, MacroFile, ProcMacroExpander,
    ast_id_map::AstIdMap,
    attrs::RawAttrs,
    builtin_attr_macro::pseudo_derive_attr_expansion,
    builtin_fn_macro::EagerExpander,
    fixup::{self, SyntaxFixupUndoInfo},
    hygiene::{apply_mark, SyntaxContextData, Transparency},
    span::{RealSpanMap, SpanMap, SpanMapRef},
    tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
    ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
    MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};

/// Total limit on the number of tokens produced by any macro invocation.

@@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
    pub mac: mbe::DeclarativeMacro,
    pub def_site_token_map: mbe::TokenMap,
    pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
    pub transparency: Transparency,
}

impl DeclarativeMacroExpander {
    pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
    pub fn expand(
        &self,
        db: &dyn ExpandDatabase,
        tt: tt::Subtree,
        call_id: MacroCallId,
    ) -> ExpandResult<tt::Subtree> {
        match self.mac.err() {
            Some(e) => ExpandResult::new(
                tt::Subtree::empty(),
                tt::Subtree::empty(tt::DelimSpan::DUMMY),
                ExpandError::other(format!("invalid macro definition: {e}")),
            ),
            None => self.mac.expand(tt).map_err(Into::into),
            None => self
                .mac
                .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
                .map_err(Into::into),
        }
    }

    pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
        self.mac.map_id_down(token_id)
    }

    pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
        self.mac.map_id_up(token_id)
    pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
        match self.mac.err() {
            Some(e) => ExpandResult::new(
                tt::Subtree::empty(tt::DelimSpan::DUMMY),
                ExpandError::other(format!("invalid macro definition: {e}")),
            ),
            None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
        }
    }
}
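
(Editor's note: the per-span callback passed to `mbe::DeclarativeMacro::expand` is where hygiene now happens; the two entry points differ only in that closure, condensed from the code above:)

    // hygienic expansion: mark every output span's syntax context for this call
    self.mac.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency));
    // unhygienic expansion (used for speculative expansion): leave contexts untouched
    self.mac.expand(&tt, |_| ());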

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
    /// Old-style `macro_rules` or the new macros 2.0
    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
    /// Stuff like `line!` and `file!`.
    BuiltIn(BuiltinFnLikeExpander),

@@ -69,31 +89,6 @@ pub enum TokenExpander {
    ProcMacro(ProcMacroExpander),
}

// FIXME: Get rid of these methods
impl TokenExpander {
    pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
        match self {
            TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
            TokenExpander::BuiltIn(..)
            | TokenExpander::BuiltInEager(..)
            | TokenExpander::BuiltInAttr(..)
            | TokenExpander::BuiltInDerive(..)
            | TokenExpander::ProcMacro(..) => id,
        }
    }

    pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
        match self {
            TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
            TokenExpander::BuiltIn(..)
            | TokenExpander::BuiltInEager(..)
            | TokenExpander::BuiltInAttr(..)
            | TokenExpander::BuiltInDerive(..)
            | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
        }
    }
}

#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

@@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
    // This query is LRU cached
    fn parse_macro_expansion(
        &self,
        macro_file: MacroFile,
    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
        macro_file: MacroFileId,
    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
    #[salsa::transparent]
    fn span_map(&self, file_id: HirFileId) -> SpanMap;

    fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;

    /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
    /// reason why we use salsa at all.

@@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
    /// to be incremental.
    #[salsa::interned]
    fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
    #[salsa::interned]
    fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;

    /// Lowers syntactic macro call to a token tree representation.
    #[salsa::transparent]
    fn setup_syntax_context_root(&self) -> ();
    #[salsa::transparent]
    fn dump_syntax_contexts(&self) -> String;

    /// Lowers syntactic macro call to a token tree representation. That's a firewall
    /// query, only typing in the macro call itself changes the returned
    /// subtree.
    fn macro_arg(
        &self,
        id: MacroCallId,
    ) -> ValueResult<
        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
        Arc<Box<[SyntaxError]>>,
    >;
    /// Extracts syntax node, corresponding to a macro call. That's a firewall
    /// query, only typing in the macro call itself changes the returned
    /// subtree.
    fn macro_arg_node(
        &self,
        id: MacroCallId,
    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
    ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
    /// Fetches the expander for this macro.
    #[salsa::transparent]
    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;

@@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
        def_crate: CrateId,
        id: AstId<ast::Macro>,
    ) -> Arc<DeclarativeMacroExpander>;

    /// Expand macro call to a token tree.
    // This query is LRU cached
    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
    #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
    fn include_expand(
        &self,
        arg_id: MacroCallId,
    ) -> Result<
        (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
        ExpandError,
    >;
    /// Special case of the previous query for procedural macros. We can't LRU
    /// proc macros, since they are not deterministic in general, and
    /// non-determinism breaks salsa in a very, very, very bad way.

@@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
        &self,
        macro_call: MacroCallId,
    ) -> ExpandResult<Box<[SyntaxError]>>;
}

    fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
#[inline]
pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
        HirFileIdRepr::MacroFile(m) => {
            SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
        }
    }
}

pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
    Arc::new(RealSpanMap::from_file(db, file_id))
}
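
(Editor's note: `span_map` replaces the old token-id maps as the way to go from a text range to a span; a minimal usage sketch, assuming `token` is a `SyntaxToken` in `file_id` — `span_for_range` is the accessor used on the speculative-expansion path later in this diff:)

    let map = db.span_map(file_id);
    let span = map.span_for_range(token.text_range());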

/// This expands the given macro call, but with different arguments. This is

@@ -181,21 +178,36 @@ pub fn expand_speculative(
    token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    let loc = db.lookup_intern_macro_call(actual_macro_call);
    let token_range = token_to_map.text_range();

    let span_map = RealSpanMap::absolute(FileId::BOGUS);
    let span_map = SpanMapRef::RealSpanMap(&span_map);

    // Build the subtree and token mapping for the speculative args
    let censor = censor_for_macro_input(&loc, speculative_args);
    let mut fixups = fixup::fixup_syntax(speculative_args);
    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
    let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
        speculative_args,
        fixups.token_map,
        fixups.next_id,
        fixups.replace,
        fixups.append,
    );
    let (mut tt, undo_info) = match loc.kind {
        MacroCallKind::FnLike { .. } => {
            (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
        }
        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
            let censor = censor_for_macro_input(&loc, speculative_args);
            let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
            fixups.append.retain(|it, _| match it {
                syntax::NodeOrToken::Node(it) => !censor.contains(it),
                syntax::NodeOrToken::Token(_) => true,
            });
            fixups.remove.extend(censor);
            (
                mbe::syntax_node_to_token_tree_modified(
                    speculative_args,
                    span_map,
                    fixups.append,
                    fixups.remove,
                ),
                fixups.undo_info,
            )
        }
    };

    let (attr_arg, token_id) = match loc.kind {
    let attr_arg = match loc.kind {
        MacroCallKind::Attr { invoc_attr_index, .. } => {
            let attr = if loc.def.is_attribute_derive() {
                // for pseudo-derive expansion we actually pass the attribute itself only

@@ -210,59 +222,45 @@ pub fn expand_speculative(
            }?;
            match attr.token_tree() {
                Some(token_tree) => {
                    let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
                    tree.delimiter = tt::Delimiter::unspecified();
                    let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
                    tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;

                    let shift = mbe::Shift::new(&tt);
                    shift.shift_all(&mut tree);

                    let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
                        let attr_input_start =
                            token_tree.left_delimiter_token()?.text_range().start();
                        let range = token_range.checked_sub(attr_input_start)?;
                        let token_id = shift.shift(map.token_by_range(range)?);
                        Some(token_id)
                    } else {
                        None
                    };
                    (Some(tree), token_id)
                    Some(tree)
                }
                _ => (None, None),
            }
        }
        _ => (None, None),
    };
    let token_id = match token_id {
        Some(token_id) => token_id,
        // token wasn't inside an attribute input so it has to be in the general macro input
        None => {
            let range = token_range.checked_sub(speculative_args.text_range().start())?;
            let token_id = spec_args_tmap.token_by_range(range)?;
            match loc.def.kind {
                MacroDefKind::Declarative(it) => {
                    db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
                }
                _ => token_id,
                _ => None,
            }
        }
        _ => None,
    };

    // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
    // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
    let mut speculative_expansion = match loc.def.kind {
        MacroDefKind::ProcMacro(expander, ..) => {
            tt.delimiter = tt::Delimiter::unspecified();
            expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
            tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
            let call_site = loc.span(db);
            expander.expand(
                db,
                loc.def.krate,
                loc.krate,
                &tt,
                attr_arg.as_ref(),
                call_site,
                call_site,
                call_site,
            )
        }
        MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
        }
        MacroDefKind::BuiltInDerive(expander, ..) => {
            // this cast is a bit sus, can we avoid losing the typedness here?
            let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
            expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
            expander.expand(db, actual_macro_call, &adt, span_map)
        }
        MacroDefKind::Declarative(it) => {
            db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
        }
        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
        MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
        MacroDefKind::BuiltInEager(it, _) => {
            it.expand(db, actual_macro_call, &tt).map_err(Into::into)

@@ -270,13 +268,14 @@ pub fn expand_speculative(
        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
    };

    let expand_to = macro_expand_to(db, actual_macro_call);
    fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
    let expand_to = loc.expand_to();

    fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
    let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);

    let syntax_node = node.syntax_node();
    let token = rev_tmap
        .ranges_by_token(token_id, token_to_map.kind())
        .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
        .filter_map(|range| syntax_node.covering_element(range).into_token())
        .min_by_key(|t| {
            // prefer tokens of the same kind and text

@@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {

fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
        HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
        HirFileIdRepr::MacroFile(macro_file) => {
            db.parse_macro_expansion(macro_file).value.0.syntax_node()
        }

@@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
    }
}

// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion(
    db: &dyn ExpandDatabase,
    macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
    macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
    let _p = profile::span("parse_macro_expansion");
    let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);

    let expand_to = macro_expand_to(db, macro_file.macro_call_id);

    tracing::debug!("expanded = {}", tt.as_debug_string());
    tracing::debug!("kind = {:?}", expand_to);
    let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
    let expand_to = loc.expand_to();
    let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);

    let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);

@@ -333,51 +331,129 @@ fn parse_macro_expansion_error(
    db: &dyn ExpandDatabase,
    macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
    db.parse_macro_expansion(MacroFile { macro_call_id })
    db.parse_macro_expansion(MacroFileId { macro_call_id })
        .map(|it| it.0.errors().to_vec().into_boxed_slice())
}

fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => {
            (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
        }
        HirFileIdRepr::MacroFile(macro_file) => {
            let (parse, map) = db.parse_macro_expansion(macro_file).value;
            (parse, SpanMap::ExpansionSpanMap(map))
        }
    }
}

fn macro_arg(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
) -> ValueResult<
    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
    Arc<Box<[SyntaxError]>>,
> {
    let loc = db.lookup_intern_macro_call(id);

    if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
    }

    let ValueResult { value, err } = db.macro_arg_node(id);
    let Some(arg) = value else {
        return ValueResult { value: None, err };
// FIXME: consider the following by putting fixup info into eager call info args
// ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
    let mismatched_delimiters = |arg: &SyntaxNode| {
        let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
        let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
        let well_formed_tt =
            matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
        if !well_formed_tt {
            // Don't expand malformed (unbalanced) macro invocations. This is
            // less than ideal, but trying to expand unbalanced macro calls
            // sometimes produces pathological, deeply nested code which breaks
            // all kinds of things.
            //
            // Some day, we'll have explicit recursion counters for all
            // recursive things, at which point this code might be removed.
            cov_mark::hit!(issue9358_bad_macro_stack_overflow);
            Some(Arc::new(Box::new([SyntaxError::new(
                "unbalanced token tree".to_owned(),
                arg.text_range(),
            )]) as Box<[_]>))
        } else {
            None
        }
    };
    let loc = db.lookup_intern_macro_call(id);
    if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
        .then(|| loc.eager.as_deref())
        .flatten()
    {
        ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
    } else {
        let (parse, map) = parse_with_map(db, loc.kind.file_id());
        let root = parse.syntax_node();

    let node = SyntaxNode::new_root(arg);
    let censor = censor_for_macro_input(&loc, &node);
    let mut fixups = fixup::fixup_syntax(&node);
    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
    let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
        &node,
        fixups.token_map,
        fixups.next_id,
        fixups.replace,
        fixups.append,
    );
        let syntax = match loc.kind {
            MacroCallKind::FnLike { ast_id, .. } => {
                let node = &ast_id.to_ptr(db).to_node(&root);
                let offset = node.syntax().text_range().start();
                match node.token_tree() {
                    Some(tt) => {
                        let tt = tt.syntax();
                        if let Some(e) = mismatched_delimiters(tt) {
                            return ValueResult::only_err(e);
                        }
                        tt.clone()
                    }
                    None => {
                        return ValueResult::only_err(Arc::new(Box::new([
                            SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
                        ])));
                    }
                }
            }
            MacroCallKind::Derive { ast_id, .. } => {
                ast_id.to_ptr(db).to_node(&root).syntax().clone()
            }
            MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
        };
        let (mut tt, undo_info) = match loc.kind {
            MacroCallKind::FnLike { .. } => {
                (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
            }
            MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
                let censor = censor_for_macro_input(&loc, &syntax);
                let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
                fixups.append.retain(|it, _| match it {
                    syntax::NodeOrToken::Node(it) => !censor.contains(it),
                    syntax::NodeOrToken::Token(_) => true,
                });
                fixups.remove.extend(censor);
                (
                    mbe::syntax_node_to_token_tree_modified(
                        &syntax,
                        map,
                        fixups.append,
                        fixups.remove,
                    ),
                    fixups.undo_info,
                )
            }
        };

    if loc.def.is_proc_macro() {
        // proc macros expect their inputs without parentheses, MBEs expect it with them included
        tt.delimiter = tt::Delimiter::unspecified();
    }
    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
    match err {
        Some(err) => ValueResult::new(val, err),
        None => ValueResult::ok(val),
        if loc.def.is_proc_macro() {
            // proc macros expect their inputs without parentheses, MBEs expect it with them included
            tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
        }

        if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
            match parse.errors() {
                [] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
                errors => ValueResult::new(
                    Some((Arc::new(tt), undo_info)),
                    // Box::<[_]>::from(res.errors()), not stable yet
                    Arc::new(errors.to_vec().into_boxed_slice()),
                ),
            }
        } else {
            ValueResult::ok(Some((Arc::new(tt), undo_info)))
        }
    }
}

// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped

@@ -417,103 +493,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
        .unwrap_or_default()
}

fn macro_arg_node(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
    let err = || -> Arc<Box<[_]>> {
        Arc::new(Box::new([SyntaxError::new_at_offset(
            "invalid macro call".to_owned(),
            syntax::TextSize::from(0),
        )]))
    };
    let loc = db.lookup_intern_macro_call(id);
    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
        } else {
            loc.kind
                .arg(db)
                .and_then(|arg| ast::TokenTree::cast(arg.value))
                .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
        };
        match res {
            Some(res) if res.errors().is_empty() => res.syntax_node(),
            Some(res) => {
                return ValueResult::new(
                    Some(res.syntax_node().green().into()),
                    // Box::<[_]>::from(res.errors()), not stable yet
                    Arc::new(res.errors().to_vec().into_boxed_slice()),
                );
            }
            None => return ValueResult::only_err(err()),
        }
    } else {
        match loc.kind.arg(db) {
            Some(res) => res.value,
            None => return ValueResult::only_err(err()),
        }
    };
    if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
        let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
        let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
        let well_formed_tt =
            matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
        if !well_formed_tt {
            // Don't expand malformed (unbalanced) macro invocations. This is
            // less than ideal, but trying to expand unbalanced macro calls
            // sometimes produces pathological, deeply nested code which breaks
            // all kinds of things.
            //
            // Some day, we'll have explicit recursion counters for all
            // recursive things, at which point this code might be removed.
            cov_mark::hit!(issue9358_bad_macro_stack_overflow);
            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
                "unbalanced token tree".to_owned(),
                arg.text_range(),
            )])));
        }
    }
    ValueResult::ok(Some(arg.green().into()))
}

fn decl_macro_expander(
    db: &dyn ExpandDatabase,
    def_crate: CrateId,
    id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
    let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
    let (mac, def_site_token_map) = match id.to_node(db) {
        ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
            Some(arg) => {
                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
                (mac, def_site_token_map)
            }
            None => (
                mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
                    is_2021,
                ),
                Default::default(),
            ),
        },
        ast::Macro::MacroDef(macro_def) => match macro_def.body() {
            Some(arg) => {
                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
                (mac, def_site_token_map)
            }
            None => (
                mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
                    is_2021,
                ),
                Default::default(),
            ),
        },
    let (root, map) = parse_with_map(db, id.file_id);
    let root = root.syntax_node();

    let transparency = |node| {
        // ... would be nice to have the item tree here
        let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
        match &*attrs
            .iter()
            .find(|it| {
                it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
            })?
            .token_tree_value()?
            .token_trees
        {
            [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
                "transparent" => Some(Transparency::Transparent),
                "semitransparent" => Some(Transparency::SemiTransparent),
                "opaque" => Some(Transparency::Opaque),
                _ => None,
            },
            _ => None,
        }
    };
    Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })

    let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
        ast::Macro::MacroRules(macro_rules) => (
            match macro_rules.token_tree() {
                Some(arg) => {
                    let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
                    mac
                }
                None => mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
                    is_2021,
                ),
            },
            transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
        ),
        ast::Macro::MacroDef(macro_def) => (
            match macro_def.body() {
                Some(arg) => {
                    let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
                    mac
                }
                None => mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
                    is_2021,
                ),
            },
            transparency(&macro_def).unwrap_or(Transparency::Opaque),
        ),
    };
    Arc::new(DeclarativeMacroExpander { mac, transparency })
}
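
(Editor's note: the `transparency` helper looks for the `rustc_macro_transparency` attribute that, for example, the standard library places on its macros 2.0 definitions; an illustrative declaration it would match, paraphrased from std's sources with the body elided:)

    #[rustc_macro_transparency = "semitransparent"]
    pub macro panic_2015 { /* ... */ }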

fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {

@@ -529,39 +569,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
    }
}

fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
fn macro_expand(
    db: &dyn ExpandDatabase,
    macro_call_id: MacroCallId,
    loc: MacroCallLoc,
) -> ExpandResult<Arc<tt::Subtree>> {
    let _p = profile::span("macro_expand");
    let loc = db.lookup_intern_macro_call(id);

    let ExpandResult { value: tt, mut err } = match loc.def.kind {
        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
        MacroDefKind::BuiltInDerive(expander, ..) => {
            let arg = db.macro_arg_node(id).value.unwrap();
            let (root, map) = parse_with_map(db, loc.kind.file_id());
            let root = root.syntax_node();
            let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
            let node = ast_id.to_ptr(db).to_node(&root);

            let node = SyntaxNode::new_root(arg);
            let censor = censor_for_macro_input(&loc, &node);
            let mut fixups = fixup::fixup_syntax(&node);
            fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
            let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
                &node,
                fixups.token_map,
                fixups.next_id,
                fixups.replace,
                fixups.append,
            );

            // this cast is a bit sus, can we avoid losing the typedness here?
            let adt = ast::Adt::cast(node).unwrap();
            let mut res = expander.expand(db, id, &adt, &tmap);
            fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
            res
            // FIXME: Use censoring
            let _censor = censor_for_macro_input(&loc, node.syntax());
            expander.expand(db, macro_call_id, &node, map.as_ref())
        }
        _ => {
            let ValueResult { value, err } = db.macro_arg(id);
            let Some(macro_arg) = value else {
            let ValueResult { value, err } = db.macro_arg(macro_call_id);
            let Some((macro_arg, undo_info)) = value else {
                return ExpandResult {
                    value: Arc::new(tt::Subtree {
                        delimiter: tt::Delimiter::UNSPECIFIED,
                        delimiter: tt::Delimiter::DUMMY_INVISIBLE,
                        token_trees: Vec::new(),
                    }),
                    // FIXME: We should make sure to enforce an invariant that invalid macro

@@ -570,12 +602,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
                };
            };

            let (arg, arg_tm, undo_info) = &*macro_arg;
            let mut res = match loc.def.kind {
            let arg = &*macro_arg;
            match loc.def.kind {
                MacroDefKind::Declarative(id) => {
                    db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
                    db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
                }
                MacroDefKind::BuiltIn(it, _) => {
                    it.expand(db, macro_call_id, &arg).map_err(Into::into)
                }
                MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                // This might look a bit odd, but we do not expand the inputs to eager macros here.
                // Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
                // That kind of expansion uses the ast id map of an eager macros input though which goes through

@@ -583,11 +617,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
                // will end up going through here again, whereas we want to just want to inspect the raw input.
                // As such we just return the input subtree here.
                MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
                    let mut arg = arg.clone();
                    fixup::reverse_fixups(&mut arg, arg_tm, undo_info);

                    return ExpandResult {
                        value: Arc::new(arg),
                        value: macro_arg.clone(),
                        err: err.map(|err| {
                            let mut buf = String::new();
                            for err in &**err {

@@ -600,12 +631,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
                        }),
                    };
                }
                MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
                MacroDefKind::BuiltInEager(it, _) => {
                    it.expand(db, macro_call_id, &arg).map_err(Into::into)
                }
                MacroDefKind::BuiltInAttr(it, _) => {
                    let mut res = it.expand(db, macro_call_id, &arg);
                    fixup::reverse_fixups(&mut res.value, &undo_info);
                    res
                }
                _ => unreachable!(),
            };
            fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
            res
            }
        }
    };

@@ -627,10 +662,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt

fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
    let loc = db.lookup_intern_macro_call(id);
    let Some(macro_arg) = db.macro_arg(id).value else {
    let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
        return ExpandResult {
            value: Arc::new(tt::Subtree {
                delimiter: tt::Delimiter::UNSPECIFIED,
                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
                token_trees: Vec::new(),
            }),
            // FIXME: We should make sure to enforce an invariant that invalid macro

@@ -639,47 +674,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
        };
    };

    let (arg_tt, arg_tm, undo_info) = &*macro_arg;

    let expander = match loc.def.kind {
        MacroDefKind::ProcMacro(expander, ..) => expander,
        _ => unreachable!(),
    };

    let attr_arg = match &loc.kind {
        MacroCallKind::Attr { attr_args, .. } => {
            let mut attr_args = attr_args.0.clone();
            mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
            Some(attr_args)
        }
        MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
        _ => None,
    };

    let ExpandResult { value: mut tt, err } =
        expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
    let call_site = loc.span(db);
    let ExpandResult { value: mut tt, err } = expander.expand(
        db,
        loc.def.krate,
        loc.krate,
        &macro_arg,
        attr_arg,
        // FIXME
        call_site,
        call_site,
        // FIXME
        call_site,
    );

    // Set a hard limit for the expanded tt
    if let Err(value) = check_tt_count(&tt) {
        return value;
    }

    fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
    fixup::reverse_fixups(&mut tt, &undo_info);

    ExpandResult { value: Arc::new(tt), err }
}

fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
    Arc::new(HygieneFrame::new(db, file_id))
}

fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
    db.lookup_intern_macro_call(id).expand_to()
}

fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
    let entry_point = match expand_to {
        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,

@@ -695,7 +727,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
    if TOKEN_LIMIT.check(count).is_err() {
        Err(ExpandResult {
            value: Arc::new(tt::Subtree {
                delimiter: tt::Delimiter::UNSPECIFIED,
                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
                token_trees: vec![],
            }),
            err: Some(ExpandError::other(format!(

@@ -708,3 +740,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
        Ok(())
    }
}

fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
    db.intern_syntax_context(SyntaxContextData::root());
}

fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
    let mut s = String::from("Expansions:");
    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
    entries.sort_by_key(|e| e.key);
    for e in entries {
        let id = e.key;
        let expn_data = e.value.as_ref().unwrap();
        s.push_str(&format!(
            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
            id,
            expn_data.kind.file_id(),
            expn_data.call_site,
            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
            expn_data.kind.descr(),
        ));
    }

    s.push_str("\n\nSyntaxContexts:\n");
    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
    entries.sort_by_key(|e| e.key);
    for e in entries {
        struct SyntaxContextDebug<'a>(
            &'a dyn ExpandDatabase,
            SyntaxContextId,
            &'a SyntaxContextData,
        );

        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                self.2.fancy_debug(self.1, self.0, f)
            }
        }
        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
    }
    s
}

@@ -18,18 +18,17 @@
//!
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use base_db::{span::SyntaxContextId, CrateId};
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;

use crate::{
    ast::{self, AstNode},
    db::ExpandDatabase,
    hygiene::Hygiene,
    mod_path::ModPath,
    EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
    MacroCallLoc, MacroDefId, MacroDefKind,
    span::SpanMapRef,
    EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
    MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};

pub fn expand_eager_macro_input(

@@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
    krate: CrateId,
    macro_call: InFile<ast::MacroCall>,
    def: MacroDefId,
    call_site: SyntaxContextId,
    resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
    let ast_map = db.ast_id_map(macro_call.file_id);

@@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
        krate,
        eager: None,
        kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
        call_site,
    });
    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
        db.parse_macro_expansion(arg_id.as_macro_file());
    // we need this map here as the expansion of the eager input fake file loses whitespace ...
    let mut ws_mapping = FxHashMap::default();
    if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
        ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
            Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
        }));
    }

    let mut arg_map = ExpansionSpanMap::empty();

    let ExpandResult { value: expanded_eager_input, err } = {
        eager_macro_recur(
            db,
            &Hygiene::new(db, macro_call.file_id),
            &arg_exp_map,
            &mut arg_map,
            TextSize::new(0),
            InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
            krate,
            call_site,
            resolver,
        )
    };
    let err = parse_err.or(err);
    if cfg!(debug) {
        arg_map.finish();
    }

    let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
    let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
        return ExpandResult { value: None, err };
    };

    let (mut subtree, expanded_eager_input_token_map) =
        mbe::syntax_node_to_token_tree(&expanded_eager_input);
    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);

    let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
        let mut ids_used = FxHashSet::default();
        let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
        // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
        // so we need to remap them to the original input of the eager macro.
        subtree.visit_ids(&mut |id| {
            // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix

            if let Some(range) = expanded_eager_input_token_map
                .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
            {
                // remap from expanded eager input to eager input expansion
                if let Some(og_range) = mapping.get(&range) {
                    // remap from eager input expansion to original eager input
                    if let Some(&og_range) = ws_mapping.get(og_range) {
                        if let Some(og_token) = og_tmap.token_by_range(og_range) {
                            ids_used.insert(og_token);
                            return og_token;
                        }
                    }
                }
            }
            tt::TokenId::UNSPECIFIED
        });
        og_tmap.filter(|id| ids_used.contains(&id));
        og_tmap
    } else {
        Default::default()
    };
    subtree.delimiter = crate::tt::Delimiter::unspecified();
    subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;

    let loc = MacroCallLoc {
        def,
        krate,
        eager: Some(Box::new(EagerCallInfo {
            arg: Arc::new((subtree, og_tmap)),
            arg_id,
            error: err.clone(),
        })),
        eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
        kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
        call_site,
    };

    ExpandResult { value: Some(db.intern_macro_call(loc)), err }
|
||||
|
@ -132,12 +101,13 @@ fn lazy_expand(
|
|||
def: &MacroDefId,
|
||||
macro_call: InFile<ast::MacroCall>,
|
||||
krate: CrateId,
|
||||
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
|
||||
call_site: SyntaxContextId,
|
||||
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
|
||||
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value);
|
||||
|
||||
let expand_to = ExpandTo::from_call_site(¯o_call.value);
|
||||
let ast_id = macro_call.with_value(ast_id);
|
||||
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
|
||||
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
|
||||
let macro_file = id.as_macro_file();
|
||||
|
||||
db.parse_macro_expansion(macro_file)
|
||||
|
@ -146,57 +116,59 @@ fn lazy_expand(
|
|||
|
||||
fn eager_macro_recur(
|
||||
db: &dyn ExpandDatabase,
|
||||
hygiene: &Hygiene,
|
||||
span_map: &ExpansionSpanMap,
|
||||
expanded_map: &mut ExpansionSpanMap,
|
||||
mut offset: TextSize,
|
||||
curr: InFile<SyntaxNode>,
|
||||
krate: CrateId,
|
||||
call_site: SyntaxContextId,
|
||||
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
|
||||
) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
|
||||
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
|
||||
let original = curr.value.clone_for_update();
|
||||
let mut mapping = FxHashMap::default();
|
||||
|
||||
let mut replacements = Vec::new();
|
||||
|
||||
// FIXME: We only report a single error inside of eager expansions
|
||||
let mut error = None;
|
||||
let mut offset = 0i32;
|
||||
let apply_offset = |it: TextSize, offset: i32| {
|
||||
TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
|
||||
};
|
||||
let mut children = original.preorder_with_tokens();
|
||||
|
||||
// Collect replacement
|
||||
while let Some(child) = children.next() {
|
||||
let WalkEvent::Enter(child) = child else { continue };
|
||||
let call = match child {
|
||||
syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
|
||||
WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
|
||||
Some(it) => {
|
||||
children.skip_subtree();
|
||||
it
|
||||
}
|
||||
None => continue,
|
||||
_ => continue,
|
||||
},
|
||||
syntax::NodeOrToken::Token(t) => {
|
||||
mapping.insert(
|
||||
TextRange::new(
|
||||
apply_offset(t.text_range().start(), offset),
|
||||
apply_offset(t.text_range().end(), offset),
|
||||
),
|
||||
t.text_range(),
|
||||
);
|
||||
WalkEvent::Enter(_) => continue,
|
||||
WalkEvent::Leave(child) => {
|
||||
if let SyntaxElement::Token(t) = child {
|
||||
let start = t.text_range().start();
|
||||
offset += t.text_range().len();
|
||||
expanded_map.push(offset, span_map.span_at(start));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
|
||||
|
||||
let def = match call
|
||||
.path()
|
||||
.and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
|
||||
{
|
||||
Some(path) => match macro_resolver(path.clone()) {
|
||||
Some(def) => def,
|
||||
None => {
|
||||
error =
|
||||
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
|
||||
offset += call.syntax().text_range().len();
|
||||
continue;
|
||||
}
|
||||
},
|
||||
None => {
|
||||
error = Some(ExpandError::other("malformed macro invocation"));
|
||||
offset += call.syntax().text_range().len();
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
@ -207,29 +179,22 @@ fn eager_macro_recur(
|
|||
krate,
|
||||
curr.with_value(call.clone()),
|
||||
def,
|
||||
call_site,
|
||||
macro_resolver,
|
||||
);
|
||||
match value {
|
||||
Some(call_id) => {
|
||||
let ExpandResult { value, err: err2 } =
|
||||
let ExpandResult { value: (parse, map), err: err2 } =
|
||||
db.parse_macro_expansion(call_id.as_macro_file());
|
||||
|
||||
if let Some(tt) = call.token_tree() {
|
||||
let call_tt_start = tt.syntax().text_range().start();
|
||||
let call_start =
|
||||
apply_offset(call.syntax().text_range().start(), offset);
|
||||
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
|
||||
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
|
||||
value
|
||||
.1
|
||||
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
|
||||
.map(|r| (r + call_start, range + call_tt_start))
|
||||
}));
|
||||
}
|
||||
}
|
||||
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
|
||||
|
||||
let syntax_node = parse.syntax_node();
|
||||
ExpandResult {
|
||||
value: Some(value.0.syntax_node().clone_for_update()),
|
||||
value: Some((
|
||||
syntax_node.clone_for_update(),
|
||||
offset + syntax_node.text_range().len(),
|
||||
)),
|
||||
err: err.or(err2),
|
||||
}
|
||||
}
|
||||
|
@ -242,45 +207,23 @@ fn eager_macro_recur(
|
|||
| MacroDefKind::BuiltInDerive(..)
|
||||
| MacroDefKind::ProcMacro(..) => {
|
||||
let ExpandResult { value: (parse, tm), err } =
|
||||
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
|
||||
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
|
||||
Some(db.decl_macro_expander(def.krate, ast_id))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
|
||||
|
||||
// replace macro inside
|
||||
let hygiene = Hygiene::new(db, parse.file_id);
|
||||
let ExpandResult { value, err: error } = eager_macro_recur(
|
||||
db,
|
||||
&hygiene,
|
||||
&tm,
|
||||
expanded_map,
|
||||
offset,
|
||||
// FIXME: We discard parse errors here
|
||||
parse.as_ref().map(|it| it.syntax_node()),
|
||||
krate,
|
||||
call_site,
|
||||
macro_resolver,
|
||||
);
|
||||
let err = err.or(error);
|
||||
|
||||
if let Some(tt) = call.token_tree() {
|
||||
let call_tt_start = tt.syntax().text_range().start();
|
||||
let call_start = apply_offset(call.syntax().text_range().start(), offset);
|
||||
if let Some((_tt, arg_map, _)) = parse
|
||||
.file_id
|
||||
.macro_file()
|
||||
.and_then(|id| db.macro_arg(id.macro_call_id).value)
|
||||
.as_deref()
|
||||
{
|
||||
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
|
||||
tm.first_range_by_token(
|
||||
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
|
||||
syntax::SyntaxKind::TOMBSTONE,
|
||||
)
|
||||
.map(|r| (r + call_start, range + call_tt_start))
|
||||
}));
|
||||
}
|
||||
}
|
||||
// FIXME: Do we need to re-use _m here?
|
||||
ExpandResult { value: value.map(|(n, _m)| n), err }
|
||||
ExpandResult { value, err }
|
||||
}
|
||||
};
|
||||
if err.is_some() {
|
||||
|
@ -288,16 +231,18 @@ fn eager_macro_recur(
|
|||
}
|
||||
// check if the whole original syntax is replaced
|
||||
if call.syntax() == &original {
|
||||
return ExpandResult { value: value.zip(Some(mapping)), err: error };
|
||||
return ExpandResult { value, err: error };
|
||||
}
|
||||
|
||||
if let Some(insert) = value {
|
||||
offset += u32::from(insert.text_range().len()) as i32
|
||||
- u32::from(call.syntax().text_range().len()) as i32;
|
||||
replacements.push((call, insert));
|
||||
match value {
|
||||
Some((insert, new_offset)) => {
|
||||
replacements.push((call, insert));
|
||||
offset = new_offset;
|
||||
}
|
||||
None => offset += call.syntax().text_range().len(),
|
||||
}
|
||||
}
|
||||
|
||||
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
|
||||
ExpandResult { value: Some((original, mapping)), err: error }
|
||||
ExpandResult { value: Some((original, offset)), err: error }
|
||||
}
|
||||
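
The key change in `eager_macro_recur` above is replacing the old range-to-range `FxHashMap` with a running `TextSize` offset plus a span map that is filled in while walking the expanded tree. Below is a minimal, self-contained sketch of that idea, using toy types rather than rust-analyzer's actual `ExpansionSpanMap` API:

    /// Toy span map: (offset just past a token, span) pairs in walk order,
    /// queried by binary search -- conceptually what the push/span_at calls do.
    #[derive(Default)]
    struct SpanMap {
        entries: Vec<(u32, &'static str)>,
    }

    impl SpanMap {
        /// Record that everything up to `end` carries `span`.
        fn push(&mut self, end: u32, span: &'static str) {
            self.entries.push((end, span));
        }
        /// Find the span covering `offset`.
        fn span_at(&self, offset: u32) -> &'static str {
            let idx = self.entries.partition_point(|&(end, _)| end <= offset);
            self.entries.get(idx).map_or("<root>", |&(_, s)| s)
        }
    }

    fn main() {
        // Walk three "tokens" of widths 3, 1, 5, accumulating the offset the
        // same way the rewritten walk does with `TextSize`.
        let mut offset = 0u32;
        let mut map = SpanMap::default();
        for (len, span) in [(3, "call-site"), (1, "def-site"), (5, "call-site")] {
            offset += len;
            map.push(offset, span);
        }
        assert_eq!(map.span_at(0), "call-site");
        assert_eq!(map.span_at(3), "def-site");
        assert_eq!(map.span_at(4), "call-site");
    }
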
375	crates/hir-expand/src/files.rs	Normal file

@@ -0,0 +1,375 @@
//! Things to wrap other things in file ids.
use std::iter;

use base_db::{
    span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
    FileId, FileRange,
};
use either::Either;
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};

use crate::{db, ExpansionInfo, MacroFileIdExt};

/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFileWrapper<FileKind, T> {
    pub file_id: FileKind,
    pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InRealFile<T> = InFileWrapper<FileId, T>;

impl<FileKind, T> InFileWrapper<FileKind, T> {
    pub fn new(file_id: FileKind, value: T) -> Self {
        Self { file_id, value }
    }

    pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
        InFileWrapper::new(self.file_id, f(self.value))
    }
}
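
A quick usage sketch of the wrapper pattern defined above; the types here are toy stand-ins assumed for illustration, not the real `HirFileId` machinery:

    // Toy stand-in for a file id kind (an assumption, not the actual type).
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileId(u32);

    #[derive(Debug, Clone, Copy)]
    struct InFileWrapper<FileKind, T> {
        file_id: FileKind,
        value: T,
    }

    impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
        fn new(file_id: FileKind, value: T) -> Self {
            Self { file_id, value }
        }
        // `map` transforms the payload while keeping the file association intact.
        fn map<U>(self, f: impl FnOnce(T) -> U) -> InFileWrapper<FileKind, U> {
            InFileWrapper::new(self.file_id, f(self.value))
        }
    }

    fn main() {
        let name_len = InFileWrapper::new(FileId(0), "foo").map(str::len);
        assert_eq!(name_len.value, 3);
        assert_eq!(name_len.file_id, FileId(0));
    }
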
impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
    pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
        InFileWrapper::new(self.file_id, value)
    }

    pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
        self.with_value(&self.value)
    }
}

impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
    pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
        self.with_value(self.value.clone())
    }
}

impl<T> From<InMacroFile<T>> for InFile<T> {
    fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
        InFile { file_id: file_id.into(), value }
    }
}

impl<T> From<InRealFile<T>> for InFile<T> {
    fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
        InFile { file_id: file_id.into(), value }
    }
}

// region:transpose impls

impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
    pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
        Some(InFileWrapper::new(self.file_id, self.value?))
    }
}

impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
    pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
        match self.value {
            Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
            Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
        }
    }
}

// endregion:transpose impls

trait FileIdToSyntax: Copy {
    fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
}

impl FileIdToSyntax for FileId {
    fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
        db.parse(self).syntax_node()
    }
}
impl FileIdToSyntax for MacroFileId {
    fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
        db.parse_macro_expansion(self).value.0.syntax_node()
    }
}
impl FileIdToSyntax for HirFileId {
    fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
        db.parse_or_expand(self)
    }
}

#[allow(private_bounds)]
impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
    pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
        FileIdToSyntax::file_syntax(self.file_id, db)
    }
}

impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
    pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
        self.with_value(self.value.syntax())
    }
}

// region:specific impls

impl InFile<&SyntaxNode> {
    /// Skips the attributed item that caused the macro invocation we are climbing up
    pub fn ancestors_with_macros_skip_attr_item(
        self,
        db: &dyn db::ExpandDatabase,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
        let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
            Some(parent) => Some(node.with_value(parent)),
            None => {
                let macro_file_id = node.file_id.macro_file()?;
                let parent_node = macro_file_id.call_node(db);
                if macro_file_id.is_attr_macro(db) {
                    // macro call was an attributed item, skip it
                    // FIXME: does this fail if this is a direct expansion of another macro?
                    parent_node.map(|node| node.parent()).transpose()
                } else {
                    Some(parent_node)
                }
            }
        };
        iter::successors(succ(&self.cloned()), succ)
    }

    /// Falls back to the macro call range if the node cannot be mapped up fully.
    ///
    /// For attributes and derives, this will point back to the attribute only.
    /// For the entire item use [`InFile::original_file_range_full`].
    pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
            HirFileIdRepr::MacroFile(mac_file) => {
                if let Some((res, ctxt)) =
                    ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
                {
                    // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                    // keep pre-token map rewrite behaviour.
                    if ctxt.is_root() {
                        return res;
                    }
                }
                // Fall back to whole macro call.
                let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
                loc.kind.original_call_range(db)
            }
        }
    }
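
The `original_file_range*` family here all follow the same shape: try to map the range up precisely, and fall back to the whole macro call when hygiene says the mapped result is not in the root context. A generic sketch of that fallback pattern, with a hypothetical helper that is not part of the crate:

    // Sketch of the "map up precisely, else fall back" control flow shared by
    // the methods in this file; `R` stands in for a range-like result type.
    fn map_up_or_fallback<R>(
        precise: impl FnOnce() -> Option<(R, bool)>, // (range, is_root_context)
        fallback: impl FnOnce() -> R,
    ) -> R {
        match precise() {
            Some((range, true)) => range, // fully mapped and hygienically "root"
            _ => fallback(),              // otherwise use the whole macro call
        }
    }

    fn main() {
        let mapped = map_up_or_fallback(|| Some((10..14, true)), || 0..20);
        assert_eq!(mapped, 10..14);
        let fell_back = map_up_or_fallback(|| Some((10..14, false)), || 0..20);
        assert_eq!(fell_back, 0..20);
    }
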

    /// Falls back to the macro call range if the node cannot be mapped up fully.
    pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
            HirFileIdRepr::MacroFile(mac_file) => {
                if let Some((res, ctxt)) =
                    ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
                {
                    // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                    // keep pre-token map rewrite behaviour.
                    if ctxt.is_root() {
                        return res;
                    }
                }
                // Fall back to whole macro call.
                let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
                loc.kind.original_call_range_with_body(db)
            }
        }
    }

    /// Attempts to map the syntax node back up its macro calls.
    pub fn original_file_range_opt(
        self,
        db: &dyn db::ExpandDatabase,
    ) -> Option<(FileRange, SyntaxContextId)> {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
                Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
            }
            HirFileIdRepr::MacroFile(mac_file) => {
                ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
            }
        }
    }

    pub fn original_syntax_node(
        self,
        db: &dyn db::ExpandDatabase,
    ) -> Option<InRealFile<SyntaxNode>> {
        // This kind of upmapping can only be achieved in attribute expanded files,
        // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
        let file_id = match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
                return Some(InRealFile { file_id, value: self.value.clone() })
            }
            HirFileIdRepr::MacroFile(m) => m,
        };
        if !file_id.is_attr_macro(db) {
            return None;
        }

        let (FileRange { file_id, range }, ctx) =
            ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;

        // FIXME: Figure out an API that makes proper use of ctx, this only exists to
        // keep pre-token map rewrite behaviour.
        if !ctx.is_root() {
            return None;
        }

        let anc = db.parse(file_id).syntax_node().covering_element(range);
        let kind = self.value.kind();
        // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
        let value = anc.ancestors().find(|it| it.kind() == kind)?;
        Some(InRealFile::new(file_id, value))
    }
}

impl InMacroFile<SyntaxToken> {
    pub fn upmap_once(
        self,
        db: &dyn db::ExpandDatabase,
    ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
        self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
    }
}

impl InFile<SyntaxToken> {
    /// Falls back to the macro call range if the node cannot be mapped up fully.
    pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
            HirFileIdRepr::MacroFile(mac_file) => {
                let (range, ctxt) = ExpansionInfo::new(db, mac_file)
                    .span_for_offset(db, self.value.text_range().start());

                // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                // keep pre-token map rewrite behaviour.
                if ctxt.is_root() {
                    return range;
                }

                // Fall back to whole macro call.
                let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
                loc.kind.original_call_range(db)
            }
        }
    }

    /// Attempts to map the syntax node back up its macro calls.
    pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
                Some(FileRange { file_id, range: self.value.text_range() })
            }
            HirFileIdRepr::MacroFile(mac_file) => {
                let (range, ctxt) = ExpansionInfo::new(db, mac_file)
                    .span_for_offset(db, self.value.text_range().start());

                // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                // keep pre-token map rewrite behaviour.
                if ctxt.is_root() {
                    Some(range)
                } else {
                    None
                }
            }
        }
    }
}

impl InMacroFile<TextSize> {
    pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
        ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
    }
}

impl InFile<TextRange> {
    pub fn original_node_file_range(
        self,
        db: &dyn db::ExpandDatabase,
    ) -> (FileRange, SyntaxContextId) {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
                (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
            }
            HirFileIdRepr::MacroFile(mac_file) => {
                match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
                    Some(it) => it,
                    None => {
                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
                        (loc.kind.original_call_range(db), SyntaxContextId::ROOT)
                    }
                }
            }
        }
    }

    pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
            HirFileIdRepr::MacroFile(mac_file) => {
                match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
                    Some((it, SyntaxContextId::ROOT)) => it,
                    _ => {
                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
                        loc.kind.original_call_range(db)
                    }
                }
            }
        }
    }

    pub fn original_node_file_range_opt(
        self,
        db: &dyn db::ExpandDatabase,
    ) -> Option<(FileRange, SyntaxContextId)> {
        match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
                Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
            }
            HirFileIdRepr::MacroFile(mac_file) => {
                ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
            }
        }
    }
}

impl<N: AstNode> InFile<N> {
    pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
        // This kind of upmapping can only be achieved in attribute expanded files,
        // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
        let file_id = match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
                return Some(InRealFile { file_id, value: self.value })
            }
            HirFileIdRepr::MacroFile(m) => m,
        };
        if !file_id.is_attr_macro(db) {
            return None;
        }

        let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
            .map_node_range_up(db, self.value.syntax().text_range())?;

        // FIXME: Figure out an API that makes proper use of ctx, this only exists to
        // keep pre-token map rewrite behaviour.
        if !ctx.is_root() {
            return None;
        }

        // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
        let anc = db.parse(file_id).syntax_node().covering_element(range);
        let value = anc.ancestors().find_map(N::cast)?;
        Some(InRealFile::new(file_id, value))
    }
}

@@ -1,111 +1,124 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
use std::mem;

use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use base_db::{
    span::{ErasedFileAstId, SpanAnchor, SpanData},
    FileId,
};
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use syntax::{
    ast::{self, AstNode, HasLoopBody},
    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use tt::Spacing;

use crate::{
    span::SpanMapRef,
    tt::{Ident, Leaf, Punct, Subtree},
};
use tt::token_id::Subtree;

/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
    pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
    pub(crate) remove: FxHashSet<SyntaxNode>,
    pub(crate) undo_info: SyntaxFixupUndoInfo,
    pub(crate) token_map: TokenMap,
    pub(crate) next_id: u32,
}

/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
    original: Box<[Subtree]>,
    // FIXME: ThinArc<[Subtree]>
    original: Option<Arc<Box<[Subtree]>>>,
}

const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
impl SyntaxFixupUndoInfo {
    pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}

pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
// censoring -> just don't convert the node
// replacement -> censor + append
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later

pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
    let mut append = FxHashMap::<SyntaxElement, _>::default();
    let mut replace = FxHashMap::<SyntaxElement, _>::default();
    let mut remove = FxHashSet::<SyntaxNode>::default();
    let mut preorder = node.preorder();
    let mut original = Vec::new();
    let mut token_map = TokenMap::default();
    let mut next_id = 0;
    let dummy_range = TextRange::empty(TextSize::new(0));
    // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
    // the index into the replacement vec but only if the end points to !0
    let dummy_anchor = SpanAnchor {
        file_id: FileId::from_raw(!0),
        ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
    };
    let fake_span = |range| SpanData {
        range: dummy_range,
        anchor: dummy_anchor,
        ctx: span_map.span_for_range(range).ctx,
    };
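
To make the dummy-anchor trick above concrete, here is a toy version of the encoding: a sentinel file id marks synthetic tokens, and the range start smuggles an index into the undo table. All names here are illustrative assumptions, with `u32::MAX` playing the role of `FileId::from_raw(!0)` / `TextSize::new(!0)`:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { file_id: u32, start: u32, end: u32 }

    const FAKE_FILE: u32 = u32::MAX;

    // Span for a synthetic replacement token: fake file, index in `start`,
    // sentinel in `end` so it can be told apart from pure dummy fillers.
    fn replacement_span(undo_index: u32) -> Span {
        Span { file_id: FAKE_FILE, start: undo_index, end: u32::MAX }
    }

    /// `None` for real tokens, `Some(index)` for synthetic replacements.
    fn undo_index(span: Span) -> Option<u32> {
        (span.file_id == FAKE_FILE && span.end == u32::MAX).then_some(span.start)
    }

    fn main() {
        let real = Span { file_id: 7, start: 10, end: 14 };
        assert_eq!(undo_index(real), None);
        assert_eq!(undo_index(replacement_span(3)), Some(3));
    }
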
    while let Some(event) = preorder.next() {
        let node = match event {
            syntax::WalkEvent::Enter(node) => node,
            syntax::WalkEvent::Leave(_) => continue,
        };
        let syntax::WalkEvent::Enter(node) = event else { continue };

        let node_range = node.text_range();
        if can_handle_error(&node) && has_error_to_handle(&node) {
            remove.insert(node.clone().into());
            // the node contains an error node, we have to completely replace it by something valid
            let (original_tree, new_tmap, new_next_id) =
                mbe::syntax_node_to_token_tree_with_modifications(
                    &node,
                    mem::take(&mut token_map),
                    next_id,
                    Default::default(),
                    Default::default(),
                );
            token_map = new_tmap;
            next_id = new_next_id;
            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
            let idx = original.len() as u32;
            original.push(original_tree);
            let replacement = SyntheticToken {
                kind: SyntaxKind::IDENT,
            let replacement = Leaf::Ident(Ident {
                text: "__ra_fixup".into(),
                range: node.text_range(),
                id: SyntheticTokenId(idx),
            };
            replace.insert(node.clone().into(), vec![replacement]);
                span: SpanData {
                    range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
                    anchor: dummy_anchor,
                    ctx: span_map.span_for_range(node_range).ctx,
                },
            });
            append.insert(node.clone().into(), vec![replacement]);
            preorder.skip_subtree();
            continue;
        }

        // In some other situations, we can fix things by just appending some tokens.
        let end_range = TextRange::empty(node.text_range().end());
        match_ast! {
            match node {
                ast::FieldExpr(it) => {
                    if it.name_ref().is_none() {
                        // incomplete field access: some_expr.|
                        append.insert(node.clone().into(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::IDENT,
                            Leaf::Ident(Ident {
                                text: "__ra_fixup".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                                span: fake_span(node_range),
                            }),
                        ]);
                    }
                },
                ast::ExprStmt(it) => {
                    if it.semicolon_token().is_none() {
                        append.insert(node.clone().into(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::SEMICOLON,
                                text: ";".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                            Leaf::Punct(Punct {
                                char: ';',
                                spacing: Spacing::Alone,
                                span: fake_span(node_range),
                            }),
                        ]);
                    }
                },
                ast::LetStmt(it) => {
                    if it.semicolon_token().is_none() {
                        append.insert(node.clone().into(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::SEMICOLON,
                                text: ";".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                            Leaf::Punct(Punct {
                                char: ';',
                                spacing: Spacing::Alone,
                                span: fake_span(node_range)
                            }),
                        ]);
                    }
                },

@@ -117,28 +130,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                        None => continue,
                    };
                    append.insert(if_token.into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::IDENT,
                        Leaf::Ident(Ident {
                            text: "__ra_fixup".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                            span: fake_span(node_range)
                        }),
                    ]);
                }
                if it.then_branch().is_none() {
                    append.insert(node.clone().into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::L_CURLY,
                            text: "{".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        SyntheticToken {
                            kind: SyntaxKind::R_CURLY,
                            text: "}".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        // FIXME: This should be a subtree no?
                        Leaf::Punct(Punct {
                            char: '{',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                        Leaf::Punct(Punct {
                            char: '}',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                    ]);
                }
            },

@@ -150,46 +160,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                        None => continue,
                    };
                    append.insert(while_token.into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::IDENT,
                        Leaf::Ident(Ident {
                            text: "__ra_fixup".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                            span: fake_span(node_range)
                        }),
                    ]);
                }
                if it.loop_body().is_none() {
                    append.insert(node.clone().into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::L_CURLY,
                            text: "{".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        SyntheticToken {
                            kind: SyntaxKind::R_CURLY,
                            text: "}".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        // FIXME: This should be a subtree no?
                        Leaf::Punct(Punct {
                            char: '{',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                        Leaf::Punct(Punct {
                            char: '}',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                    ]);
                }
            },
            ast::LoopExpr(it) => {
                if it.loop_body().is_none() {
                    append.insert(node.clone().into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::L_CURLY,
                            text: "{".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        SyntheticToken {
                            kind: SyntaxKind::R_CURLY,
                            text: "}".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        // FIXME: This should be a subtree no?
                        Leaf::Punct(Punct {
                            char: '{',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                        Leaf::Punct(Punct {
                            char: '}',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                    ]);
                }
            },

@@ -201,29 +207,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                        None => continue
                    };
                    append.insert(match_token.into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::IDENT,
                        Leaf::Ident(Ident {
                            text: "__ra_fixup".into(),
                            range: end_range,
                            id: EMPTY_ID
                        },
                            span: fake_span(node_range)
                        }),
                    ]);
                }
                if it.match_arm_list().is_none() {
                    // No match arms
                    append.insert(node.clone().into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::L_CURLY,
                            text: "{".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        SyntheticToken {
                            kind: SyntaxKind::R_CURLY,
                            text: "}".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        // FIXME: This should be a subtree no?
                        Leaf::Punct(Punct {
                            char: '{',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                        Leaf::Punct(Punct {
                            char: '}',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                    ]);
                }
            },

@@ -234,10 +237,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                    };

                    let [pat, in_token, iter] = [
                        (SyntaxKind::UNDERSCORE, "_"),
                        (SyntaxKind::IN_KW, "in"),
                        (SyntaxKind::IDENT, "__ra_fixup")
                    ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
                        "_",
                        "in",
                        "__ra_fixup"
                    ].map(|text|
                        Leaf::Ident(Ident {
                            text: text.into(),
                            span: fake_span(node_range)
                        }),
                    );

                    if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
                        append.insert(for_token.into(), vec![pat, in_token, iter]);

@@ -248,18 +256,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {

                if it.loop_body().is_none() {
                    append.insert(node.clone().into(), vec![
                        SyntheticToken {
                            kind: SyntaxKind::L_CURLY,
                            text: "{".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        SyntheticToken {
                            kind: SyntaxKind::R_CURLY,
                            text: "}".into(),
                            range: end_range,
                            id: EMPTY_ID,
                        },
                        // FIXME: This should be a subtree no?
                        Leaf::Punct(Punct {
                            char: '{',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                        Leaf::Punct(Punct {
                            char: '}',
                            spacing: Spacing::Alone,
                            span: fake_span(node_range)
                        }),
                    ]);
                }
            },

@@ -267,12 +274,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
            }
        }
    }
    let needs_fixups = !append.is_empty() || !original.is_empty();
    SyntaxFixups {
        append,
        replace,
        token_map,
        next_id,
        undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
        remove,
        undo_info: SyntaxFixupUndoInfo {
            original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
        },
    }
}

@@ -288,30 +296,32 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
    has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}

pub(crate) fn reverse_fixups(
    tt: &mut Subtree,
    token_map: &TokenMap,
    undo_info: &SyntaxFixupUndoInfo,
) {
pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
    let Some(undo_info) = undo_info.original.as_deref() else { return };
    let undo_info = &**undo_info;
    reverse_fixups_(tt, undo_info);
}

fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
    let tts = std::mem::take(&mut tt.token_trees);
    tt.token_trees = tts
        .into_iter()
        // delete all fake nodes
        .filter(|tt| match tt {
            tt::TokenTree::Leaf(leaf) => {
                token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
            }
            tt::TokenTree::Subtree(st) => {
                token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
                let span = leaf.span();
                span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
            }
            tt::TokenTree::Subtree(_) => true,
        })
        .flat_map(|tt| match tt {
            tt::TokenTree::Subtree(mut tt) => {
                reverse_fixups(&mut tt, token_map, undo_info);
                reverse_fixups_(&mut tt, undo_info);
                SmallVec::from_const([tt.into()])
            }
            tt::TokenTree::Leaf(leaf) => {
                if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
                    let original = undo_info.original[id.0 as usize].clone();
                if leaf.span().anchor.file_id == FileId::from_raw(!0) {
                    let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
                    if original.delimiter.kind == tt::DelimiterKind::Invisible {
                        original.token_trees.into()
                    } else {

@@ -327,11 +337,15 @@ pub(crate) fn reverse_fixups(

#[cfg(test)]
mod tests {
    use base_db::FileId;
    use expect_test::{expect, Expect};
    use triomphe::Arc;

    use crate::tt;

    use super::reverse_fixups;
    use crate::{
        fixup::reverse_fixups,
        span::{RealSpanMap, SpanMap},
        tt,
    };

    // The following three functions are only meant to check partial structural equivalence of
    // `TokenTree`s, see the last assertion in `check()`.

@@ -361,13 +375,13 @@ mod tests {
    #[track_caller]
    fn check(ra_fixture: &str, mut expect: Expect) {
        let parsed = syntax::SourceFile::parse(ra_fixture);
        let fixups = super::fixup_syntax(&parsed.syntax_node());
        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
        let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
        let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
        let mut tt = mbe::syntax_node_to_token_tree_modified(
            &parsed.syntax_node(),
            fixups.token_map,
            fixups.next_id,
            fixups.replace,
            span_map.as_ref(),
            fixups.append,
            fixups.remove,
        );

        let actual = format!("{tt}\n");

@@ -383,14 +397,15 @@ mod tests {
            parse.syntax_node()
        );

        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
        reverse_fixups(&mut tt, &fixups.undo_info);

        // the fixed-up + reversed version should be equivalent to the original input
        // modulo token IDs and `Punct`s' spacing.
        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
        let original_as_tt =
            mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
        assert!(
            check_subtree_eq(&tt, &original_as_tt),
            "different token tree: {tt:?},\n{original_as_tt:?}"
            "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
        );
    }

@@ -403,7 +418,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {for _ in __ra_fixup {}}
fn foo () {for _ in __ra_fixup { }}
"#]],
        )
    }

@@ -431,7 +446,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {for bar in qux {}}
fn foo () {for bar in qux { }}
"#]],
        )
    }

@@ -462,7 +477,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
        )
    }

@@ -494,7 +509,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
        )
    }

@@ -609,7 +624,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {if a {}}
fn foo () {if a { }}
"#]],
        )
    }

@@ -623,7 +638,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {if __ra_fixup {}}
fn foo () {if __ra_fixup { }}
"#]],
        )
    }

@@ -637,7 +652,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {if __ra_fixup {} {}}
fn foo () {if __ra_fixup {} { }}
"#]],
        )
    }

@@ -651,7 +666,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {while __ra_fixup {}}
fn foo () {while __ra_fixup { }}
"#]],
        )
    }

@@ -665,7 +680,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {while foo {}}
fn foo () {while foo { }}
"#]],
        )
    }

@@ -692,7 +707,7 @@ fn foo() {
}
"#,
            expect![[r#"
fn foo () {loop {}}
fn foo () {loop { }}
"#]],
        )
    }
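
Conceptually, the fixup machinery exercised by these tests completes broken syntax before macro expansion and strips the synthetic tokens again afterwards. A minimal string-level sketch of that round trip, not the real token-tree implementation:

    // Toy fixup: complete `expr.` to `expr.__ra_fixup` before handing the
    // text to a "macro", then strip the marker again on the way back.
    const FIXUP: &str = "__ra_fixup";

    fn fixup(src: &str) -> String {
        if src.trim_end().ends_with('.') {
            format!("{src}{FIXUP}")
        } else {
            src.to_owned()
        }
    }

    fn reverse_fixup(src: &str) -> String {
        src.replace(FIXUP, "")
    }

    fn main() {
        let fixed = fixup("some_expr.");
        assert_eq!(fixed, "some_expr.__ra_fixup");       // macro sees valid syntax
        assert_eq!(reverse_fixup(&fixed), "some_expr."); // original is recoverable
    }
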
|
|
|
@ -2,252 +2,247 @@
|
|||
//!
|
||||
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
|
||||
//! this moment, this is horribly incomplete and handles only `$crate`.
|
||||
use base_db::CrateId;
|
||||
use db::TokenExpander;
|
||||
use either::Either;
|
||||
use mbe::Origin;
|
||||
use syntax::{
|
||||
ast::{self, HasDocComments},
|
||||
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
|
||||
};
|
||||
use triomphe::Arc;
|
||||
use std::iter;
|
||||
|
||||
use crate::{
|
||||
db::{self, ExpandDatabase},
|
||||
fixup,
|
||||
name::{AsName, Name},
|
||||
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
|
||||
};
|
||||
use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Hygiene {
|
||||
frames: Option<HygieneFrames>,
|
||||
use crate::db::ExpandDatabase;
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
|
||||
pub struct SyntaxContextData {
|
||||
pub outer_expn: Option<MacroCallId>,
|
||||
pub outer_transparency: Transparency,
|
||||
pub parent: SyntaxContextId,
|
||||
/// This context, but with all transparent and semi-transparent expansions filtered away.
|
||||
pub opaque: SyntaxContextId,
|
||||
/// This context, but with all transparent expansions filtered away.
|
||||
pub opaque_and_semitransparent: SyntaxContextId,
|
||||
}
|
||||
|
||||
impl Hygiene {
|
||||
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
|
||||
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
|
||||
impl std::fmt::Debug for SyntaxContextData {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("SyntaxContextData")
|
||||
.field("outer_expn", &self.outer_expn)
|
||||
.field("outer_transparency", &self.outer_transparency)
|
||||
.field("parent", &self.parent)
|
||||
.field("opaque", &self.opaque)
|
||||
.field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl SyntaxContextData {
|
||||
pub fn root() -> Self {
|
||||
SyntaxContextData {
|
||||
outer_expn: None,
|
||||
outer_transparency: Transparency::Opaque,
|
||||
parent: SyntaxContextId::ROOT,
|
||||
opaque: SyntaxContextId::ROOT,
|
||||
opaque_and_semitransparent: SyntaxContextId::ROOT,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_unhygienic() -> Hygiene {
|
||||
Hygiene { frames: None }
|
||||
}
|
||||
|
||||
// FIXME: this should just return name
|
||||
pub fn name_ref_to_name(
|
||||
&self,
|
||||
pub fn fancy_debug(
|
||||
self,
|
||||
self_id: SyntaxContextId,
|
||||
db: &dyn ExpandDatabase,
|
||||
name_ref: ast::NameRef,
|
||||
) -> Either<Name, CrateId> {
|
||||
if let Some(frames) = &self.frames {
|
||||
if name_ref.text() == "$crate" {
|
||||
if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
|
||||
return Either::Right(krate);
|
||||
}
|
||||
f: &mut std::fmt::Formatter<'_>,
|
||||
) -> std::fmt::Result {
|
||||
write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
|
||||
match self.outer_expn {
|
||||
Some(id) => {
|
||||
write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
|
||||
}
|
||||
None => write!(f, "root")?,
|
||||
}
|
||||
|
||||
Either::Left(name_ref.as_name())
|
||||
}
|
||||
|
||||
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
|
||||
let mut token = path.syntax().first_token()?.text_range();
|
||||
let frames = self.frames.as_ref()?;
|
||||
let mut current = &frames.0;
|
||||
|
||||
loop {
|
||||
let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
|
||||
if origin == Origin::Def {
|
||||
return if current.local_inner {
|
||||
frames.root_crate(db, path.syntax())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
}
|
||||
current = current.call_site.as_ref()?;
|
||||
token = mapped.value;
|
||||
}
|
||||
write!(f, ", {:?})", self.outer_transparency)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct HygieneFrames(Arc<HygieneFrame>);
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct HygieneFrame {
|
||||
expansion: Option<HygieneInfo>,
|
||||
|
||||
// Indicate this is a local inner macro
|
||||
local_inner: bool,
|
||||
krate: Option<CrateId>,
|
||||
|
||||
call_site: Option<Arc<HygieneFrame>>,
|
||||
def_site: Option<Arc<HygieneFrame>>,
|
||||
/// A property of a macro expansion that determines how identifiers
|
||||
/// produced by that expansion are resolved.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
|
||||
pub enum Transparency {
|
||||
/// Identifier produced by a transparent expansion is always resolved at call-site.
|
||||
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
|
||||
Transparent,
|
||||
/// Identifier produced by a semi-transparent expansion may be resolved
|
||||
/// either at call-site or at definition-site.
|
||||
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
|
||||
/// Otherwise it's resolved at call-site.
|
||||
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
|
||||
/// but that's an implementation detail.
|
||||
SemiTransparent,
|
||||
/// Identifier produced by an opaque expansion is always resolved at definition-site.
|
||||
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
|
||||
Opaque,
|
||||
}
|
||||
|
||||
impl HygieneFrames {
|
||||
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
|
||||
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
|
||||
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
|
||||
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
|
||||
}
|
||||
|
||||
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
|
||||
let mut token = node.first_token()?.text_range();
|
||||
let mut result = self.0.krate;
|
||||
let mut current = self.0.clone();
|
||||
|
||||
while let Some((mapped, origin)) =
|
||||
current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
|
||||
{
|
||||
result = current.krate;
|
||||
|
||||
let site = match origin {
|
||||
Origin::Def => ¤t.def_site,
|
||||
Origin::Call => ¤t.call_site,
|
||||
};
|
||||
|
||||
let site = match site {
|
||||
None => break,
|
||||
Some(it) => it,
|
||||
};
|
||||
|
||||
current = site.clone();
|
||||
token = mapped.value;
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct HygieneInfo {
|
||||
file: MacroFile,
|
||||
/// The start offset of the `macro_rules!` arguments or attribute input.
|
||||
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
|
||||
|
||||
macro_def: TokenExpander,
|
||||
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
|
||||
macro_arg_shift: mbe::Shift,
|
||||
exp_map: Arc<mbe::TokenMap>,
|
||||
}
|
||||
|
||||
impl HygieneInfo {
|
||||
fn map_ident_up(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
token: TextRange,
|
||||
) -> Option<(InFile<TextRange>, Origin)> {
|
||||
let token_id = self.exp_map.token_by_range(token)?;
|
||||
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
|
||||
|
||||
let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
|
||||
|
||||
let (token_map, tt) = match &loc.kind {
|
||||
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
|
||||
Some(unshifted) => {
|
||||
token_id = unshifted;
|
||||
(&attr_args.1, self.attr_input_or_mac_def_start?)
|
||||
}
|
||||
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
|
||||
},
|
||||
_ => match origin {
|
||||
mbe::Origin::Call => {
|
||||
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
|
||||
}
|
||||
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
|
||||
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
|
||||
(&expander.def_site_token_map, *tt)
|
||||
}
|
||||
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
|
||||
Some((tt.with_value(range + tt.value), origin))
|
||||
}
|
||||
}
|
||||
|
||||
fn make_hygiene_info(
|
||||
pub fn span_with_def_site_ctxt(
|
||||
db: &dyn ExpandDatabase,
|
||||
macro_file: MacroFile,
|
||||
loc: &MacroCallLoc,
|
||||
) -> HygieneInfo {
|
||||
let def = loc.def.ast_id().left().and_then(|id| {
|
||||
let def_tt = match id.to_node(db) {
|
||||
ast::Macro::MacroRules(mac) => mac.token_tree()?,
|
||||
ast::Macro::MacroDef(mac) => mac.body()?,
|
||||
};
|
||||
Some(InFile::new(id.file_id, def_tt))
|
||||
});
|
||||
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
|
||||
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
|
||||
let tt = ast_id
|
||||
.to_node(db)
|
||||
.doc_comments_and_attrs()
|
||||
.nth(invoc_attr_index.ast_index())
|
||||
.and_then(Either::left)?
|
||||
.token_tree()?;
|
||||
Some(InFile::new(ast_id.file_id, tt))
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
span: SpanData,
|
||||
expn_id: MacroCallId,
|
||||
) -> SpanData {
|
||||
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
|
||||
}
|
||||
|
||||
let macro_def = db.macro_expander(loc.def);
|
||||
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
|
||||
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
|
||||
Arc::new((
|
||||
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
))
|
||||
});
|
||||
pub fn span_with_call_site_ctxt(
|
||||
db: &dyn ExpandDatabase,
|
||||
span: SpanData,
|
||||
expn_id: MacroCallId,
|
||||
) -> SpanData {
|
||||
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
|
||||
}
|
||||
|
||||
HygieneInfo {
|
||||
file: macro_file,
|
||||
attr_input_or_mac_def_start: attr_input_or_mac_def
|
||||
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
|
||||
macro_arg_shift: mbe::Shift::new(¯o_arg.0),
|
||||
macro_arg,
|
||||
macro_def,
|
||||
exp_map,
|
||||
pub fn span_with_mixed_site_ctxt(
|
||||
db: &dyn ExpandDatabase,
|
||||
span: SpanData,
|
||||
expn_id: MacroCallId,
|
||||
) -> SpanData {
|
||||
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
|
||||
}
|
||||
|
||||
fn span_with_ctxt_from_mark(
|
||||
db: &dyn ExpandDatabase,
|
||||
span: SpanData,
|
||||
expn_id: MacroCallId,
|
||||
transparency: Transparency,
|
||||
) -> SpanData {
|
||||
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
|
||||
}
|
||||
|
||||
pub(super) fn apply_mark(
|
||||
db: &dyn ExpandDatabase,
|
||||
ctxt: SyntaxContextId,
|
||||
call_id: MacroCallId,
|
||||
transparency: Transparency,
|
||||
) -> SyntaxContextId {
|
||||
if transparency == Transparency::Opaque {
|
||||
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
|
||||
}
|
||||
|
||||
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
|
||||
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
|
||||
call_site_ctxt.normalize_to_macros_2_0(db)
|
||||
} else {
|
||||
call_site_ctxt.normalize_to_macro_rules(db)
|
||||
};
|
||||
|
||||
if call_site_ctxt.is_root() {
|
||||
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
|
||||
}
|
||||
|
||||
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
|
||||
// macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
|
||||
//
|
||||
// In this case, the tokens from the macros 1.0 definition inherit the hygiene
|
||||
// at their invocation. That is, we pretend that the macros 1.0 definition
|
||||
// was defined at its invocation (i.e., inside the macros 2.0 definition)
|
||||
// so that the macros 2.0 definition remains hygienic.
|
||||
//
|
||||
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
|
||||
for (call_id, transparency) in ctxt.marks(db) {
|
||||
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
|
||||
}
|
||||
apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
|
||||
}
|
||||
|
||||
fn apply_mark_internal(
|
||||
db: &dyn ExpandDatabase,
|
||||
ctxt: SyntaxContextId,
|
||||
call_id: Option<MacroCallId>,
|
||||
transparency: Transparency,
|
||||
) -> SyntaxContextId {
|
||||
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
|
||||
let mut opaque = syntax_context_data.opaque;
|
||||
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
|
||||
|
||||
if transparency >= Transparency::Opaque {
|
||||
let parent = opaque;
|
||||
let new_opaque = SyntaxContextId::SELF_REF;
|
||||
// But we can't just grab the to be allocated ID either as that would not deduplicate
|
||||
// things!
|
||||
// So we need a new salsa store type here ...
|
||||
opaque = db.intern_syntax_context(SyntaxContextData {
|
||||
outer_expn: call_id,
|
||||
outer_transparency: transparency,
|
||||
parent,
|
||||
opaque: new_opaque,
|
||||
opaque_and_semitransparent: new_opaque,
|
||||
});
|
||||
}
|
||||
|
||||
if transparency >= Transparency::SemiTransparent {
|
||||
let parent = opaque_and_semitransparent;
|
||||
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
|
||||
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
|
||||
outer_expn: call_id,
|
||||
outer_transparency: transparency,
|
||||
parent,
|
||||
opaque,
|
||||
opaque_and_semitransparent: new_opaque_and_semitransparent,
|
||||
});
|
||||
}
|
||||
|
||||
let parent = ctxt;
|
||||
db.intern_syntax_context(SyntaxContextData {
|
||||
outer_expn: call_id,
|
||||
outer_transparency: transparency,
|
||||
parent,
|
||||
opaque,
|
||||
opaque_and_semitransparent,
|
||||
})
|
||||
}
|
||||
pub trait SyntaxContextExt {
|
||||
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
|
||||
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
|
||||
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
|
||||
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
|
||||
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
|
||||
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
|
||||
}
|
||||
|
||||
#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
    match n {
        SyntaxContextId::SELF_REF => p,
        _ => n,
    }
}
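A context created by an opaque (or semi-transparent) mark cannot know its own ID before it is interned, so it stores the `SELF_REF` sentinel and readers patch it back with `handle_self_ref`. A minimal sketch of the read side, using only items defined in this file:

// Sketch: resolve a context's opaque ancestor through the sentinel.
fn opaque_ancestor(db: &dyn ExpandDatabase, ctxt: SyntaxContextId) -> SyntaxContextId {
    let data = db.lookup_intern_syntax_context(ctxt);
    // `SELF_REF` stored in `opaque` means "this context is its own opaque ancestor".
    handle_self_ref(ctxt, data.opaque)
}
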
impl HygieneFrame {
    pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
        let (info, krate, local_inner) = match file_id.macro_file() {
            None => (None, None, false),
            Some(macro_file) => {
                let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
                let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
                match loc.def.kind {
                    MacroDefKind::Declarative(_) => {
                        (info, Some(loc.def.krate), loc.def.local_inner)
                    }
                    MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
                    MacroDefKind::BuiltInAttr(..) => (info, None, false),
                    MacroDefKind::BuiltInDerive(..) => (info, None, false),
                    MacroDefKind::BuiltInEager(..) => (info, None, false),
                    MacroDefKind::ProcMacro(..) => (info, None, false),
                }
            }
        };

        let Some((info, calling_file)) = info else {
            return HygieneFrame {
                expansion: None,
                local_inner,
                krate,
                call_site: None,
                def_site: None,
            };
        };

        let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
        let call_site = Some(db.hygiene_frame(calling_file));

        HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
    }
}
impl SyntaxContextExt for SyntaxContextId {
    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
        handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
    }
    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
        handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
    }
    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
        db.lookup_intern_syntax_context(self).parent
    }
    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
        let data = db.lookup_intern_syntax_context(self);
        (data.outer_expn, data.outer_transparency)
    }
    fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
        let data = db.lookup_intern_syntax_context(*self);
        *self = data.parent;
        (data.outer_expn, data.outer_transparency)
    }
    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
        let mut marks = marks_rev(self, db).collect::<Vec<_>>();
        marks.reverse();
        marks
    }
}

// FIXME: Make this a SyntaxContextExt method once we have RPIT
pub fn marks_rev(
    ctxt: SyntaxContextId,
    db: &dyn ExpandDatabase,
) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
    iter::successors(Some(ctxt), move |&mark| {
        Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
    })
    .map(|ctx| ctx.outer_mark(db))
}
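Note the ordering: `marks_rev` walks parent links, so it yields marks newest-first (innermost expansion outward toward the root), and `SyntaxContextExt::marks` reverses that to oldest-first. A small usage sketch, assuming a `SyntaxContextId` obtained elsewhere:

// Sketch: dump a context's expansion history, outermost expansion first.
fn dump_marks(db: &dyn ExpandDatabase, ctxt: SyntaxContextId) {
    for (call, transparency) in ctxt.marks(db) {
        eprintln!("expanded by {call:?} with {transparency:?} transparency");
    }
}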
File diff suppressed because it is too large
@@ -7,11 +7,11 @@ use std::{
use crate::{
    db::ExpandDatabase,
    hygiene::Hygiene,
    name::{known, Name},
    hygiene::{marks_rev, SyntaxContextExt, Transparency},
    name::{known, AsName, Name},
    span::SpanMapRef,
};
use base_db::CrateId;
use either::Either;
use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};

@@ -38,6 +38,7 @@ pub enum PathKind {
    Crate,
    /// Absolute path (::foo)
    Abs,
    // FIXME: Remove this
    /// `$crate` from macro expansion
    DollarCrate(CrateId),
}

@@ -46,9 +47,9 @@ impl ModPath {
    pub fn from_src(
        db: &dyn ExpandDatabase,
        path: ast::Path,
        hygiene: &Hygiene,
        span_map: SpanMapRef<'_>,
    ) -> Option<ModPath> {
        convert_path(db, None, path, hygiene)
        convert_path(db, None, path, span_map)
    }

    pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@@ -193,33 +194,36 @@ fn convert_path(
    db: &dyn ExpandDatabase,
    prefix: Option<ModPath>,
    path: ast::Path,
    hygiene: &Hygiene,
    span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
    let prefix = match path.qualifier() {
        Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
        Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
        None => prefix,
    };

    let segment = path.segment()?;
    let mut mod_path = match segment.kind()? {
        ast::PathSegmentKind::Name(name_ref) => {
            match hygiene.name_ref_to_name(db, name_ref) {
                Either::Left(name) => {
                    // no type args in use
                    let mut res = prefix.unwrap_or_else(|| {
                        ModPath::from_kind(
                            segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
                        )
                    });
                    res.segments.push(name);
                    res
                }
                Either::Right(crate_id) => {
                    return Some(ModPath::from_segments(
                        PathKind::DollarCrate(crate_id),
                        iter::empty(),
                    ))
            if name_ref.text() == "$crate" {
                if prefix.is_some() {
                    return None;
                }
                ModPath::from_kind(
                    resolve_crate_root(
                        db,
                        span_map.span_for_range(name_ref.syntax().text_range()).ctx,
                    )
                    .map(PathKind::DollarCrate)
                    .unwrap_or(PathKind::Crate),
                )
            } else {
                let mut res = prefix.unwrap_or_else(|| {
                    ModPath::from_kind(
                        segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
                    )
                });
                res.segments.push(name_ref.as_name());
                res
            }
        }
        ast::PathSegmentKind::SelfTypeKw => {
@@ -261,8 +265,14 @@ fn convert_path(
    // We follow what it did anyway :)
    if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
        if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
            if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
                mod_path.kind = PathKind::DollarCrate(crate_id);
            let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
            if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
                if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
                    mod_path.kind = match resolve_crate_root(db, syn_ctx) {
                        Some(crate_root) => PathKind::DollarCrate(crate_root),
                        None => PathKind::Crate,
                    }
                }
            }
        }
    }
@@ -270,6 +280,29 @@ fn convert_path(
    Some(mod_path)
}

pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
    // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
    // we don't want to pretend that the `macro_rules!` definition is in the `macro`
    // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
    // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
    // definitions actually produced by `macro` and `macro` definitions produced by
    // `macro_rules!`, but at least such configurations are not stable yet.
    ctxt = ctxt.normalize_to_macro_rules(db);
    let mut iter = marks_rev(ctxt, db).peekable();
    let mut result_mark = None;
    // Find the last opaque mark from the end if it exists.
    while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
        result_mark = Some(mark);
        iter.next();
    }
    // Then find the last semi-transparent mark from the end if it exists.
    while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
        result_mark = Some(mark);
    }

    result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
}
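For intuition: after normalization, `marks_rev` yields this context's marks newest-first, so the first loop keeps the last mark of the leading opaque run, and the second loop lets any directly following semi-transparent marks override it. A self-contained sketch of the same two-phase scan over a toy mark list (the enum and `u32` ids are stand-ins for the real types):

#[derive(Clone, Copy, PartialEq)]
enum Transparency { Transparent, SemiTransparent, Opaque }

// `marks` is newest-first, as `marks_rev` yields them.
fn last_crate_root_mark(marks: &[(Option<u32>, Transparency)]) -> Option<u32> {
    let mut iter = marks.iter().copied().peekable();
    let mut result = None;
    // Phase 1: consume the leading run of opaque marks, remembering the last.
    while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
        result = Some(mark);
        iter.next();
    }
    // Phase 2: directly following semi-transparent marks also qualify.
    while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
        result = Some(mark);
    }
    result.flatten()
}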

pub use crate::name as __name;

#[macro_export]
@@ -470,6 +470,7 @@ pub mod known {
    pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");

    pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
    pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");

    #[macro_export]
    macro_rules! name {
@@ -1,6 +1,6 @@
//! Proc Macro Expander stub

use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;

use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@@ -33,11 +33,15 @@ impl ProcMacroExpander {
        calling_crate: CrateId,
        tt: &tt::Subtree,
        attr_arg: Option<&tt::Subtree>,
        def_site: SpanData,
        call_site: SpanData,
        mixed_site: SpanData,
    ) -> ExpandResult<tt::Subtree> {
        match self.proc_macro_id {
            ProcMacroId(DUMMY_ID) => {
                ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
            }
            ProcMacroId(DUMMY_ID) => ExpandResult::new(
                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                ExpandError::UnresolvedProcMacro(def_crate),
            ),
            ProcMacroId(id) => {
                let proc_macros = db.proc_macros();
                let proc_macros = match proc_macros.get(&def_crate) {
@@ -45,7 +49,7 @@ impl ProcMacroExpander {
                    Some(Err(_)) | None => {
                        never!("Non-dummy expander even though there are no proc macros");
                        return ExpandResult::new(
                            tt::Subtree::empty(),
                            tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                            ExpandError::other("Internal error"),
                        );
                    }
@@ -59,7 +63,7 @@ impl ProcMacroExpander {
                            id
                        );
                        return ExpandResult::new(
                            tt::Subtree::empty(),
                            tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                            ExpandError::other("Internal error"),
                        );
                    }
@@ -68,7 +72,8 @@ impl ProcMacroExpander {
                let krate_graph = db.crate_graph();
                // Proc macros have access to the environment variables of the invoking crate.
                let env = &krate_graph[calling_crate].env;
                match proc_macro.expander.expand(tt, attr_arg, env) {
                match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
                {
                    Ok(t) => ExpandResult::ok(t),
                    Err(err) => match err {
                        // Don't discard the item in case something unexpected happened while expanding attributes
@@ -78,9 +83,10 @@ impl ProcMacroExpander {
                            ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
                        }
                        ProcMacroExpansionError::System(text)
                        | ProcMacroExpansionError::Panic(text) => {
                            ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
                        }
                        | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
                            tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                            ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())),
                        ),
                    },
                }
            }
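Every error path above now spans its empty result at the call site; the pattern `tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site })` recurs throughout this file. A hypothetical helper one could factor out (not part of the diff, shown only to name the pattern):

// Sketch: an empty token subtree whose delimiters carry the call-site span.
fn empty_subtree_at(call_site: SpanData) -> tt::Subtree {
    tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site })
}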
@@ -1,5 +1,7 @@
//! A simplified version of quote-crate like quasi quote macro

use base_db::span::SpanData;

// A helper macro quote macro
// FIXME:
// 1. Not all puncts are handled
@@ -8,109 +10,109 @@
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
    () => {
    ($span:ident) => {
        Vec::<crate::tt::TokenTree>::new()
    };

    ( @SUBTREE $delim:ident $($tt:tt)* ) => {
    ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
        {
            let children = $crate::__quote!($($tt)*);
            let children = $crate::__quote!($span $($tt)*);
            crate::tt::Subtree {
                delimiter: crate::tt::Delimiter {
                    kind: crate::tt::DelimiterKind::$delim,
                    open: crate::tt::TokenId::unspecified(),
                    close: crate::tt::TokenId::unspecified(),
                    open: $span,
                    close: $span,
                },
                token_trees: $crate::quote::IntoTt::to_tokens(children),
            }
        }
    };

    ( @PUNCT $first:literal ) => {
    ( @PUNCT($span:ident) $first:literal ) => {
        {
            vec![
                crate::tt::Leaf::Punct(crate::tt::Punct {
                    char: $first,
                    spacing: crate::tt::Spacing::Alone,
                    span: crate::tt::TokenId::unspecified(),
                    span: $span,
                }).into()
            ]
        }
    };

    ( @PUNCT $first:literal, $sec:literal ) => {
    ( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
        {
            vec![
                crate::tt::Leaf::Punct(crate::tt::Punct {
                    char: $first,
                    spacing: crate::tt::Spacing::Joint,
                    span: crate::tt::TokenId::unspecified(),
                    span: $span,
                }).into(),
                crate::tt::Leaf::Punct(crate::tt::Punct {
                    char: $sec,
                    spacing: crate::tt::Spacing::Alone,
                    span: crate::tt::TokenId::unspecified(),
                    span: $span,
                }).into()
            ]
        }
    };

    // hash variable
    ( # $first:ident $($tail:tt)* ) => {
    ($span:ident # $first:ident $($tail:tt)* ) => {
        {
            let token = $crate::quote::ToTokenTree::to_token($first);
            let token = $crate::quote::ToTokenTree::to_token($first, $span);
            let mut tokens = vec![token.into()];
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
            tokens.append(&mut tail_tokens);
            tokens
        }
    };

    ( ## $first:ident $($tail:tt)* ) => {
    ($span:ident ## $first:ident $($tail:tt)* ) => {
        {
            let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
            let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
            tokens.append(&mut tail_tokens);
            tokens
        }
    };

    // Brace
    ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
    ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
    // Bracket
    ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
    ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
    // Parenthesis
    ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
    ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };

    // Literal
    ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
    ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
    // Ident
    ( $tt:ident ) => {
    ($span:ident $tt:ident ) => {
        vec![ {
            crate::tt::Leaf::Ident(crate::tt::Ident {
                text: stringify!($tt).into(),
                span: crate::tt::TokenId::unspecified(),
                span: $span,
            }).into()
        }]
    };

    // Puncts
    // FIXME: Not all puncts are handled
    ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
    ( & ) => {$crate::__quote!(@PUNCT '&')};
    ( , ) => {$crate::__quote!(@PUNCT ',')};
    ( : ) => {$crate::__quote!(@PUNCT ':')};
    ( ; ) => {$crate::__quote!(@PUNCT ';')};
    ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
    ( . ) => {$crate::__quote!(@PUNCT '.')};
    ( < ) => {$crate::__quote!(@PUNCT '<')};
    ( > ) => {$crate::__quote!(@PUNCT '>')};
    ( ! ) => {$crate::__quote!(@PUNCT '!')};
    ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
    ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
    ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
    ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
    ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
    ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
    ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
    ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
    ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
    ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};

    ( $first:tt $($tail:tt)+ ) => {
    ($span:ident $first:tt $($tail:tt)+ ) => {
        {
            let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
            let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));

            tokens.append(&mut tail_tokens);
            tokens
@@ -122,19 +124,22 @@ macro_rules! __quote {
/// It probably should implement in proc-macro
#[macro_export]
macro_rules! quote {
    ( $($tt:tt)* ) => {
        $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
    ($span:ident=> $($tt:tt)* ) => {
        $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
    }
}
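Call sites now lead with the span token, as the tests below show. A brief sketch of the new convention, where `span` is any `SpanData` value in scope and `name` holds a token tree (both names are illustrative):

let tt = quote! {span =>
    impl Clone for #name {}
};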
pub(crate) trait IntoTt {
    fn to_subtree(self) -> crate::tt::Subtree;
    fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
    fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}

impl IntoTt for Vec<crate::tt::TokenTree> {
    fn to_subtree(self) -> crate::tt::Subtree {
        crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
    fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
        crate::tt::Subtree {
            delimiter: crate::tt::Delimiter::invisible_spanned(span),
            token_trees: self,
        }
    }

    fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
}

impl IntoTt for crate::tt::Subtree {
    fn to_subtree(self) -> crate::tt::Subtree {
    fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
        self
    }

@@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
}

pub(crate) trait ToTokenTree {
    fn to_token(self) -> crate::tt::TokenTree;
    fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
}

impl ToTokenTree for crate::tt::TokenTree {
    fn to_token(self) -> crate::tt::TokenTree {
    fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
        self
    }
}

impl ToTokenTree for &crate::tt::TokenTree {
    fn to_token(self) -> crate::tt::TokenTree {
    fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
        self.clone()
    }
}

impl ToTokenTree for crate::tt::Subtree {
    fn to_token(self) -> crate::tt::TokenTree {
    fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
        self.into()
    }
}

macro_rules! impl_to_to_tokentrees {
    ($($ty:ty => $this:ident $im:block);*) => {
    ($($span:ident: $ty:ty => $this:ident $im:block);*) => {
        $(
            impl ToTokenTree for $ty {
                fn to_token($this) -> crate::tt::TokenTree {
                fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
                    let leaf: crate::tt::Leaf = $im.into();
                    leaf.into()
                }
            }

            impl ToTokenTree for &$ty {
                fn to_token($this) -> crate::tt::TokenTree {
                fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
                    let leaf: crate::tt::Leaf = $im.clone().into();
                    leaf.into()
                }
@@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
}

impl_to_to_tokentrees! {
    u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
    usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
    i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
    bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
    crate::tt::Leaf => self { self };
    crate::tt::Literal => self { self };
    crate::tt::Ident => self { self };
    crate::tt::Punct => self { self };
    &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
    String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
    span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
    span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
    span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
    span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
    _span: crate::tt::Leaf => self { self };
    _span: crate::tt::Literal => self { self };
    _span: crate::tt::Ident => self { self };
    _span: crate::tt::Punct => self { self };
    span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
    span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
}

#[cfg(test)]
mod tests {
    use crate::tt;
    use base_db::{
        span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
        FileId,
    };
    use expect_test::expect;
    use syntax::{TextRange, TextSize};

    const DUMMY: tt::SpanData = tt::SpanData {
        range: TextRange::empty(TextSize::new(0)),
        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
        ctx: SyntaxContextId::ROOT,
    };

    #[test]
    fn test_quote_delimiters() {
        assert_eq!(quote!({}).to_string(), "{}");
        assert_eq!(quote!(()).to_string(), "()");
        assert_eq!(quote!([]).to_string(), "[]");
        assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
        assert_eq!(quote!(DUMMY =>()).to_string(), "()");
        assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
    }

    #[test]
    fn test_quote_idents() {
        assert_eq!(quote!(32).to_string(), "32");
        assert_eq!(quote!(struct).to_string(), "struct");
        assert_eq!(quote!(DUMMY =>32).to_string(), "32");
        assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
    }

    #[test]
    fn test_quote_hash_simple_literal() {
        let a = 20;
        assert_eq!(quote!(#a).to_string(), "20");
        assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
        let s: String = "hello".into();
        assert_eq!(quote!(#s).to_string(), "\"hello\"");
        assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
    }

    fn mk_ident(name: &str) -> crate::tt::Ident {
        crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
        crate::tt::Ident { text: name.into(), span: DUMMY }
    }

    #[test]
    fn test_quote_hash_token_tree() {
        let a = mk_ident("hello");

        let quoted = quote!(#a);
        let quoted = quote!(DUMMY =>#a);
        assert_eq!(quoted.to_string(), "hello");
        let t = format!("{quoted:?}");
        assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n  IDENT hello 4294967295");
        expect![[r#"
            SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
              IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
    }

    #[test]
    fn test_quote_simple_derive_copy() {
        let name = mk_ident("Foo");

        let quoted = quote! {
        let quoted = quote! {DUMMY =>
            impl Clone for #name {
                fn clone(&self) -> Self {
                    Self {}

@@ -268,18 +289,19 @@ mod tests {
        // }
        let struct_name = mk_ident("Foo");
        let fields = [mk_ident("name"), mk_ident("id")];
        let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
        let fields =
            fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);

        let list = crate::tt::Subtree {
            delimiter: crate::tt::Delimiter {
                kind: crate::tt::DelimiterKind::Brace,
                open: crate::tt::TokenId::unspecified(),
                close: crate::tt::TokenId::unspecified(),
                open: DUMMY,
                close: DUMMY,
            },
            token_trees: fields.collect(),
        };

        let quoted = quote! {
        let quoted = quote! {DUMMY =>
            impl Clone for #struct_name {
                fn clone(&self) -> Self {
                    Self #list
crates/hir-expand/src/span.rs (new file, 111 lines)

@@ -0,0 +1,111 @@
//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
//! as associating spans with text ranges in a particular file.
use base_db::{
    span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
    FileId,
};
use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
use triomphe::Arc;

use crate::db::ExpandDatabase;

pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;

/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SpanMap {
    /// Spanmap for a macro file
    ExpansionSpanMap(Arc<ExpansionSpanMap>),
    /// Spanmap for a real file
    RealSpanMap(Arc<RealSpanMap>),
}

#[derive(Copy, Clone)]
pub enum SpanMapRef<'a> {
    /// Spanmap for a macro file
    ExpansionSpanMap(&'a ExpansionSpanMap),
    /// Spanmap for a real file
    RealSpanMap(&'a RealSpanMap),
}

impl mbe::SpanMapper<SpanData> for SpanMap {
    fn span_for(&self, range: TextRange) -> SpanData {
        self.span_for_range(range)
    }
}
impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
    fn span_for(&self, range: TextRange) -> SpanData {
        self.span_for_range(range)
    }
}
impl mbe::SpanMapper<SpanData> for RealSpanMap {
    fn span_for(&self, range: TextRange) -> SpanData {
        self.span_for_range(range)
    }
}

impl SpanMap {
    pub fn span_for_range(&self, range: TextRange) -> SpanData {
        match self {
            Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
            Self::RealSpanMap(span_map) => span_map.span_for_range(range),
        }
    }

    pub fn as_ref(&self) -> SpanMapRef<'_> {
        match self {
            Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
            Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
        }
    }
}
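A usage sketch of the dispatch above (hypothetical caller; `span_map: SpanMap` and a syntax `node` are assumed to be in scope):

// Map a node's absolute text range to an anchored span.
let span: SpanData = span_map.as_ref().span_for_range(node.syntax().text_range());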

impl SpanMapRef<'_> {
    pub fn span_for_range(self, range: TextRange) -> SpanData {
        match self {
            Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
            Self::RealSpanMap(span_map) => span_map.span_for_range(range),
        }
    }
}

#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
    file_id: FileId,
    /// Invariant: Sorted vec over TextSize
    // FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
    pairs: Box<[(TextSize, ErasedFileAstId)]>,
}

impl RealSpanMap {
    /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
    pub fn absolute(file_id: FileId) -> Self {
        RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
    }

    pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
        let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
        let ast_id_map = db.ast_id_map(file_id.into());
        pairs.extend(
            db.parse(file_id)
                .tree()
                .items()
                .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
        );
        RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
    }

    pub fn span_for_range(&self, range: TextRange) -> SpanData {
        let start = range.start();
        let idx = self
            .pairs
            .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
            .unwrap_err();
        let (offset, ast_id) = self.pairs[idx - 1];
        SpanData {
            range: range - offset,
            anchor: SpanAnchor { file_id: self.file_id, ast_id },
            ctx: SyntaxContextId::ROOT,
        }
    }
}
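The comparator in `span_for_range` never returns `Equal`, so `binary_search_by` always yields `Err(insertion_point)`; the pair just before that point is the nearest anchor at or before `range.start()`, and the resulting span is stored relative to it. A self-contained sketch of the same lookup over toy offsets (plain `u32`s standing in for `TextSize` and `ErasedFileAstId`):

// Toy version of the anchor lookup in `span_for_range`.
fn anchor_for(pairs: &[(u32, u32)], start: u32) -> (u32, u32) {
    let idx = pairs
        .binary_search_by(|&(offset, _)| offset.cmp(&start).then(std::cmp::Ordering::Less))
        .unwrap_err();
    pairs[idx - 1]
}

// With pairs [(0, root), (100, item_a), (250, item_b)]:
// anchor_for(&pairs, 260) == (250, item_b), so an absolute range 260..270
// is stored as 10..20 anchored to item_b.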