Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 21:05:02 +00:00
Merge commit 'baee6b338b' into sync-from-ra
parent: 0155385b57
commit: aa55ce9567
139 changed files with 4248 additions and 1042 deletions
@@ -430,14 +430,13 @@ fn macro_arg_node(
     let loc = db.lookup_intern_macro_call(id);
     let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
         let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
-            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
+            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
         } else {
             loc.kind
                 .arg(db)
                 .and_then(|arg| ast::TokenTree::cast(arg.value))
-                .map(|tt| tt.reparse_as_expr().to_syntax())
+                .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
         };

         match res {
             Some(res) if res.errors().is_empty() => res.syntax_node(),
             Some(res) => {
@@ -19,7 +19,7 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
 use triomphe::Arc;

@@ -29,7 +29,7 @@ use crate::{
     hygiene::Hygiene,
     mod_path::ModPath,
     EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
-    MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
+    MacroCallLoc, MacroDefId, MacroDefKind,
 };

 pub fn expand_eager_macro_input(
@@ -38,7 +38,7 @@ pub fn expand_eager_macro_input(
     macro_call: InFile<ast::MacroCall>,
     def: MacroDefId,
     resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
+) -> ExpandResult<Option<MacroCallId>> {
     let ast_map = db.ast_id_map(macro_call.file_id);
     // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
     let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
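For context on the signature change above: the eager-expansion entry point no longer surfaces an unresolved macro as a separate `Result` error layer; the failure travels inside the expansion result itself. A minimal, self-contained sketch of the two shapes (the `ExpandResult`, `ExpandError`, and `MacroCallId` below are simplified stand-ins, not the real rust-analyzer types):

```rust
// Simplified stand-ins for illustration only.
#[derive(Debug)]
struct ExpandError(String);

#[derive(Debug)]
struct ExpandResult<T> {
    value: T,
    err: Option<ExpandError>,
}

#[derive(Debug, Clone, Copy)]
struct MacroCallId(u32);

// Old shape (sketch): a resolution failure aborts with a hard `Err`, losing any partial result.
fn expand_old(resolved: Option<MacroCallId>) -> Result<ExpandResult<Option<MacroCallId>>, String> {
    let id = resolved.ok_or_else(|| "unresolved macro".to_owned())?;
    Ok(ExpandResult { value: Some(id), err: None })
}

// New shape (sketch): the failure is recorded as a diagnostic inside the result,
// so callers always get a value (possibly `None`) plus an optional error.
fn expand_new(resolved: Option<MacroCallId>) -> ExpandResult<Option<MacroCallId>> {
    match resolved {
        Some(id) => ExpandResult { value: Some(id), err: None },
        None => ExpandResult { value: None, err: Some(ExpandError("unresolved macro".into())) },
    }
}

fn main() {
    println!("{:?}", expand_old(None));
    println!("{:?}", expand_new(None));
}
```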
@@ -71,41 +71,46 @@ pub fn expand_eager_macro_input(
             InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
             krate,
             resolver,
-        )?
+        )
     };
     let err = parse_err.or(err);

     let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
-        return Ok(ExpandResult { value: None, err });
+        return ExpandResult { value: None, err };
     };

-    let og_tmap = mbe::syntax_node_to_token_map(
-        macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(),
-    );
-
     let (mut subtree, expanded_eager_input_token_map) =
         mbe::syntax_node_to_token_tree(&expanded_eager_input);

-    // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
-    // so we need to remap them to the original input of the eager macro.
-    subtree.visit_ids(&|id| {
-        // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+    let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
+        let mut ids_used = FxHashSet::default();
+        let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
+        // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
+        // so we need to remap them to the original input of the eager macro.
+        subtree.visit_ids(&mut |id| {
+            // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix

-        if let Some(range) =
-            expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
-        {
-            // remap from expanded eager input to eager input expansion
-            if let Some(og_range) = mapping.get(&range) {
-                // remap from eager input expansion to original eager input
-                if let Some(&og_range) = ws_mapping.get(og_range) {
-                    if let Some(og_token) = og_tmap.token_by_range(og_range) {
-                        return og_token;
+            if let Some(range) = expanded_eager_input_token_map
+                .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+            {
+                // remap from expanded eager input to eager input expansion
+                if let Some(og_range) = mapping.get(&range) {
+                    // remap from eager input expansion to original eager input
+                    if let Some(&og_range) = ws_mapping.get(og_range) {
+                        if let Some(og_token) = og_tmap.token_by_range(og_range) {
+                            ids_used.insert(og_token);
+                            return og_token;
+                        }
                     }
                 }
             }
-        }
-        tt::TokenId::UNSPECIFIED
-    });
+            tt::TokenId::UNSPECIFIED
+        });
+        og_tmap.filter(|id| ids_used.contains(&id));
+        og_tmap
+    } else {
+        Default::default()
+    };
     subtree.delimiter = crate::tt::Delimiter::unspecified();

     let loc = MacroCallLoc {
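The remapping in the hunk above chains several lookups: a token id from the re-parsed eager input is turned into a text range, that range is mapped back to the eager input expansion, then through the whitespace-aware mapping back to the original call site, and finally resolved to an original token id; anything that cannot be traced is dropped. A rough, self-contained sketch of that chain using plain `HashMap`s in place of the real token maps (all names here are illustrative, not rust-analyzer APIs):

```rust
use std::collections::{HashMap, HashSet};

type TokenId = u32;
type Range = (u32, u32); // simplified stand-in for a text range

const UNSPECIFIED: TokenId = u32::MAX;

/// Chain of lookups mirroring the remapping above:
/// expanded token id -> expanded range -> expansion range -> original range -> original token id.
fn remap(
    id: TokenId,
    expanded_ranges: &HashMap<TokenId, Range>, // stands in for expanded_eager_input_token_map
    mapping: &HashMap<Range, Range>,           // expanded eager input -> eager input expansion
    ws_mapping: &HashMap<Range, Range>,        // eager input expansion -> original input
    og_tokens: &HashMap<Range, TokenId>,       // stands in for og_tmap
    ids_used: &mut HashSet<TokenId>,
) -> TokenId {
    if let Some(range) = expanded_ranges.get(&id) {
        if let Some(og_range) = mapping.get(range) {
            if let Some(og_range) = ws_mapping.get(og_range) {
                if let Some(&og_token) = og_tokens.get(og_range) {
                    ids_used.insert(og_token);
                    return og_token;
                }
            }
        }
    }
    // Any id that cannot be traced back (braces, synthesized tokens, ...) is discarded.
    UNSPECIFIED
}

fn main() {
    let expanded_ranges = HashMap::from([(1, (0, 5))]);
    let mapping = HashMap::from([((0, 5), (10, 15))]);
    let ws_mapping = HashMap::from([((10, 15), (20, 25))]);
    let og_tokens = HashMap::from([((20, 25), 7)]);
    let mut ids_used = HashSet::new();
    assert_eq!(remap(1, &expanded_ranges, &mapping, &ws_mapping, &og_tokens, &mut ids_used), 7);
    assert_eq!(remap(2, &expanded_ranges, &mapping, &ws_mapping, &og_tokens, &mut ids_used), UNSPECIFIED);
    println!("ids_used = {ids_used:?}");
}
```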
@@ -119,7 +124,7 @@ pub fn expand_eager_macro_input(
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
     };

-    Ok(ExpandResult { value: Some(db.intern_macro_call(loc)), err })
+    ExpandResult { value: Some(db.intern_macro_call(loc)), err }
 }

 fn lazy_expand(
@@ -145,13 +150,13 @@ fn eager_macro_recur(
     curr: InFile<SyntaxNode>,
     krate: CrateId,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> {
+) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
     let original = curr.value.clone_for_update();
     let mut mapping = FxHashMap::default();

     let mut replacements = Vec::new();

-    // Note: We only report a single error inside of eager expansions
+    // FIXME: We only report a single error inside of eager expansions
     let mut error = None;
     let mut offset = 0i32;
     let apply_offset = |it: TextSize, offset: i32| {
@@ -182,7 +187,14 @@ fn eager_macro_recur(
                 }
             };
             let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
-                Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
+                Some(path) => match macro_resolver(path.clone()) {
+                    Some(def) => def,
+                    None => {
+                        error =
+                            Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+                        continue;
+                    }
+                },
                 None => {
                     error = Some(ExpandError::other("malformed macro invocation"));
                     continue;
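In the hunk above, a failed macro-name resolution no longer unwinds out of the traversal through `?`; the error is recorded (only a single error is kept, per the FIXME noted earlier) and the walk moves on to the next call. A rough sketch of that record-and-continue loop shape, with invented names standing in for the real resolver and error types:

```rust
// Invented, simplified stand-ins for illustration only.
struct ExpandError(String);

fn resolve(name: &str) -> Option<u32> {
    // Pretend only `format_args` resolves.
    (name == "format_args").then_some(1)
}

fn walk_calls(calls: &[&str]) -> (Vec<u32>, Option<ExpandError>) {
    let mut expanded = Vec::new();
    // Only one error is kept, mirroring the FIXME in the diff.
    let mut error = None;
    for call in calls {
        let def = match resolve(call) {
            Some(def) => def,
            None => {
                // Record the failure and keep processing the remaining calls
                // instead of returning early with `?`.
                error = Some(ExpandError(format!("unresolved macro {call}")));
                continue;
            }
        };
        expanded.push(def);
    }
    (expanded, error)
}

fn main() {
    let (expanded, err) = walk_calls(&["format_args", "does_not_exist", "format_args"]);
    assert_eq!(expanded.len(), 2);
    assert!(err.is_some());
}
```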
@@ -190,32 +202,31 @@ fn eager_macro_recur(
             };
             let ExpandResult { value, err } = match def.kind {
                 MacroDefKind::BuiltInEager(..) => {
-                    let ExpandResult { value, err } = match expand_eager_macro_input(
+                    let ExpandResult { value, err } = expand_eager_macro_input(
                         db,
                         krate,
                         curr.with_value(call.clone()),
                         def,
                         macro_resolver,
-                    ) {
-                        Ok(it) => it,
-                        Err(err) => return Err(err),
-                    };
+                    );
                     match value {
                         Some(call_id) => {
                             let ExpandResult { value, err: err2 } =
                                 db.parse_macro_expansion(call_id.as_macro_file());

-                            let call_tt_start =
-                                call.token_tree().unwrap().syntax().text_range().start();
-                            let call_start = apply_offset(call.syntax().text_range().start(), offset);
-                            if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
-                                mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
-                                    value
-                                        .1
-                                        .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
-                                        .map(|r| (r + call_start, range + call_tt_start))
-                                }));
-                            };
+                            if let Some(tt) = call.token_tree() {
+                                let call_tt_start = tt.syntax().text_range().start();
+                                let call_start =
+                                    apply_offset(call.syntax().text_range().start(), offset);
+                                if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+                                    mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                                        value
+                                            .1
+                                            .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+                                            .map(|r| (r + call_start, range + call_tt_start))
+                                    }));
+                                }
+                            }

                             ExpandResult {
                                 value: Some(value.0.syntax_node().clone_for_update()),
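This hunk (and the similar one that follows) drops a `.unwrap()` on `call.token_tree()`: a macro call in broken source may have no token tree at all, so the range-mapping work is skipped in that case instead of panicking. A minimal sketch of the pattern, with hypothetical stand-in types rather than rust-analyzer's AST nodes:

```rust
// Hypothetical stand-ins for illustration only.
struct TokenTree {
    start: u32,
}

struct MacroCall {
    token_tree: Option<TokenTree>, // missing when the source is malformed, e.g. `foo!`
}

fn map_ranges(call: &MacroCall) -> Option<u32> {
    // Before: `call.token_tree.unwrap()` would panic on malformed input.
    // After: bail out of the mapping gracefully when there is nothing to map.
    let tt = call.token_tree.as_ref()?;
    Some(tt.start)
}

fn main() {
    assert_eq!(map_ranges(&MacroCall { token_tree: Some(TokenTree { start: 4 }) }), Some(4));
    assert_eq!(map_ranges(&MacroCall { token_tree: None }), None);
}
```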
@@ -247,25 +258,27 @@ fn eager_macro_recur(
                         parse.as_ref().map(|it| it.syntax_node()),
                         krate,
                         macro_resolver,
-                    )?;
+                    );
                     let err = err.or(error);

-                    let call_tt_start = call.token_tree().unwrap().syntax().text_range().start();
-                    let call_start = apply_offset(call.syntax().text_range().start(), offset);
-                    if let Some((_tt, arg_map, _)) = parse
-                        .file_id
-                        .macro_file()
-                        .and_then(|id| db.macro_arg(id.macro_call_id).value)
-                        .as_deref()
-                    {
-                        mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
-                            tm.first_range_by_token(
-                                decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
-                                syntax::SyntaxKind::TOMBSTONE,
-                            )
-                            .map(|r| (r + call_start, range + call_tt_start))
-                        }));
-                    };
+                    if let Some(tt) = call.token_tree() {
+                        let call_tt_start = tt.syntax().text_range().start();
+                        let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                        if let Some((_tt, arg_map, _)) = parse
+                            .file_id
+                            .macro_file()
+                            .and_then(|id| db.macro_arg(id.macro_call_id).value)
+                            .as_deref()
+                        {
+                            mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                                tm.first_range_by_token(
+                                    decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+                                    syntax::SyntaxKind::TOMBSTONE,
+                                )
+                                .map(|r| (r + call_start, range + call_tt_start))
+                            }));
+                        }
+                    }
                     // FIXME: Do we need to re-use _m here?
                     ExpandResult { value: value.map(|(n, _m)| n), err }
                 }
@@ -275,7 +288,7 @@ fn eager_macro_recur(
             }
             // check if the whole original syntax is replaced
             if call.syntax() == &original {
                 return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
-                return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
+                return ExpandResult { value: value.zip(Some(mapping)), err: error };
             }

             if let Some(insert) = value {
@@ -286,5 +299,5 @@ fn eager_macro_recur(
     }

     replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
-    Ok(ExpandResult { value: Some((original, mapping)), err: error })
+    ExpandResult { value: Some((original, mapping)), err: error }
 }
@@ -173,7 +173,7 @@ fn make_hygiene_info(
     db: &dyn ExpandDatabase,
     macro_file: MacroFile,
     loc: &MacroCallLoc,
-) -> Option<HygieneInfo> {
+) -> HygieneInfo {
     let def = loc.def.ast_id().left().and_then(|id| {
         let def_tt = match id.to_node(db) {
             ast::Macro::MacroRules(mac) => mac.token_tree()?,
@@ -204,7 +204,7 @@ fn make_hygiene_info(
         ))
     });

-    Some(HygieneInfo {
+    HygieneInfo {
         file: macro_file,
         attr_input_or_mac_def_start: attr_input_or_mac_def
             .map(|it| it.map(|tt| tt.syntax().text_range().start())),
@@ -212,7 +212,7 @@ fn make_hygiene_info(
         macro_arg,
         macro_def,
         exp_map,
-    })
+    }
 }

 impl HygieneFrame {
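For the `make_hygiene_info` hunks above: instead of returning `Option<HygieneInfo>`, the function now always builds the info struct and keeps optionality in individual fields. A rough sketch of that general shape change, using invented field names rather than the real `HygieneInfo` layout or the actual reason the old version could return `None`:

```rust
// Invented, simplified stand-in for the real HygieneInfo.
#[derive(Debug)]
struct HygieneInfo {
    file: u32,
    // Parts that may legitimately be absent keep their Option on the field.
    def_site_start: Option<u32>,
}

// Old shape (sketch): a missing piece made the whole result None.
fn make_old(file: u32, def_site_start: Option<u32>) -> Option<HygieneInfo> {
    let def_site_start = def_site_start?;
    Some(HygieneInfo { file, def_site_start: Some(def_site_start) })
}

// New shape (sketch): the struct is always produced; missing pieces stay None inside it,
// so callers no longer need a separate "no hygiene info" code path.
fn make_new(file: u32, def_site_start: Option<u32>) -> HygieneInfo {
    HygieneInfo { file, def_site_start }
}

fn main() {
    assert!(make_old(1, None).is_none());
    println!("{:?}", make_new(1, None));
}
```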
@@ -221,8 +221,7 @@ impl HygieneFrame {
             None => (None, None, false),
             Some(macro_file) => {
                 let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                let info =
-                    make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info));
+                let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
                 match loc.def.kind {
                     MacroDefKind::Declarative(_) => {
                         (info, Some(loc.def.krate), loc.def.local_inner)
@@ -236,17 +235,14 @@ impl HygieneFrame {
                 }
             };

-        let (calling_file, info) = match info {
-            None => {
-                return HygieneFrame {
-                    expansion: None,
-                    local_inner,
-                    krate,
-                    call_site: None,
-                    def_site: None,
-                };
-            }
-            Some(it) => it,
+        let Some((info, calling_file)) = info else {
+            return HygieneFrame {
+                expansion: None,
+                local_inner,
+                krate,
+                call_site: None,
+                def_site: None,
+            };
         };

         let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
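The hunk above swaps a `match` whose `None` arm exists only to early-return for a `let ... else` binding, which keeps the happy path unindented. A minimal illustration of the same rewrite on generic code (not the real `HygieneFrame` fields):

```rust
fn describe_match(info: Option<(u32, &str)>) -> String {
    // Before: a match where one arm exists only to early-return.
    let (id, file) = match info {
        None => return "no expansion".to_owned(),
        Some(it) => it,
    };
    format!("{file}#{id}")
}

fn describe_let_else(info: Option<(u32, &str)>) -> String {
    // After: let-else expresses "bind or bail" directly.
    let Some((id, file)) = info else {
        return "no expansion".to_owned();
    };
    format!("{file}#{id}")
}

fn main() {
    assert_eq!(describe_match(Some((1, "main.rs"))), describe_let_else(Some((1, "main.rs"))));
    assert_eq!(describe_match(None), describe_let_else(None));
}
```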
@@ -415,6 +415,24 @@ impl MacroDefId {
         )
     }

+    pub fn is_derive(&self) -> bool {
+        matches!(
+            self.kind,
+            MacroDefKind::BuiltInDerive(..)
+                | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+        )
+    }
+
+    pub fn is_fn_like(&self) -> bool {
+        matches!(
+            self.kind,
+            MacroDefKind::BuiltIn(..)
+                | MacroDefKind::ProcMacro(_, ProcMacroKind::FuncLike, _)
+                | MacroDefKind::BuiltInEager(..)
+                | MacroDefKind::Declarative(..)
+        )
+    }
+
     pub fn is_attribute_derive(&self) -> bool {
         matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
     }
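The classification helpers added here pattern-match the macro definition's kind with `matches!`, which keeps call sites free of long `match` arms. A tiny stand-alone illustration of the same pattern on a simplified enum (not the real `MacroDefKind`):

```rust
// Simplified stand-in for the real MacroDefKind.
enum MacroKind {
    Declarative,
    BuiltIn,
    BuiltInEager,
    BuiltInDerive,
    ProcMacroFuncLike,
    ProcMacroCustomDerive,
}

struct MacroDef {
    kind: MacroKind,
}

impl MacroDef {
    // Same shape as the helpers in the hunk above: one matches! per category.
    fn is_fn_like(&self) -> bool {
        matches!(
            self.kind,
            MacroKind::Declarative
                | MacroKind::BuiltIn
                | MacroKind::BuiltInEager
                | MacroKind::ProcMacroFuncLike
        )
    }

    fn is_derive(&self) -> bool {
        matches!(self.kind, MacroKind::BuiltInDerive | MacroKind::ProcMacroCustomDerive)
    }
}

fn main() {
    assert!(MacroDef { kind: MacroKind::BuiltInEager }.is_fn_like());
    assert!(MacroDef { kind: MacroKind::ProcMacroCustomDerive }.is_derive());
    assert!(!MacroDef { kind: MacroKind::BuiltInDerive }.is_fn_like());
}
```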