mirror of
https://github.com/rust-lang/rust-analyzer.git
synced 2025-11-01 12:24:29 +00:00
Properly implement `might_be_inside_macro_call()` using semantic information instead of syntactic hacks
And rename it to `is_inside_macro_call()` accordingly.
This commit is contained in:
parent
1511c5b7fd
commit
87529e8631
9 changed files with 125 additions and 85 deletions
|
|
@ -407,14 +407,14 @@ impl<'db> SemanticsImpl<'db> {
|
|||
res
|
||||
}
|
||||
|
||||
pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
|
||||
pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
|
||||
let sa = self.analyze_no_infer(macro_call.syntax())?;
|
||||
|
||||
let macro_call = InFile::new(sa.file_id, macro_call);
|
||||
let file_id = sa.expand(self.db, macro_call)?;
|
||||
|
||||
let node = self.parse_or_expand(file_id.into());
|
||||
Some(node)
|
||||
Some(InFile::new(file_id.into(), node))
|
||||
}
|
||||
|
||||
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
|
||||
|
|
@ -468,10 +468,10 @@ impl<'db> SemanticsImpl<'db> {
|
|||
}
|
||||
|
||||
/// If `item` has an attribute macro attached to it, expands it.
|
||||
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
|
||||
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
|
||||
let src = self.wrap_node_infile(item.clone());
|
||||
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
|
||||
Some(self.expand(macro_call_id))
|
||||
Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
|
||||
}
|
||||
|
||||
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
|
||||
|
|
@ -846,49 +846,35 @@ impl<'db> SemanticsImpl<'db> {
|
|||
res
|
||||
}
|
||||
|
||||
// FIXME: This isn't quite right wrt to inner attributes
|
||||
/// Does a syntactic traversal to check whether this token might be inside a macro call
|
||||
pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
|
||||
token.parent_ancestors().any(|ancestor| {
|
||||
pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
|
||||
// FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
|
||||
// this is only used on real (not macro) files so this is not a problem.
|
||||
token.value.parent_ancestors().any(|ancestor| {
|
||||
if ast::MacroCall::can_cast(ancestor.kind()) {
|
||||
return true;
|
||||
}
|
||||
// Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
|
||||
let Some(item) = ast::Item::cast(ancestor) else { return false };
|
||||
item.attrs().any(|attr| {
|
||||
let Some(meta) = attr.meta() else { return false };
|
||||
let Some(path) = meta.path() else { return false };
|
||||
if let Some(attr_name) = path.as_single_name_ref() {
|
||||
let attr_name = attr_name.text();
|
||||
let attr_name = Symbol::intern(attr_name.as_str());
|
||||
if attr_name == sym::derive {
|
||||
|
||||
let Some(item) = ast::Item::cast(ancestor) else {
|
||||
return false;
|
||||
};
|
||||
// Optimization to skip the semantic check.
|
||||
if item.attrs().all(|attr| {
|
||||
attr.simple_name()
|
||||
.is_some_and(|attr| find_builtin_attr_idx(&Symbol::intern(&attr)).is_some())
|
||||
}) {
|
||||
return false;
|
||||
}
|
||||
self.with_ctx(|ctx| {
|
||||
if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
|
||||
return true;
|
||||
}
|
||||
// We ignore `#[test]` and friends in the def map, so we cannot expand them.
|
||||
// FIXME: We match by text. This is both hacky and incorrect (people can, and do, create
|
||||
// other macros named `test`). We cannot fix that unfortunately because we use this method
|
||||
// for speculative expansion in completion, which we cannot analyze. Fortunately, most macros
|
||||
// named `test` are test-like, meaning their expansion is not terribly important for IDE.
|
||||
if attr_name == sym::test
|
||||
|| attr_name == sym::bench
|
||||
|| attr_name == sym::test_case
|
||||
|| find_builtin_attr_idx(&attr_name).is_some()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
let mut segments = path.segments();
|
||||
let mut next_segment_text = || segments.next().and_then(|it| it.name_ref());
|
||||
// `#[core::prelude::rust_2024::test]` or `#[std::prelude::rust_2024::test]`.
|
||||
if next_segment_text().is_some_and(|it| matches!(&*it.text(), "core" | "std"))
|
||||
&& next_segment_text().is_some_and(|it| it.text() == "prelude")
|
||||
&& next_segment_text().is_some()
|
||||
&& next_segment_text()
|
||||
.is_some_and(|it| matches!(&*it.text(), "test" | "bench" | "test_case"))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
true
|
||||
let adt = match item {
|
||||
ast::Item::Struct(it) => it.into(),
|
||||
ast::Item::Enum(it) => it.into(),
|
||||
ast::Item::Union(it) => it.into(),
|
||||
_ => return false,
|
||||
};
|
||||
ctx.has_derives(token.with_value(&adt))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,8 +8,8 @@ use std::{iter, ops::ControlFlow};
|
|||
|
||||
use base_db::RootQueryDb as _;
|
||||
use hir::{
|
||||
DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
|
||||
Semantics, SemanticsScope, Symbol, Type, TypeInfo,
|
||||
DisplayTarget, HasAttrs, InFile, Local, ModuleDef, ModuleSource, Name, PathResolution,
|
||||
ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
|
||||
};
|
||||
use ide_db::{
|
||||
FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
|
||||
|
|
@ -751,7 +751,7 @@ impl<'a> CompletionContext<'a> {
|
|||
original_offset,
|
||||
} = expand_and_analyze(
|
||||
&sema,
|
||||
original_file.syntax().clone(),
|
||||
InFile::new(editioned_file_id.into(), original_file.syntax().clone()),
|
||||
file_with_fake_ident.syntax().clone(),
|
||||
offset,
|
||||
&original_token,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
//! Module responsible for analyzing the code surrounding the cursor for completion.
|
||||
use std::iter;
|
||||
|
||||
use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
|
||||
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
|
||||
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
|
||||
use itertools::Either;
|
||||
use syntax::{
|
||||
|
|
@ -50,7 +50,7 @@ pub(super) struct AnalysisResult {
|
|||
|
||||
pub(super) fn expand_and_analyze(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
original_file: SyntaxNode,
|
||||
original_file: InFile<SyntaxNode>,
|
||||
speculative_file: SyntaxNode,
|
||||
offset: TextSize,
|
||||
original_token: &SyntaxToken,
|
||||
|
|
@ -72,7 +72,7 @@ pub(super) fn expand_and_analyze(
|
|||
relative_offset,
|
||||
)
|
||||
.unwrap_or(ExpansionResult {
|
||||
original_file,
|
||||
original_file: original_file.value,
|
||||
speculative_file,
|
||||
original_offset: offset,
|
||||
speculative_offset: fake_ident_token.text_range().start(),
|
||||
|
|
@ -125,7 +125,7 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt
|
|||
/// the best we can do.
|
||||
fn expand_maybe_stop(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
original_file: SyntaxNode,
|
||||
original_file: InFile<SyntaxNode>,
|
||||
speculative_file: SyntaxNode,
|
||||
original_offset: TextSize,
|
||||
fake_ident_token: SyntaxToken,
|
||||
|
|
@ -142,17 +142,16 @@ fn expand_maybe_stop(
|
|||
return result;
|
||||
}
|
||||
|
||||
// This needs to come after the recursive call, because our "inside macro" detection is subtly wrong
|
||||
// with regard to attribute macros named `test` that are not std's test. So hopefully we will expand
|
||||
// them successfully above and be able to analyze.
|
||||
// Left biased since there may already be an identifier token there, and we appended to it.
|
||||
if !sema.might_be_inside_macro_call(&fake_ident_token)
|
||||
&& token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
|
||||
.is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
|
||||
// We can't check whether the fake expansion is inside macro call, because that requires semantic info.
|
||||
// But hopefully checking just the real one should be enough.
|
||||
if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
|
||||
.is_some_and(|original_token| {
|
||||
!sema.is_inside_macro_call(original_file.with_value(&original_token))
|
||||
})
|
||||
{
|
||||
// Recursion base case.
|
||||
Some(ExpansionResult {
|
||||
original_file,
|
||||
original_file: original_file.value,
|
||||
speculative_file,
|
||||
original_offset,
|
||||
speculative_offset: fake_ident_token.text_range().start(),
|
||||
|
|
@ -166,7 +165,7 @@ fn expand_maybe_stop(
|
|||
|
||||
fn expand(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
original_file: SyntaxNode,
|
||||
original_file: InFile<SyntaxNode>,
|
||||
speculative_file: SyntaxNode,
|
||||
original_offset: TextSize,
|
||||
fake_ident_token: SyntaxToken,
|
||||
|
|
@ -176,7 +175,7 @@ fn expand(
|
|||
|
||||
let parent_item =
|
||||
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
|
||||
let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
|
||||
let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
|
||||
.and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
|
||||
let ancestor_items = iter::successors(
|
||||
Option::zip(
|
||||
|
|
@ -249,7 +248,7 @@ fn expand(
|
|||
}
|
||||
|
||||
// No attributes have been expanded, so look for macro_call! token trees or derive token trees
|
||||
let orig_tt = ancestors_at_offset(&original_file, original_offset)
|
||||
let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
|
||||
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
|
||||
.last()?;
|
||||
let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
|
||||
|
|
@ -292,7 +291,7 @@ fn expand(
|
|||
fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
|
||||
{
|
||||
return Some(ExpansionResult {
|
||||
original_file,
|
||||
original_file: original_file.value,
|
||||
speculative_file,
|
||||
original_offset,
|
||||
speculative_offset: fake_ident_token.text_range().start(),
|
||||
|
|
@ -349,7 +348,7 @@ fn expand(
|
|||
}
|
||||
let result = expand_maybe_stop(
|
||||
sema,
|
||||
actual_expansion.clone(),
|
||||
InFile::new(file.into(), actual_expansion.clone()),
|
||||
fake_expansion.clone(),
|
||||
new_offset,
|
||||
fake_mapped_token,
|
||||
|
|
|
|||
|
|
@ -2111,6 +2111,56 @@ fn foo() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cfg_attr_attr_macro() {
|
||||
check(
|
||||
r#"
|
||||
//- proc_macros: identity
|
||||
#[cfg_attr(test, proc_macros::identity)]
|
||||
fn foo() {
|
||||
$0
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
fn foo() fn()
|
||||
md proc_macros
|
||||
bt u32 u32
|
||||
kw async
|
||||
kw const
|
||||
kw crate::
|
||||
kw enum
|
||||
kw extern
|
||||
kw false
|
||||
kw fn
|
||||
kw for
|
||||
kw if
|
||||
kw if let
|
||||
kw impl
|
||||
kw impl for
|
||||
kw let
|
||||
kw letm
|
||||
kw loop
|
||||
kw match
|
||||
kw mod
|
||||
kw return
|
||||
kw self::
|
||||
kw static
|
||||
kw struct
|
||||
kw trait
|
||||
kw true
|
||||
kw type
|
||||
kw union
|
||||
kw unsafe
|
||||
kw use
|
||||
kw while
|
||||
kw while let
|
||||
sn macro_rules
|
||||
sn pd
|
||||
sn ppd
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escaped_label() {
|
||||
check(
|
||||
|
|
|
|||
|
|
@ -524,6 +524,7 @@ impl<'a> FindUsages<'a> {
|
|||
fn find_nodes<'b>(
|
||||
sema: &'b Semantics<'_, RootDatabase>,
|
||||
name: &str,
|
||||
file_id: EditionedFileId,
|
||||
node: &syntax::SyntaxNode,
|
||||
offset: TextSize,
|
||||
) -> impl Iterator<Item = SyntaxNode> + 'b {
|
||||
|
|
@ -534,7 +535,7 @@ impl<'a> FindUsages<'a> {
|
|||
})
|
||||
.into_iter()
|
||||
.flat_map(move |token| {
|
||||
if sema.might_be_inside_macro_call(&token) {
|
||||
if sema.is_inside_macro_call(InFile::new(file_id.into(), &token)) {
|
||||
sema.descend_into_macros_exact(token)
|
||||
} else {
|
||||
<_>::from([token])
|
||||
|
|
@ -654,11 +655,14 @@ impl<'a> FindUsages<'a> {
|
|||
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
|
||||
|
||||
for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
|
||||
let usages =
|
||||
FindUsages::find_nodes(sema, ¤t_to_process, &tree, offset)
|
||||
.filter(|it| {
|
||||
matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF)
|
||||
});
|
||||
let usages = FindUsages::find_nodes(
|
||||
sema,
|
||||
¤t_to_process,
|
||||
file_id,
|
||||
&tree,
|
||||
offset,
|
||||
)
|
||||
.filter(|it| matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF));
|
||||
for usage in usages {
|
||||
if let Some(alias) = usage.parent().and_then(|it| {
|
||||
let path = ast::PathSegment::cast(it)?.parent_path();
|
||||
|
|
@ -813,7 +817,7 @@ impl<'a> FindUsages<'a> {
|
|||
let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
|
||||
|
||||
for offset in FindUsages::match_indices(&file_text, finder, search_range) {
|
||||
let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
|
||||
let usages = FindUsages::find_nodes(this.sema, name, file_id, &tree, offset)
|
||||
.filter_map(ast::NameRef::cast);
|
||||
for usage in usages {
|
||||
let found_usage = usage
|
||||
|
|
@ -970,8 +974,8 @@ impl<'a> FindUsages<'a> {
|
|||
return;
|
||||
}
|
||||
|
||||
for name in
|
||||
Self::find_nodes(sema, name, &tree, offset).filter_map(ast::NameLike::cast)
|
||||
for name in Self::find_nodes(sema, name, file_id, &tree, offset)
|
||||
.filter_map(ast::NameLike::cast)
|
||||
{
|
||||
if match name {
|
||||
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
|
||||
|
|
@ -985,8 +989,8 @@ impl<'a> FindUsages<'a> {
|
|||
// Search for occurrences of the `Self` referring to our type
|
||||
if let Some((self_ty, finder)) = &include_self_kw_refs {
|
||||
for offset in Self::match_indices(&text, finder, search_range) {
|
||||
for name_ref in
|
||||
Self::find_nodes(sema, "Self", &tree, offset).filter_map(ast::NameRef::cast)
|
||||
for name_ref in Self::find_nodes(sema, "Self", file_id, &tree, offset)
|
||||
.filter_map(ast::NameRef::cast)
|
||||
{
|
||||
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
|
||||
return;
|
||||
|
|
@ -1010,7 +1014,7 @@ impl<'a> FindUsages<'a> {
|
|||
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
|
||||
|
||||
for offset in Self::match_indices(&text, finder, search_range) {
|
||||
for name_ref in Self::find_nodes(sema, "super", &tree, offset)
|
||||
for name_ref in Self::find_nodes(sema, "super", file_id, &tree, offset)
|
||||
.filter_map(ast::NameRef::cast)
|
||||
{
|
||||
if self.found_name_ref(&name_ref, sink) {
|
||||
|
|
@ -1020,7 +1024,7 @@ impl<'a> FindUsages<'a> {
|
|||
}
|
||||
if let Some(finder) = &is_crate_root {
|
||||
for offset in Self::match_indices(&text, finder, search_range) {
|
||||
for name_ref in Self::find_nodes(sema, "crate", &tree, offset)
|
||||
for name_ref in Self::find_nodes(sema, "crate", file_id, &tree, offset)
|
||||
.filter_map(ast::NameRef::cast)
|
||||
{
|
||||
if self.found_name_ref(&name_ref, sink) {
|
||||
|
|
@ -1064,8 +1068,8 @@ impl<'a> FindUsages<'a> {
|
|||
let finder = &Finder::new("self");
|
||||
|
||||
for offset in Self::match_indices(&text, finder, search_range) {
|
||||
for name_ref in
|
||||
Self::find_nodes(sema, "self", &tree, offset).filter_map(ast::NameRef::cast)
|
||||
for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset)
|
||||
.filter_map(ast::NameRef::cast)
|
||||
{
|
||||
if self.found_self_module_name_ref(&name_ref, sink) {
|
||||
return;
|
||||
|
|
|
|||
|
|
@ -287,7 +287,7 @@ impl<'db> MatchFinder<'db> {
|
|||
if let Some(expanded) = self.sema.expand_macro_call(¯o_call) {
|
||||
if let Some(tt) = macro_call.token_tree() {
|
||||
self.output_debug_for_nodes_at_range(
|
||||
&expanded,
|
||||
&expanded.value,
|
||||
range,
|
||||
&Some(self.sema.original_range(tt.syntax())),
|
||||
out,
|
||||
|
|
|
|||
|
|
@ -194,7 +194,7 @@ impl MatchFinder<'_> {
|
|||
// nodes that originated entirely from within the token tree of the macro call.
|
||||
// i.e. we don't want to match something that came from the macro itself.
|
||||
if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
|
||||
self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
|
||||
self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -146,10 +146,11 @@ fn expand_macro_recur(
|
|||
offset_in_original_node: TextSize,
|
||||
) -> Option<SyntaxNode> {
|
||||
let ExpandResult { value: expanded, err } = match macro_call {
|
||||
item @ ast::Item::MacroCall(macro_call) => {
|
||||
sema.expand_attr_macro(item).or_else(|| sema.expand_allowed_builtins(macro_call))?
|
||||
}
|
||||
item => sema.expand_attr_macro(item)?,
|
||||
item @ ast::Item::MacroCall(macro_call) => sema
|
||||
.expand_attr_macro(item)
|
||||
.map(|it| it.map(|it| it.value))
|
||||
.or_else(|| sema.expand_allowed_builtins(macro_call))?,
|
||||
item => sema.expand_attr_macro(item)?.map(|it| it.value),
|
||||
};
|
||||
let expanded = expanded.clone_for_update();
|
||||
if let Some(err) = err {
|
||||
|
|
|
|||
|
|
@ -653,7 +653,7 @@ impl<'a> WalkExpandedExprCtx<'a> {
|
|||
expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
|
||||
{
|
||||
match_ast! {
|
||||
match expanded {
|
||||
match (expanded.value) {
|
||||
ast::MacroStmts(it) => {
|
||||
self.handle_expanded(it, cb);
|
||||
},
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue