mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-11-01 04:18:20 +00:00
Properly implement might_be_inside_macro_call() using semantic information instead of syntactical hacks
And rename it to `is_inside_macro_call()` accordingly.
parent 1511c5b7fd
commit 87529e8631

9 changed files with 125 additions and 85 deletions
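
In short: the old `might_be_inside_macro_call()` walked the syntax tree and guessed from attribute names, while the new `is_inside_macro_call()` asks the semantic layer whether an attribute or derive actually resolves to a macro. A minimal sketch of the caller-visible change, assuming `sema`, `file_id`, and `token` are already in scope at the call site:

    // Before: purely syntactic, operates on a bare token.
    let inside = sema.might_be_inside_macro_call(&token);

    // After: the token carries its file id, so the check can consult the
    // def map instead of matching attribute names by text.
    let inside = sema.is_inside_macro_call(InFile::new(file_id.into(), &token));
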
@@ -407,14 +407,14 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
 
         let macro_call = InFile::new(sa.file_id, macro_call);
         let file_id = sa.expand(self.db, macro_call)?;
 
         let node = self.parse_or_expand(file_id.into());
-        Some(node)
+        Some(InFile::new(file_id.into(), node))
     }
 
     pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {

@@ -468,10 +468,10 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     /// If `item` has an attribute macro attached to it, expands it.
-    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
+    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
         let src = self.wrap_node_infile(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
-        Some(self.expand(macro_call_id))
+        Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
     }
 
     pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {

@@ -846,49 +846,35 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    // FIXME: This isn't quite right wrt to inner attributes
-    /// Does a syntactic traversal to check whether this token might be inside a macro call
-    pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
-        token.parent_ancestors().any(|ancestor| {
+    pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
+        // FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
+        // this is only used on real (not macro) files so this is not a problem.
+        token.value.parent_ancestors().any(|ancestor| {
             if ast::MacroCall::can_cast(ancestor.kind()) {
                 return true;
             }
-            // Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
-            let Some(item) = ast::Item::cast(ancestor) else { return false };
-            item.attrs().any(|attr| {
-                let Some(meta) = attr.meta() else { return false };
-                let Some(path) = meta.path() else { return false };
-                if let Some(attr_name) = path.as_single_name_ref() {
-                    let attr_name = attr_name.text();
-                    let attr_name = Symbol::intern(attr_name.as_str());
-                    if attr_name == sym::derive {
-                        return true;
-                    }
-                    // We ignore `#[test]` and friends in the def map, so we cannot expand them.
-                    // FIXME: We match by text. This is both hacky and incorrect (people can, and do, create
-                    // other macros named `test`). We cannot fix that unfortunately because we use this method
-                    // for speculative expansion in completion, which we cannot analyze. Fortunately, most macros
-                    // named `test` are test-like, meaning their expansion is not terribly important for IDE.
-                    if attr_name == sym::test
-                        || attr_name == sym::bench
-                        || attr_name == sym::test_case
-                        || find_builtin_attr_idx(&attr_name).is_some()
-                    {
-                        return false;
-                    }
+
+            let Some(item) = ast::Item::cast(ancestor) else {
+                return false;
+            };
+            // Optimization to skip the semantic check.
+            if item.attrs().all(|attr| {
+                attr.simple_name()
+                    .is_some_and(|attr| find_builtin_attr_idx(&Symbol::intern(&attr)).is_some())
+            }) {
+                return false;
+            }
+            self.with_ctx(|ctx| {
+                if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
+                    return true;
                 }
-                let mut segments = path.segments();
-                let mut next_segment_text = || segments.next().and_then(|it| it.name_ref());
-                // `#[core::prelude::rust_2024::test]` or `#[std::prelude::rust_2024::test]`.
-                if next_segment_text().is_some_and(|it| matches!(&*it.text(), "core" | "std"))
-                    && next_segment_text().is_some_and(|it| it.text() == "prelude")
-                    && next_segment_text().is_some()
-                    && next_segment_text()
-                        .is_some_and(|it| matches!(&*it.text(), "test" | "bench" | "test_case"))
-                {
-                    return false;
-                }
-                true
+                let adt = match item {
+                    ast::Item::Struct(it) => it.into(),
+                    ast::Item::Enum(it) => it.into(),
+                    ast::Item::Union(it) => it.into(),
+                    _ => return false,
+                };
+                ctx.has_derives(token.with_value(&adt))
             })
         })
     }
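
Note that `expand_macro_call()` and `expand_attr_macro()` now return their expansion wrapped in `InFile`, pairing the expanded node with the macro file it lives in. `InFile`'s fields are public, so a caller that only needs the tree takes `.value`. A minimal sketch, assuming `sema` and `macro_call` are in scope:

    if let Some(expanded) = sema.expand_macro_call(&macro_call) {
        let file_id = expanded.file_id; // HirFileId of the expansion
        let node = expanded.value;      // the expanded SyntaxNode itself
        // keep `file_id` around to map ranges in `node` back to real files
    }
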
@@ -8,8 +8,8 @@ use std::{iter, ops::ControlFlow};
 
 use base_db::RootQueryDb as _;
 use hir::{
-    DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
-    Semantics, SemanticsScope, Symbol, Type, TypeInfo,
+    DisplayTarget, HasAttrs, InFile, Local, ModuleDef, ModuleSource, Name, PathResolution,
+    ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
 };
 use ide_db::{
     FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,

@@ -751,7 +751,7 @@ impl<'a> CompletionContext<'a> {
             original_offset,
         } = expand_and_analyze(
             &sema,
-            original_file.syntax().clone(),
+            InFile::new(editioned_file_id.into(), original_file.syntax().clone()),
             file_with_fake_ident.syntax().clone(),
             offset,
             &original_token,
@@ -1,7 +1,7 @@
 //! Module responsible for analyzing the code surrounding the cursor for completion.
 use std::iter;
 
-use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
+use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
 use ide_db::{RootDatabase, active_parameter::ActiveParameter};
 use itertools::Either;
 use syntax::{

@@ -50,7 +50,7 @@ pub(super) struct AnalysisResult {
 
 pub(super) fn expand_and_analyze(
     sema: &Semantics<'_, RootDatabase>,
-    original_file: SyntaxNode,
+    original_file: InFile<SyntaxNode>,
     speculative_file: SyntaxNode,
     offset: TextSize,
     original_token: &SyntaxToken,

@@ -72,7 +72,7 @@ pub(super) fn expand_and_analyze(
         relative_offset,
     )
     .unwrap_or(ExpansionResult {
-        original_file,
+        original_file: original_file.value,
         speculative_file,
         original_offset: offset,
         speculative_offset: fake_ident_token.text_range().start(),

@@ -125,7 +125,7 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt
 /// the best we can do.
 fn expand_maybe_stop(
     sema: &Semantics<'_, RootDatabase>,
-    original_file: SyntaxNode,
+    original_file: InFile<SyntaxNode>,
     speculative_file: SyntaxNode,
     original_offset: TextSize,
     fake_ident_token: SyntaxToken,

@@ -142,17 +142,16 @@ fn expand_maybe_stop(
         return result;
     }
 
-    // This needs to come after the recursive call, because our "inside macro" detection is subtly wrong
-    // with regard to attribute macros named `test` that are not std's test. So hopefully we will expand
-    // them successfully above and be able to analyze.
-    // Left biased since there may already be an identifier token there, and we appended to it.
-    if !sema.might_be_inside_macro_call(&fake_ident_token)
-        && token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
-            .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
+    // We can't check whether the fake expansion is inside macro call, because that requires semantic info.
+    // But hopefully checking just the real one should be enough.
+    if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
+        .is_some_and(|original_token| {
+            !sema.is_inside_macro_call(original_file.with_value(&original_token))
+        })
     {
         // Recursion base case.
         Some(ExpansionResult {
-            original_file,
+            original_file: original_file.value,
             speculative_file,
             original_offset,
             speculative_offset: fake_ident_token.text_range().start(),

@@ -166,7 +165,7 @@ fn expand_maybe_stop(
 
 fn expand(
     sema: &Semantics<'_, RootDatabase>,
-    original_file: SyntaxNode,
+    original_file: InFile<SyntaxNode>,
     speculative_file: SyntaxNode,
     original_offset: TextSize,
     fake_ident_token: SyntaxToken,

@@ -176,7 +175,7 @@ fn expand(
 
     let parent_item =
         |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
-    let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
+    let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
         .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
     let ancestor_items = iter::successors(
         Option::zip(

@@ -249,7 +248,7 @@ fn expand(
     }
 
     // No attributes have been expanded, so look for macro_call! token trees or derive token trees
-    let orig_tt = ancestors_at_offset(&original_file, original_offset)
+    let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
         .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
         .last()?;
     let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())

@@ -292,7 +291,7 @@ fn expand(
         fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
     {
         return Some(ExpansionResult {
-            original_file,
+            original_file: original_file.value,
             speculative_file,
             original_offset,
             speculative_offset: fake_ident_token.text_range().start(),

@@ -349,7 +348,7 @@ fn expand(
     }
     let result = expand_maybe_stop(
         sema,
-        actual_expansion.clone(),
+        InFile::new(file.into(), actual_expansion.clone()),
         fake_expansion.clone(),
         new_offset,
         fake_mapped_token,
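
The speculative file with the inserted fake identifier has no semantic information behind it, so only the token in the real file can be run through `is_inside_macro_call()`; `with_value()` reuses `original_file`'s file id for that token. A stripped-down sketch of the base case above (with `offset` standing in for `original_offset + relative_offset`):

    let not_in_macro = token_at_offset_ignore_whitespace(&original_file.value, offset)
        .is_some_and(|tok| !sema.is_inside_macro_call(original_file.with_value(&tok)));
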
@@ -2111,6 +2111,56 @@ fn foo() {
     );
 }
 
+#[test]
+fn cfg_attr_attr_macro() {
+    check(
+        r#"
+//- proc_macros: identity
+#[cfg_attr(test, proc_macros::identity)]
+fn foo() {
+    $0
+}
+"#,
+        expect![[r#"
+            fn foo() fn()
+            md proc_macros
+            bt u32 u32
+            kw async
+            kw const
+            kw crate::
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+}
+
 #[test]
 fn escaped_label() {
     check(
@@ -524,6 +524,7 @@ impl<'a> FindUsages<'a> {
     fn find_nodes<'b>(
         sema: &'b Semantics<'_, RootDatabase>,
         name: &str,
+        file_id: EditionedFileId,
         node: &syntax::SyntaxNode,
         offset: TextSize,
     ) -> impl Iterator<Item = SyntaxNode> + 'b {

@@ -534,7 +535,7 @@ impl<'a> FindUsages<'a> {
             })
             .into_iter()
             .flat_map(move |token| {
-                if sema.might_be_inside_macro_call(&token) {
+                if sema.is_inside_macro_call(InFile::new(file_id.into(), &token)) {
                     sema.descend_into_macros_exact(token)
                 } else {
                     <_>::from([token])

@@ -654,11 +655,14 @@ impl<'a> FindUsages<'a> {
         let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
         for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
-            let usages =
-                FindUsages::find_nodes(sema, &current_to_process, &tree, offset)
-                    .filter(|it| {
-                        matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF)
-                    });
+            let usages = FindUsages::find_nodes(
+                sema,
+                &current_to_process,
+                file_id,
+                &tree,
+                offset,
+            )
+            .filter(|it| matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF));
             for usage in usages {
                 if let Some(alias) = usage.parent().and_then(|it| {
                     let path = ast::PathSegment::cast(it)?.parent_path();

@@ -813,7 +817,7 @@ impl<'a> FindUsages<'a> {
         let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
 
         for offset in FindUsages::match_indices(&file_text, finder, search_range) {
-            let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
+            let usages = FindUsages::find_nodes(this.sema, name, file_id, &tree, offset)
                 .filter_map(ast::NameRef::cast);
             for usage in usages {
                 let found_usage = usage

@@ -970,8 +974,8 @@ impl<'a> FindUsages<'a> {
                 return;
             }
 
-            for name in
-                Self::find_nodes(sema, name, &tree, offset).filter_map(ast::NameLike::cast)
+            for name in Self::find_nodes(sema, name, file_id, &tree, offset)
+                .filter_map(ast::NameLike::cast)
             {
                 if match name {
                     ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),

@@ -985,8 +989,8 @@ impl<'a> FindUsages<'a> {
         // Search for occurrences of the `Self` referring to our type
         if let Some((self_ty, finder)) = &include_self_kw_refs {
             for offset in Self::match_indices(&text, finder, search_range) {
-                for name_ref in
-                    Self::find_nodes(sema, "Self", &tree, offset).filter_map(ast::NameRef::cast)
+                for name_ref in Self::find_nodes(sema, "Self", file_id, &tree, offset)
+                    .filter_map(ast::NameRef::cast)
                 {
                     if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                         return;

@@ -1010,7 +1014,7 @@ impl<'a> FindUsages<'a> {
             let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
             for offset in Self::match_indices(&text, finder, search_range) {
-                for name_ref in Self::find_nodes(sema, "super", &tree, offset)
+                for name_ref in Self::find_nodes(sema, "super", file_id, &tree, offset)
                     .filter_map(ast::NameRef::cast)
                 {
                     if self.found_name_ref(&name_ref, sink) {

@@ -1020,7 +1024,7 @@ impl<'a> FindUsages<'a> {
         }
         if let Some(finder) = &is_crate_root {
             for offset in Self::match_indices(&text, finder, search_range) {
-                for name_ref in Self::find_nodes(sema, "crate", &tree, offset)
+                for name_ref in Self::find_nodes(sema, "crate", file_id, &tree, offset)
                     .filter_map(ast::NameRef::cast)
                 {
                     if self.found_name_ref(&name_ref, sink) {

@@ -1064,8 +1068,8 @@ impl<'a> FindUsages<'a> {
         let finder = &Finder::new("self");
 
         for offset in Self::match_indices(&text, finder, search_range) {
-            for name_ref in
-                Self::find_nodes(sema, "self", &tree, offset).filter_map(ast::NameRef::cast)
+            for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset)
+                .filter_map(ast::NameRef::cast)
             {
                 if self.found_self_module_name_ref(&name_ref, sink) {
                     return;
@@ -287,7 +287,7 @@ impl<'db> MatchFinder<'db> {
         if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
             if let Some(tt) = macro_call.token_tree() {
                 self.output_debug_for_nodes_at_range(
-                    &expanded,
+                    &expanded.value,
                     range,
                     &Some(self.sema.original_range(tt.syntax())),
                     out,

@@ -194,7 +194,7 @@ impl MatchFinder<'_> {
                 // nodes that originated entirely from within the token tree of the macro call.
                 // i.e. we don't want to match something that came from the macro itself.
                 if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
-                    self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
+                    self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out);
                 }
             }
         }

@@ -146,10 +146,11 @@ fn expand_macro_recur(
     offset_in_original_node: TextSize,
 ) -> Option<SyntaxNode> {
     let ExpandResult { value: expanded, err } = match macro_call {
-        item @ ast::Item::MacroCall(macro_call) => {
-            sema.expand_attr_macro(item).or_else(|| sema.expand_allowed_builtins(macro_call))?
-        }
-        item => sema.expand_attr_macro(item)?,
+        item @ ast::Item::MacroCall(macro_call) => sema
+            .expand_attr_macro(item)
+            .map(|it| it.map(|it| it.value))
+            .or_else(|| sema.expand_allowed_builtins(macro_call))?,
+        item => sema.expand_attr_macro(item)?.map(|it| it.value),
     };
     let expanded = expanded.clone_for_update();
     if let Some(err) = err {

@@ -653,7 +653,7 @@ impl<'a> WalkExpandedExprCtx<'a> {
             expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
         {
             match_ast! {
-                match expanded {
+                match (expanded.value) {
                     ast::MacroStmts(it) => {
                         self.handle_expanded(it, cb);
                     },
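
Since `expand_attr_macro()` now yields `Option<ExpandResult<InFile<SyntaxNode>>>`, `expand_macro_recur()` has to peel three layers to get back to a plain node while keeping expansion errors. The shape of that unwrapping, as a sketch:

    let flattened: Option<ExpandResult<SyntaxNode>> = sema
        .expand_attr_macro(item)            // Option<ExpandResult<InFile<SyntaxNode>>>
        .map(|res| res.map(|it| it.value)); // Option::map, then ExpandResult::map drops InFile
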