diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index b9b94fd22a..aaa9e308f1 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -498,68 +498,65 @@ impl<'db> SemanticsImpl<'db> {
         // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
         while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
-
             let was_not_remapped = (|| {
-                for node in token.value.ancestors() {
-                    if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
-                        let tt = match macro_call.token_tree() {
-                            Some(tt) => tt,
-                            None => continue,
-                        };
-                        let l_delim = match tt.left_delimiter_token() {
-                            Some(it) => it.text_range().end(),
-                            None => tt.syntax().text_range().start(),
-                        };
-                        let r_delim = match tt.right_delimiter_token() {
-                            Some(it) => it.text_range().start(),
-                            None => tt.syntax().text_range().end(),
-                        };
-                        if !TextRange::new(l_delim, r_delim)
-                            .contains_range(token.value.text_range())
-                        {
-                            continue;
-                        }
-                        let file_id = match sa.expand(self.db, token.with_value(&macro_call)) {
-                            Some(file_id) => file_id,
-                            None => continue,
-                        };
-                        let tokens = cache
-                            .entry(file_id)
-                            .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                            .as_ref()?
-                            .map_token_down(self.db.upcast(), None, token.as_ref())?;
+                if let Some((call_id, item)) = token
+                    .value
+                    .ancestors()
+                    .filter_map(ast::Item::cast)
+                    .filter_map(|item| {
+                        self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone())))
+                            .zip(Some(item))
+                    })
+                    .last()
+                {
+                    let file_id = call_id.as_file();
+                    let tokens = cache
+                        .entry(file_id)
+                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                        .as_ref()?
+                        .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
 
-                        let len = queue.len();
-                        queue.extend(tokens.inspect(|token| {
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
-                        }));
-                        return (queue.len() != len).then(|| ());
-                    } else if let Some(item) = ast::Item::cast(node.clone()) {
-                        if let Some(call_id) = self
-                            .with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone())))
-                        {
-                            let file_id = call_id.as_file();
-                            let tokens = cache
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
-
-                            let len = queue.len();
-                            queue.extend(tokens.inspect(|token| {
-                                if let Some(parent) = token.value.parent() {
-                                    self.cache(find_root(&parent), token.file_id);
-                                }
-                            }));
-                            return (queue.len() != len).then(|| ());
-                        }
-                    }
+                    let len = queue.len();
+                    queue.extend(tokens.inspect(|token| {
+                        if let Some(parent) = token.value.parent() {
+                            self.cache(find_root(&parent), token.file_id);
+                        }
+                    }));
+                    return (queue.len() != len).then(|| ());
+                }
+
+                if let Some(macro_call) = token.value.ancestors().find_map(ast::MacroCall::cast) {
+                    let tt = macro_call.token_tree()?;
+                    let l_delim = match tt.left_delimiter_token() {
+                        Some(it) => it.text_range().end(),
+                        None => tt.syntax().text_range().start(),
+                    };
+                    let r_delim = match tt.right_delimiter_token() {
+                        Some(it) => it.text_range().start(),
+                        None => tt.syntax().text_range().end(),
+                    };
+                    if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                        return None;
+                    }
+                    let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                    let tokens = cache
+                        .entry(file_id)
+                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                        .as_ref()?
+                        .map_token_down(self.db.upcast(), None, token.as_ref())?;
+
+                    let len = queue.len();
+                    queue.extend(tokens.inspect(|token| {
+                        if let Some(parent) = token.value.parent() {
+                            self.cache(find_root(&parent), token.file_id);
+                        }
+                    }));
+                    return (queue.len() != len).then(|| ());
                 }
                 None
             })()
             .is_none();
+
             if was_not_remapped {
                 res.push(token.value)
             }
diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs
index 35c219cadd..b7f7e35e2f 100644
--- a/crates/ide/src/fixture.rs
+++ b/crates/ide/src/fixture.rs
@@ -1,4 +1,5 @@
 //! Utilities for creating `Analysis` instances for tests.
+use hir::db::DefDatabase;
 use ide_db::base_db::fixture::ChangeFixture;
 use test_utils::{extract_annotations, RangeOrOffset};
 
@@ -44,6 +45,7 @@ pub(crate) fn range_or_position(ra_fixture: &str) -> (Analysis, FileId, RangeOrO
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
 pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
+    host.db.set_enable_proc_attr_macros(true);
     let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index ca22b10dfc..645b405e5e 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -220,6 +220,29 @@ mod tests {
         assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs)
     }
 
+    #[test]
+    fn goto_def_in_mac_call_in_attr_invoc() {
+        check(
+            r#"
+//- proc_macros: identity
+pub struct Struct {
+        // ^^^^^^
+    field: i32,
+}
+
+macro_rules! identity {
+    ($($tt:tt)*) => {$($tt)*};
+}
+
+#[proc_macros::identity]
+fn function() {
+    identity!(Struct$0 { field: 0 });
+}
+
+"#,
+        )
+    }
+
     #[test]
     fn goto_def_for_extern_crate() {
         check(