Short-circuit descend_into_macros_single

Lukas Wirth 2021-11-05 16:02:56 +01:00
parent addd93ed9a
commit 4d768bede0

@@ -178,10 +178,12 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
     }
 
+    /// Descend the token into macrocalls to its first mapped counterpart.
     pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros(token).pop().unwrap()
+        self.imp.descend_into_macros_single(token)
     }
 
+    /// Descend the token into macrocalls to all its mapped counterparts.
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
@@ -509,47 +511,70 @@ impl<'db> SemanticsImpl<'db> {
         };
         if first == last {
-            self.descend_into_macros_impl(first, |InFile { value, .. }| {
-                if let Some(node) = value.ancestors().find_map(N::cast) {
-                    res.push(node)
-                }
-            });
+            self.descend_into_macros_impl(
+                first,
+                |InFile { value, .. }| {
+                    if let Some(node) = value.ancestors().find_map(N::cast) {
+                        res.push(node)
+                    }
+                },
+                false,
+            );
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, |token| {
-                scratch.push(token);
-            });
+            self.descend_into_macros_impl(
+                first,
+                |token| {
+                    scratch.push(token);
+                },
+                false,
+            );
 
             let mut scratch = scratch.into_iter();
-            self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
-                if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
-                    if first_fid == last_fid {
-                        if let Some(p) = first.parent() {
-                            let range = first.text_range().cover(last.text_range());
-                            let node = find_root(&p)
-                                .covering_element(range)
-                                .ancestors()
-                                .take_while(|it| it.text_range() == range)
-                                .find_map(N::cast);
-                            if let Some(node) = node {
-                                res.push(node);
-                            }
-                        }
-                    }
-                }
-            });
+            self.descend_into_macros_impl(
+                last,
+                |InFile { value: last, file_id: last_fid }| {
+                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+                        if first_fid == last_fid {
+                            if let Some(p) = first.parent() {
+                                let range = first.text_range().cover(last.text_range());
+                                let node = find_root(&p)
+                                    .covering_element(range)
+                                    .ancestors()
+                                    .take_while(|it| it.text_range() == range)
+                                    .find_map(N::cast);
+                                if let Some(node) = node {
+                                    res.push(node);
+                                }
+                            }
+                        }
+                    }
+                },
+                false,
+            );
         }
         res
     }
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
         res
     }
 
-    fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
+    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        let mut res = token.clone();
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
+        res
+    }
+
+    fn descend_into_macros_impl(
+        &self,
+        token: SyntaxToken,
+        mut f: impl FnMut(InFile<SyntaxToken>),
+        single: bool,
+    ) {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
             None => return,
@@ -572,11 +597,16 @@ impl<'db> SemanticsImpl<'db> {
                     self.cache(value, file_id);
                 }
 
-                let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+                let mut mapped_tokens =
+                    expansion_info.map_token_down(self.db.upcast(), item, token)?;
 
                 let len = stack.len();
                 // requeue the tokens we got from mapping our current token down
-                stack.extend(mapped_tokens);
+                if single {
+                    stack.extend(mapped_tokens.next());
+                } else {
+                    stack.extend(mapped_tokens);
+                }
                 // if the length changed we have found a mapping for the token
                 (stack.len() != len).then(|| ())
             };
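
For readers skimming the diff, here is a minimal standalone sketch of the short-circuit pattern the commit introduces. The names (descend_impl, map_token_down, descend_single, descend_all) and the u32 "tokens" are hypothetical stand-ins for rust-analyzer's SyntaxToken/InFile machinery; only the single: bool flag and the stack.extend(mapped.next()) trick mirror the actual change.

// Minimal, self-contained sketch (hypothetical names and u32 "tokens" instead of
// rust-analyzer's SyntaxToken/InFile types) of the short-circuit this commit adds:
// the worklist descent takes a `single` flag, and when it is set only the first
// mapped counterpart is requeued instead of every one.

// Hypothetical mapping: "tokens" below 3 expand to two counterparts each.
fn map_token_down(token: u32) -> Vec<u32> {
    if token < 3 {
        vec![token + 10, token + 20]
    } else {
        vec![]
    }
}

fn descend_impl(start: u32, mut f: impl FnMut(u32), single: bool) {
    // Stand-in for the `stack` worklist in the diff.
    let mut stack = vec![start];
    while let Some(token) = stack.pop() {
        let mut mapped = map_token_down(token).into_iter();
        let len = stack.len();
        if single {
            // Short circuit: requeue at most one mapped token
            // (Option<u32> implements IntoIterator, as in the diff).
            stack.extend(mapped.next());
        } else {
            stack.extend(mapped);
        }
        // Nothing requeued means `token` has no further mapping; report it.
        if stack.len() == len {
            f(token);
        }
    }
}

fn descend_single(start: u32) -> u32 {
    let mut res = start;
    descend_impl(start, |t| res = t, true);
    res
}

fn descend_all(start: u32) -> Vec<u32> {
    let mut res = Vec::new();
    descend_impl(start, |t| res.push(t), false);
    res
}

fn main() {
    // Only the first mapping chain is followed when short-circuiting...
    assert_eq!(descend_single(1), 11);
    // ...while the plain descent visits every mapped counterpart.
    assert_eq!(descend_all(1).len(), 2);
}

The payoff, as the commit title says, is that a caller like descend_into_macros_single, which only ever needs one counterpart, no longer expands and collects every mapping just to discard all but one.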