mirror of
https://github.com/rust-lang/rust-analyzer.git
synced 2025-08-19 01:50:32 +00:00
Merge #10704
10704: internal: Short-circuit `descend_into_macros_single` r=Veykril a=Veykril There is no need to descend everything if all we are interested in is the first mapping. This brings `descend_into_macros` timing in highlighting in `rust-analyzer/src/config.rs` from `154ms - descend_into_macros (2190 calls)` to `24ms - descend_into_macros (2190 calls)` since we use the single variant there (will regress once we want to highlight multiple namespaces again though). bors r+ Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
This commit is contained in:
commit
726b4dd8bd
2 changed files with 84 additions and 52 deletions
|
@ -18,7 +18,7 @@ use smallvec::{smallvec, SmallVec};
|
||||||
use syntax::{
|
use syntax::{
|
||||||
algo::skip_trivia_token,
|
algo::skip_trivia_token,
|
||||||
ast::{self, HasAttrs, HasGenericParams, HasLoopBody},
|
ast::{self, HasAttrs, HasGenericParams, HasLoopBody},
|
||||||
match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
|
match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -178,10 +178,12 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||||
self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
|
self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Descend the token into macrocalls to its first mapped counterpart.
|
||||||
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
|
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
|
||||||
self.imp.descend_into_macros(token).pop().unwrap()
|
self.imp.descend_into_macros_single(token)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Descend the token into macrocalls to all its mapped counterparts.
|
||||||
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
|
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
|
||||||
self.imp.descend_into_macros(token)
|
self.imp.descend_into_macros(token)
|
||||||
}
|
}
|
||||||
|
@ -509,72 +511,102 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
};
|
};
|
||||||
|
|
||||||
if first == last {
|
if first == last {
|
||||||
self.descend_into_macros_impl(first, |InFile { value, .. }| {
|
self.descend_into_macros_impl(
|
||||||
if let Some(node) = value.ancestors().find_map(N::cast) {
|
first,
|
||||||
res.push(node)
|
|InFile { value, .. }| {
|
||||||
}
|
if let Some(node) = value.ancestors().find_map(N::cast) {
|
||||||
});
|
res.push(node)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
false,
|
||||||
|
);
|
||||||
} else {
|
} else {
|
||||||
// Descend first and last token, then zip them to look for the node they belong to
|
// Descend first and last token, then zip them to look for the node they belong to
|
||||||
let mut scratch: SmallVec<[_; 1]> = smallvec![];
|
let mut scratch: SmallVec<[_; 1]> = smallvec![];
|
||||||
self.descend_into_macros_impl(first, |token| {
|
self.descend_into_macros_impl(
|
||||||
scratch.push(token);
|
first,
|
||||||
});
|
|token| {
|
||||||
|
scratch.push(token);
|
||||||
|
},
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
|
||||||
let mut scratch = scratch.into_iter();
|
let mut scratch = scratch.into_iter();
|
||||||
self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
|
self.descend_into_macros_impl(
|
||||||
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
|
last,
|
||||||
if first_fid == last_fid {
|
|InFile { value: last, file_id: last_fid }| {
|
||||||
if let Some(p) = first.parent() {
|
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
|
||||||
let range = first.text_range().cover(last.text_range());
|
if first_fid == last_fid {
|
||||||
let node = find_root(&p)
|
if let Some(p) = first.parent() {
|
||||||
.covering_element(range)
|
let range = first.text_range().cover(last.text_range());
|
||||||
.ancestors()
|
let node = find_root(&p)
|
||||||
.take_while(|it| it.text_range() == range)
|
.covering_element(range)
|
||||||
.find_map(N::cast);
|
.ancestors()
|
||||||
if let Some(node) = node {
|
.take_while(|it| it.text_range() == range)
|
||||||
res.push(node);
|
.find_map(N::cast);
|
||||||
|
if let Some(node) = node {
|
||||||
|
res.push(node);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
});
|
false,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
|
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
|
||||||
let mut res = smallvec![];
|
let mut res = smallvec![];
|
||||||
self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
|
self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
|
fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
|
||||||
|
let mut res = token.clone();
|
||||||
|
self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
fn descend_into_macros_impl(
|
||||||
|
&self,
|
||||||
|
token: SyntaxToken,
|
||||||
|
mut f: impl FnMut(InFile<SyntaxToken>),
|
||||||
|
single: bool,
|
||||||
|
) {
|
||||||
let _p = profile::span("descend_into_macros");
|
let _p = profile::span("descend_into_macros");
|
||||||
let parent = match token.parent() {
|
let parent = match token.parent() {
|
||||||
Some(it) => it,
|
Some(it) => it,
|
||||||
None => return,
|
None => return,
|
||||||
};
|
};
|
||||||
let sa = self.analyze(&parent);
|
let sa = self.analyze(&parent);
|
||||||
let mut stack: SmallVec<[_; 1]> = smallvec![InFile::new(sa.file_id, token)];
|
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
|
||||||
let mut cache = self.expansion_info_cache.borrow_mut();
|
let mut cache = self.expansion_info_cache.borrow_mut();
|
||||||
let mut mcache = self.macro_call_cache.borrow_mut();
|
let mut mcache = self.macro_call_cache.borrow_mut();
|
||||||
|
|
||||||
let mut process_expansion_for_token =
|
let mut process_expansion_for_token =
|
||||||
|stack: &mut SmallVec<_>, file_id, item, token: InFile<&_>| {
|
|stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
|
||||||
let mapped_tokens = cache
|
let expansion_info = cache
|
||||||
.entry(file_id)
|
.entry(macro_file)
|
||||||
.or_insert_with(|| file_id.expansion_info(self.db.upcast()))
|
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
|
||||||
.as_ref()?
|
.as_ref()?;
|
||||||
.map_token_down(self.db.upcast(), item, token)?;
|
|
||||||
|
{
|
||||||
|
let InFile { file_id, value } = expansion_info.expanded();
|
||||||
|
self.cache(value, file_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut mapped_tokens =
|
||||||
|
expansion_info.map_token_down(self.db.upcast(), item, token)?;
|
||||||
|
|
||||||
let len = stack.len();
|
let len = stack.len();
|
||||||
// requeue the tokens we got from mapping our current token down
|
// requeue the tokens we got from mapping our current token down
|
||||||
stack.extend(mapped_tokens.inspect(|token| {
|
if single {
|
||||||
if let Some(parent) = token.value.parent() {
|
stack.extend(mapped_tokens.next());
|
||||||
self.cache(find_root(&parent), token.file_id);
|
} else {
|
||||||
}
|
stack.extend(mapped_tokens);
|
||||||
}));
|
}
|
||||||
// if the length changed we have found a mapping for the token
|
// if the length changed we have found a mapping for the token
|
||||||
(stack.len() != len).then(|| ())
|
(stack.len() != len).then(|| ())
|
||||||
};
|
};
|
||||||
|
@ -606,17 +638,15 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// or are we inside a function-like macro call
|
// or are we inside a function-like macro call
|
||||||
if let Some(macro_call) = token.value.ancestors().find_map(ast::MacroCall::cast) {
|
if let Some(tt) =
|
||||||
let tt = macro_call.token_tree()?;
|
// FIXME replace map.while_some with take_while once stable
|
||||||
let l_delim = match tt.left_delimiter_token() {
|
token.value.ancestors().map(ast::TokenTree::cast).while_some().last()
|
||||||
Some(it) => it.text_range().end(),
|
{
|
||||||
None => tt.syntax().text_range().start(),
|
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
|
||||||
};
|
if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
|
||||||
let r_delim = match tt.right_delimiter_token() {
|
return None;
|
||||||
Some(it) => it.text_range().start(),
|
}
|
||||||
None => tt.syntax().text_range().end(),
|
if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
|
||||||
};
|
|
||||||
if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
|
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,7 +18,7 @@ pub mod eager;
|
||||||
use base_db::ProcMacroKind;
|
use base_db::ProcMacroKind;
|
||||||
use either::Either;
|
use either::Either;
|
||||||
|
|
||||||
pub use mbe::{ExpandError, ExpandResult};
|
pub use mbe::{ExpandError, ExpandResult, Origin};
|
||||||
|
|
||||||
use std::{hash::Hash, iter, sync::Arc};
|
use std::{hash::Hash, iter, sync::Arc};
|
||||||
|
|
||||||
|
@ -380,9 +380,11 @@ pub struct ExpansionInfo {
|
||||||
exp_map: Arc<mbe::TokenMap>,
|
exp_map: Arc<mbe::TokenMap>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub use mbe::Origin;
|
|
||||||
|
|
||||||
impl ExpansionInfo {
|
impl ExpansionInfo {
|
||||||
|
pub fn expanded(&self) -> InFile<SyntaxNode> {
|
||||||
|
self.expanded.clone()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
|
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
|
||||||
Some(self.arg.with_value(self.arg.value.parent()?))
|
Some(self.arg.with_value(self.arg.value.parent()?))
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue