Auto merge of #16054 - Veykril:fix-downmapping, r=Veykril

fix: Fix token downmapping being quadratic

Fixes https://github.com/rust-lang/rust-analyzer/issues/16050
This commit is contained in:
bors 2023-12-08 14:28:18 +00:00
commit 9c3de09f6d
2 changed files with 167 additions and 134 deletions

View file

@@ -605,8 +605,8 @@ pub struct ExpansionInfo {
} }
impl ExpansionInfo { impl ExpansionInfo {
pub fn expanded(&self) -> InFile<SyntaxNode> { pub fn expanded(&self) -> InMacroFile<SyntaxNode> {
self.expanded.clone().into() self.expanded.clone()
} }
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
@@ -617,13 +617,13 @@ impl ExpansionInfo {
pub fn map_range_down<'a>( pub fn map_range_down<'a>(
&'a self, &'a self,
span: SpanData, span: SpanData,
) -> Option<impl Iterator<Item = InMacroFile<SyntaxToken>> + 'a> { ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
let tokens = self let tokens = self
.exp_map .exp_map
.ranges_with_span(span) .ranges_with_span(span)
.flat_map(move |range| self.expanded.value.covering_element(range).into_token()); .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
Some(tokens.map(move |token| InMacroFile::new(self.expanded.file_id, token))) Some(InMacroFile::new(self.expanded.file_id, tokens))
} }
/// Looks up the span at the given offset. /// Looks up the span at the given offset.

View file

@@ -5,7 +5,7 @@ mod source_to_def;
use std::{ use std::{
cell::RefCell, cell::RefCell,
fmt, iter, mem, fmt, iter, mem,
ops::{self, ControlFlow}, ops::{self, ControlFlow, Not},
}; };
use base_db::{FileId, FileRange}; use base_db::{FileId, FileRange};
@@ -20,8 +20,8 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId, AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
}; };
use hir_expand::{ use hir_expand::{
db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, MacroCallId, MacroFileId, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
MacroFileIdExt, MacroFileId, MacroFileIdExt,
}; };
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
@@ -29,7 +29,7 @@ use smallvec::{smallvec, SmallVec};
use stdx::TupleExt; use stdx::TupleExt;
use syntax::{ use syntax::{
algo::skip_trivia_token, algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _},
match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize, TextRange, TextSize,
}; };
@@ -129,9 +129,10 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> { pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase, pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>, s2d_cache: RefCell<SourceToDefCache>,
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// Rootnode to HirFileId cache /// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache /// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>, macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
} }
@@ -616,90 +617,107 @@ impl<'db> SemanticsImpl<'db> {
res res
} }
// FIXME: should only take real file inputs for simplicity
fn descend_into_macros_impl( fn descend_into_macros_impl(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>, f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) { ) {
// FIXME: Clean this up
let _p = profile::span("descend_into_macros"); let _p = profile::span("descend_into_macros");
let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(it) => it, Some(it) => it,
None => return, None => return,
}; };
let mut cache = self.expansion_info_cache.borrow_mut(); let span = match sa.file_id.file_id() {
let mut mcache = self.macro_call_cache.borrow_mut(); Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
let span = match sa.file_id.repr() { None => {
base_db::span::HirFileIdRepr::FileId(file_id) => { stdx::never!();
self.db.real_span_map(file_id).span_for_range(token.text_range()) return;
} }
base_db::span::HirFileIdRepr::MacroFile(macro_file) => cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
.exp_map
.span_at(token.text_range().start()),
}; };
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map(); let def_map = sa.resolver.def_map();
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut process_expansion_for_token = |stack: &mut SmallVec<_>, macro_file| { let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache let expansion_info = cache
.entry(macro_file) .entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast())); .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
{ {
let InFile { file_id, value } = expansion_info.expanded(); let InMacroFile { file_id, value } = expansion_info.expanded();
self.cache(value, file_id); self.cache(value, file_id.into());
} }
let mapped_tokens = expansion_info.map_range_down(span)?; let InMacroFile { file_id, value: mapped_tokens } =
let len = stack.len(); expansion_info.map_range_down(span)?;
let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
// requeue the tokens we got from mapping our current token down
stack.extend(mapped_tokens.map(Into::into));
// if the length changed we have found a mapping for the token // if the length changed we have found a mapping for the token
(stack.len() != len).then_some(()) let res = mapped_tokens.is_empty().not().then_some(());
// requeue the tokens we got from mapping our current token down
stack.push((HirFileId::from(file_id), mapped_tokens));
res
}; };
// Remap the next token in the queue into a macro call its in, if it is not being remapped let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
// either due to not being in a macro-call or because its unused push it into the result vec,
// otherwise push the remapped tokens back into the queue as they can potentially be remapped again. while let Some((file_id, mut tokens)) = stack.pop() {
while let Some(token) = stack.pop() { while let Some(token) = tokens.pop() {
let was_not_remapped = (|| { let was_not_remapped = (|| {
// First expand into attribute invocations // First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| { let containing_attribute_macro_call = self.with_ctx(|ctx| {
token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
if item.attrs().next().is_none() { if item.attrs().next().is_none() {
// Don't force populate the dyn cache for items that don't have an attribute anyways // Don't force populate the dyn cache for items that don't have an attribute anyways
return None; return None;
} }
Some(ctx.item_to_macro_call(token.with_value(item.clone()))?) Some((
ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
item,
))
}) })
}); });
if let Some(call_id) = containing_attribute_macro_call { if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file(); let file_id = call_id.as_macro_file();
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index()
}
_ => 0,
};
let text_range = item.syntax().text_range();
let start = item
.doc_comments_and_attrs()
.nth(attr_id)
.map(|attr| match attr {
Either::Left(it) => it.syntax().text_range().start(),
Either::Right(it) => it.syntax().text_range().start(),
})
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
return process_expansion_for_token(&mut stack, file_id); return process_expansion_for_token(&mut stack, file_id);
} }
// Then check for token trees, that means we are either in a function-like macro or // Then check for token trees, that means we are either in a function-like macro or
// secondary attribute inputs // secondary attribute inputs
let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?; let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
let parent = tt.syntax().parent()?; let parent = tt.syntax().parent()?;
if tt.left_delimiter_token().map_or(false, |it| it == token.value) { if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None; return None;
} }
if tt.right_delimiter_token().map_or(false, |it| it == token.value) { if tt.right_delimiter_token().map_or(false, |it| it == token) {
return None; return None;
} }
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) { if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> = let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
token.with_value(macro_call); InFile::new(file_id, macro_call);
let file_id = match mcache.get(&mcall) { let file_id = match mcache.get(&mcall) {
Some(&it) => it, Some(&it) => it,
None => { None => {
@@ -708,20 +726,25 @@ impl<'db> SemanticsImpl<'db> {
it it
} }
}; };
let text_range = tt.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
process_expansion_for_token(&mut stack, file_id) process_expansion_for_token(&mut stack, file_id)
} else if let Some(meta) = ast::Meta::cast(parent) { } else if let Some(meta) = ast::Meta::cast(parent) {
// attribute we failed expansion for earlier, this might be a derive invocation // attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute // or derive helper attribute
let attr = meta.parent_attr()?; let attr = meta.parent_attr()?;
let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) { let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
{
// this might be a derive, or a derive helper on an ADT // this might be a derive, or a derive helper on an ADT
let derive_call = self.with_ctx(|ctx| { let derive_call = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion // so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call( ctx.attr_to_derive_macro_call(
token.with_value(&adt), InFile::new(file_id, &adt),
token.with_value(attr.clone()), InFile::new(file_id, attr.clone()),
) )
.map(|(_, call_id, _)| call_id) .map(|(_, call_id, _)| call_id)
}); });
@@ -730,13 +753,18 @@ impl<'db> SemanticsImpl<'db> {
Some(call_id) => { Some(call_id) => {
// resolved to a derive // resolved to a derive
let file_id = call_id.as_macro_file(); let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
return process_expansion_for_token(&mut stack, file_id); return process_expansion_for_token(&mut stack, file_id);
} }
None => Some(adt), None => Some(adt),
} }
} else { } else {
// Otherwise this could be a derive helper on a variant or field // Otherwise this could be a derive helper on a variant or field
if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast) if let Some(field) =
attr.syntax().parent().and_then(ast::RecordField::cast)
{ {
field.syntax().ancestors().take(4).find_map(ast::Adt::cast) field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(field) = } else if let Some(field) =
@@ -751,19 +779,23 @@ impl<'db> SemanticsImpl<'db> {
None None
} }
}?; }?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) { if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
return None; return None;
} }
// Not an attribute, nor a derive, so it's either a builtin or a derive helper // Not an attribute, nor a derive, so it's either a builtin or a derive helper
// Try to resolve to a derive helper and downmap // Try to resolve to a derive helper and downmap
let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); let attr_name =
let id = self.db.ast_id_map(token.file_id).ast_id(&adt); attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
let helpers = let id = self.db.ast_id_map(file_id).ast_id(&adt);
def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
let mut res = None; let mut res = None;
for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { for (.., derive) in
res = helpers.iter().filter(|(helper, ..)| *helper == attr_name)
res.or(process_expansion_for_token(&mut stack, derive.as_macro_file())); {
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
));
} }
res res
} else { } else {
@@ -772,11 +804,12 @@ impl<'db> SemanticsImpl<'db> {
})() })()
.is_none(); .is_none();
if was_not_remapped && f(token).is_break() { if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
break; break;
} }
} }
} }
}
// Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
// traversing the inner iterator when it finds a node. // traversing the inner iterator when it finds a node.