Thread file id through descension API for semantic highlighting

This commit is contained in:
Lukas Wirth 2024-08-22 16:45:37 +02:00
parent 354ab7a9e8
commit c2a07e21f5
3 changed files with 115 additions and 73 deletions

View file

@ -461,3 +461,12 @@ impl<N: AstNode> InFile<N> {
Some(InRealFile::new(file_id, value)) Some(InRealFile::new(file_id, value))
} }
} }
impl<T> InFile<T> {
pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileIdRepr::MacroFile(_) => Err(self),
}
}
}

View file

@ -550,7 +550,9 @@ impl<'db> SemanticsImpl<'db> {
string: &ast::String, string: &ast::String,
) -> Option<Vec<(TextRange, Option<PathResolution>)>> { ) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
let quote = string.open_quote_text_range()?; let quote = string.open_quote_text_range()?;
self.descend_into_macros_breakable(string.syntax().clone(), |token| {
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
self.descend_into_macros_breakable(token, |token| {
(|| { (|| {
let token = token.value; let token = token.value;
let string = ast::String::cast(token)?; let string = ast::String::cast(token)?;
@ -576,8 +578,9 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize, offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> { ) -> Option<(TextRange, Option<PathResolution>)> {
let original_string = ast::String::cast(original_token.clone())?; let original_string = ast::String::cast(original_token.clone())?;
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
let quote = original_string.open_quote_text_range()?; let quote = original_string.open_quote_text_range()?;
self.descend_into_macros_breakable(original_token.clone(), |token| { self.descend_into_macros_breakable(original_token, |token| {
(|| { (|| {
let token = token.value; let token = token.value;
self.resolve_offset_in_format_args( self.resolve_offset_in_format_args(
@ -617,30 +620,37 @@ impl<'db> SemanticsImpl<'db> {
Some(it) => it, Some(it) => it,
None => return res, None => return res,
}; };
let file = self.find_file(node.syntax());
let Some(file_id) = file.file_id.file_id() else {
return res;
};
if first == last { if first == last {
// node is just the token, so descend the token // node is just the token, so descend the token
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| { self.descend_into_macros_impl(
if let Some(node) = value InRealFile::new(file_id, first),
.parent_ancestors() &mut |InFile { value, .. }| {
.take_while(|it| it.text_range() == value.text_range()) if let Some(node) = value
.find_map(N::cast) .parent_ancestors()
{ .take_while(|it| it.text_range() == value.text_range())
res.push(node) .find_map(N::cast)
} {
CONTINUE_NO_BREAKS res.push(node)
}); }
CONTINUE_NO_BREAKS
},
);
} else { } else {
// Descend first and last token, then zip them to look for the node they belong to // Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![]; let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, &mut |token| { self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
scratch.push(token); scratch.push(token);
CONTINUE_NO_BREAKS CONTINUE_NO_BREAKS
}); });
let mut scratch = scratch.into_iter(); let mut scratch = scratch.into_iter();
self.descend_into_macros_impl( self.descend_into_macros_impl(
last, InRealFile::new(file_id, last),
&mut |InFile { value: last, file_id: last_fid }| { &mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid { if first_fid == last_fid {
@ -669,18 +679,22 @@ impl<'db> SemanticsImpl<'db> {
token: SyntaxToken, token: SyntaxToken,
mut cb: impl FnMut(InFile<SyntaxToken>), mut cb: impl FnMut(InFile<SyntaxToken>),
) { ) {
self.descend_into_macros_impl(token.clone(), &mut |t| { if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
cb(t); self.descend_into_macros_impl(token, &mut |t| {
CONTINUE_NO_BREAKS cb(t);
}); CONTINUE_NO_BREAKS
});
}
} }
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![]; let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), &mut |t| { if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
res.push(t.value); self.descend_into_macros_impl(token, &mut |t| {
CONTINUE_NO_BREAKS res.push(t.value);
}); CONTINUE_NO_BREAKS
});
}
if res.is_empty() { if res.is_empty() {
res.push(token); res.push(token);
} }
@ -689,7 +703,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn descend_into_macros_breakable<T>( pub fn descend_into_macros_breakable<T>(
&self, &self,
token: SyntaxToken, token: InRealFile<SyntaxToken>,
mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>, mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
) -> Option<T> { ) -> Option<T> {
self.descend_into_macros_impl(token.clone(), &mut cb) self.descend_into_macros_impl(token.clone(), &mut cb)
@ -721,28 +735,36 @@ impl<'db> SemanticsImpl<'db> {
pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken { pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
let text = token.text(); let text = token.text();
let kind = token.kind(); let kind = token.kind();
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
let mapped_kind = value.kind(); let mapped_kind = value.kind();
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let any_ident_match =
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); || kind.is_any_identifier() && value.kind().is_any_identifier();
if matches { let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
ControlFlow::Break(value) if matches {
} else { ControlFlow::Break(value)
ControlFlow::Continue(()) } else {
} ControlFlow::Continue(())
}) }
})
} else {
None
}
.unwrap_or(token) .unwrap_or(token)
} }
fn descend_into_macros_impl<T>( fn descend_into_macros_impl<T>(
&self, &self,
token: SyntaxToken, InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>, f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
) -> Option<T> { ) -> Option<T> {
let _p = tracing::info_span!("descend_into_macros_impl").entered(); let _p = tracing::info_span!("descend_into_macros_impl").entered();
let (sa, span, file_id) = let (sa, span, file_id) = token
token.parent().and_then(|parent| self.analyze_no_infer(&parent)).and_then(|sa| { .parent()
.and_then(|parent| {
self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false)
})
.and_then(|sa| {
let file_id = sa.file_id.file_id()?; let file_id = sa.file_id.file_id()?;
Some(( Some((
sa, sa,
@ -1400,11 +1422,13 @@ impl<'db> SemanticsImpl<'db> {
/// Returns none if the file of the node is not part of a crate. /// Returns none if the file of the node is not part of a crate.
fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> { fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
let node = self.find_file(node);
self.analyze_impl(node, None, true) self.analyze_impl(node, None, true)
} }
/// Returns none if the file of the node is not part of a crate. /// Returns none if the file of the node is not part of a crate.
fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> { fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
let node = self.find_file(node);
self.analyze_impl(node, None, false) self.analyze_impl(node, None, false)
} }
@ -1413,17 +1437,17 @@ impl<'db> SemanticsImpl<'db> {
node: &SyntaxNode, node: &SyntaxNode,
offset: TextSize, offset: TextSize,
) -> Option<SourceAnalyzer> { ) -> Option<SourceAnalyzer> {
let node = self.find_file(node);
self.analyze_impl(node, Some(offset), false) self.analyze_impl(node, Some(offset), false)
} }
fn analyze_impl( fn analyze_impl(
&self, &self,
node: &SyntaxNode, node: InFile<&SyntaxNode>,
offset: Option<TextSize>, offset: Option<TextSize>,
infer_body: bool, infer_body: bool,
) -> Option<SourceAnalyzer> { ) -> Option<SourceAnalyzer> {
let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered(); let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?; let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@ -1468,6 +1492,11 @@ impl<'db> SemanticsImpl<'db> {
InFile::new(file_id, node) InFile::new(file_id, node)
} }
fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
InFile::new(file_id, token)
}
/// Wraps the node in a [`InFile`] with the file id it belongs to. /// Wraps the node in a [`InFile`] with the file id it belongs to.
fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> { fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
let root_node = find_root(node); let root_node = find_root(node);

View file

@ -15,7 +15,7 @@ mod tests;
use std::ops::ControlFlow; use std::ops::ControlFlow;
use hir::{Name, Semantics}; use hir::{InRealFile, Name, Semantics};
use ide_db::{FxHashMap, RootDatabase, SymbolKind}; use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use span::EditionedFileId; use span::EditionedFileId;
use syntax::{ use syntax::{
@ -409,43 +409,47 @@ fn traverse(
let mut r = 0; let mut r = 0;
// FIXME: Add an extra API that takes the file id of this. That is a simple way // FIXME: Add an extra API that takes the file id of this. That is a simple way
// to prevent us constantly walking up the tree to fetch the file // to prevent us constantly walking up the tree to fetch the file
sema.descend_into_macros_breakable(token.clone(), |tok| { sema.descend_into_macros_breakable(
let tok = tok.value; InRealFile::new(file_id, token.clone()),
let tok_kind = tok.kind(); |tok| {
let tok = tok.value;
let tok_kind = tok.kind();
let exact_same_kind = tok_kind == kind; let exact_same_kind = tok_kind == kind;
let both_idents = let both_idents =
exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); exact_same_kind || (tok_kind.is_any_identifier() && ident_kind);
let same_text = tok.text() == text; let same_text = tok.text() == text;
// anything that mapped into a token tree has likely no semantic information // anything that mapped into a token tree has likely no semantic information
let no_tt_parent = tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); let no_tt_parent =
let my_rank = (both_idents as usize) tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE);
| ((exact_same_kind as usize) << 1) let my_rank = (both_idents as usize)
| ((same_text as usize) << 2) | ((exact_same_kind as usize) << 1)
| ((no_tt_parent as usize) << 3); | ((same_text as usize) << 2)
| ((no_tt_parent as usize) << 3);
if my_rank > 0b1110 { if my_rank > 0b1110 {
// a rank of 0b1110 means that we have found a maximally interesting // a rank of 0b1110 means that we have found a maximally interesting
// token so stop early. // token so stop early.
t = Some(tok); t = Some(tok);
return ControlFlow::Break(()); return ControlFlow::Break(());
}
// r = r.max(my_rank);
// t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok));
match &mut t {
Some(prev) if r < my_rank => {
*prev = tok;
r = my_rank;
} }
Some(_) => (),
None => { // r = r.max(my_rank);
r = my_rank; // t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok));
t = Some(tok) match &mut t {
Some(prev) if r < my_rank => {
*prev = tok;
r = my_rank;
}
Some(_) => (),
None => {
r = my_rank;
t = Some(tok)
}
} }
} ControlFlow::Continue(())
ControlFlow::Continue(()) },
}); );
let token = t.unwrap_or(token); let token = t.unwrap_or(token);
match token.parent().and_then(ast::NameLike::cast) { match token.parent().and_then(ast::NameLike::cast) {