fix: handle highlightings inside macro calls & only highlight kws in current file

roife 2024-07-10 22:51:23 +08:00
parent d94dcfa841
commit 55cd8ab904
4 changed files with 277 additions and 219 deletions

crates/ide/src/goto_definition.rs

@@ -2,10 +2,10 @@ use std::{iter, mem::discriminant};
 use crate::{
     doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
-    RangeInfo, TryToNav, UpmappingResult,
+    RangeInfo, TryToNav,
 };
 use hir::{
-    AsAssocItem, AssocItem, DescendPreference, InFile, MacroFileIdExt, ModuleDef, Semantics,
+    AsAssocItem, AssocItem, DescendPreference, FileRange, InFile, MacroFileIdExt, ModuleDef, Semantics,
 };
 use ide_db::{
     base_db::{AnchoredPath, FileLoader},
@@ -14,11 +14,12 @@ use ide_db::{
     FileId, RootDatabase,
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{self, HasLoopBody, Label},
+    ast::{self, HasLoopBody},
     match_ast, AstNode, AstToken,
     SyntaxKind::*,
-    SyntaxToken, TextRange, T,
+    SyntaxNode, SyntaxToken, TextRange, T,
 };

 // Feature: Go to Definition
@@ -208,136 +209,127 @@ fn handle_control_flow_keywords(
     match token.kind() {
         // For `fn` / `loop` / `while` / `for` / `async`, return the keyword it self,
         // so that VSCode will find the references when using `ctrl + click`
-        T![fn] | T![async] | T![try] | T![return] => try_find_fn_or_closure(sema, token),
-        T![loop] | T![while] | T![break] | T![continue] => try_find_loop(sema, token),
+        T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token),
+        T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token),
         T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
-            try_find_loop(sema, token)
+            nav_for_break_points(sema, token)
         }
         _ => None,
     }
 }

-fn try_find_fn_or_closure(
+pub(crate) fn find_fn_or_blocks(
+    sema: &Semantics<'_, RootDatabase>,
+    token: &SyntaxToken,
+) -> Vec<SyntaxNode> {
+    let find_ancestors = |token: SyntaxToken| {
+        let token_kind = token.kind();
+
+        for anc in sema.token_ancestors_with_macros(token) {
+            let node = match_ast! {
+                match anc {
+                    ast::Fn(fn_) => fn_.syntax().clone(),
+                    ast::ClosureExpr(c) => c.syntax().clone(),
+                    ast::BlockExpr(blk) => {
+                        match blk.modifier() {
+                            Some(ast::BlockModifier::Async(_)) => blk.syntax().clone(),
+                            Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => blk.syntax().clone(),
+                            _ => continue,
+                        }
+                    },
+                    _ => continue,
+                }
+            };
+
+            return Some(node);
+        }
+        None
+    };
+
+    sema.descend_into_macros(DescendPreference::None, token.clone())
+        .into_iter()
+        .filter_map(find_ancestors)
+        .collect_vec()
+}
+
+fn nav_for_exit_points(
     sema: &Semantics<'_, RootDatabase>,
     token: &SyntaxToken,
 ) -> Option<Vec<NavigationTarget>> {
-    fn find_exit_point(
-        sema: &Semantics<'_, RootDatabase>,
-        token: SyntaxToken,
-    ) -> Option<UpmappingResult<NavigationTarget>> {
-        let db = sema.db;
+    let db = sema.db;
+    let token_kind = token.kind();

-        for anc in sema.token_ancestors_with_macros(token.clone()) {
-            let file_id = sema.hir_file_for(&anc);
+    let navs = find_fn_or_blocks(sema, token)
+        .into_iter()
+        .filter_map(|node| {
+            let file_id = sema.hir_file_for(&node);

             match_ast! {
-                match anc {
+                match node {
                     ast::Fn(fn_) => {
-                        let fn_: ast::Fn = fn_;
-                        let nav = sema.to_def(&fn_)?.try_to_nav(db)?;
+                        let mut nav = sema.to_def(&fn_)?.try_to_nav(db)?;
                         // For async token, we navigate to itself, which triggers
                         // VSCode to find the references
-                        let focus_token = if matches!(token.kind(), T![async]) {
+                        let focus_token = if matches!(token_kind, T![async]) {
                             fn_.async_token()?
                         } else {
                             fn_.fn_token()?
                         };

-                        let focus_range = InFile::new(file_id, focus_token.text_range())
+                        let focus_frange = InFile::new(file_id, focus_token.text_range())
                             .original_node_file_range_opt(db)
-                            .map(|(frange, _)| frange.range);
-                        return Some(nav.map(|it| {
-                            if focus_range.is_some_and(|range| it.full_range.contains_range(range)) {
-                                NavigationTarget { focus_range, ..it }
-                            } else {
-                                it
-                            }
-                        }));
+                            .map(|(frange, _)| frange);
+
+                        if let Some(FileRange { file_id, range }) = focus_frange {
+                            let contains_frange = |nav: &NavigationTarget| {
+                                nav.file_id == file_id && nav.full_range.contains_range(range)
+                            };
+
+                            if let Some(def_site) = nav.def_site.as_mut() {
+                                if contains_frange(def_site) {
+                                    def_site.focus_range = Some(range);
+                                }
+                            } else if contains_frange(&nav.call_site) {
+                                nav.call_site.focus_range = Some(range);
+                            }
+                        }
+
+                        Some(nav)
                     },
                     ast::ClosureExpr(c) => {
-                        let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.into();
-                        let c_infile = InFile::new(file_id, c.into());
-                        let nav = NavigationTarget::from_expr(db, c_infile, pipe_tok);
-                        return Some(nav);
+                        let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.text_range();
+                        let closure_in_file = InFile::new(file_id, c.into());
+                        Some(NavigationTarget::from_expr(db, closure_in_file, Some(pipe_tok)))
                     },
                     ast::BlockExpr(blk) => {
                         match blk.modifier() {
                             Some(ast::BlockModifier::Async(_)) => {
-                                let async_tok = blk.async_token()?.into();
-                                let blk_infile = InFile::new(file_id, blk.into());
-                                let nav = NavigationTarget::from_expr(db, blk_infile, async_tok);
-                                return Some(nav);
+                                let async_tok = blk.async_token()?.text_range();
+                                let blk_in_file = InFile::new(file_id, blk.into());
+                                Some(NavigationTarget::from_expr(db, blk_in_file, Some(async_tok)))
                             },
-                            Some(ast::BlockModifier::Try(_)) if token.kind() != T![return] => {
-                                let try_tok = blk.try_token()?.into();
-                                let blk_infile = InFile::new(file_id, blk.into());
-                                let nav = NavigationTarget::from_expr(db, blk_infile, try_tok);
-                                return Some(nav);
+                            Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => {
+                                let try_tok = blk.try_token()?.text_range();
+                                let blk_in_file = InFile::new(file_id, blk.into());
+                                Some(NavigationTarget::from_expr(db, blk_in_file, Some(try_tok)))
                             },
-                            _ => {}
+                            _ => None,
                         }
                     },
-                    _ => {}
+                    _ => None,
                 }
             }
-        }
-        None
-    }
-
-    sema.descend_into_macros(DescendPreference::None, token.clone())
-        .into_iter()
-        .filter_map(|descended| find_exit_point(sema, descended))
+        })
         .flatten()
-        .collect_vec()
-        .into()
+        .collect_vec();
+
+    Some(navs)
 }

-fn try_find_loop(
+pub(crate) fn find_loops(
     sema: &Semantics<'_, RootDatabase>,
     token: &SyntaxToken,
-) -> Option<Vec<NavigationTarget>> {
-    fn find_break_point(
-        sema: &Semantics<'_, RootDatabase>,
-        token: SyntaxToken,
-        label_matches: impl Fn(Option<Label>) -> bool,
-    ) -> Option<UpmappingResult<NavigationTarget>> {
-        let db = sema.db;
-        let file_id = sema.hir_file_for(&token.parent()?);
-
-        for anc in sema.token_ancestors_with_macros(token.clone()).filter_map(ast::Expr::cast) {
-            match anc {
-                ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => {
-                    let expr = ast::Expr::LoopExpr(loop_.clone());
-                    let loop_tok = loop_.loop_token()?.into();
-                    let nav = NavigationTarget::from_expr(db, InFile::new(file_id, expr), loop_tok);
-                    return Some(nav);
-                }
-                ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => {
-                    let expr = ast::Expr::WhileExpr(while_.clone());
-                    let while_tok = while_.while_token()?.into();
-                    let nav =
-                        NavigationTarget::from_expr(db, InFile::new(file_id, expr), while_tok);
-                    return Some(nav);
-                }
-                ast::Expr::ForExpr(for_) if label_matches(for_.label()) => {
-                    let expr = ast::Expr::ForExpr(for_.clone());
-                    let for_tok = for_.for_token()?.into();
-                    let nav = NavigationTarget::from_expr(db, InFile::new(file_id, expr), for_tok);
-                    return Some(nav);
-                }
-                ast::Expr::BlockExpr(blk)
-                    if blk.label().is_some() && label_matches(blk.label()) =>
-                {
-                    let expr = ast::Expr::BlockExpr(blk.clone());
-                    let lbl = blk.label().unwrap().syntax().clone().into();
-                    let nav = NavigationTarget::from_expr(db, InFile::new(file_id, expr), lbl);
-                    return Some(nav);
-                }
-                _ => {}
-            }
-        }
-        None
-    }
-
+) -> Option<Vec<ast::Expr>> {
     let parent = token.parent()?;
     let lbl = match_ast! {
         match parent {
@@ -353,14 +345,60 @@ fn try_find_loop(
         (Some(_), None) => false,
     };

+    let find_ancestors = |token: SyntaxToken| {
+        for anc in sema.token_ancestors_with_macros(token).filter_map(ast::Expr::cast) {
+            let node = match &anc {
+                ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => anc,
+                ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => anc,
+                ast::Expr::ForExpr(for_) if label_matches(for_.label()) => anc,
+                ast::Expr::BlockExpr(blk)
+                    if blk.label().is_some() && label_matches(blk.label()) =>
+                {
+                    anc
+                }
+                _ => continue,
+            };
+
+            return Some(node);
+        }
+        None
+    };
+
     sema.descend_into_macros(DescendPreference::None, token.clone())
         .into_iter()
-        .filter_map(|descended| find_break_point(sema, descended, label_matches))
-        .flatten()
+        .filter_map(find_ancestors)
         .collect_vec()
         .into()
 }

+fn nav_for_break_points(
+    sema: &Semantics<'_, RootDatabase>,
+    token: &SyntaxToken,
+) -> Option<Vec<NavigationTarget>> {
+    let db = sema.db;
+
+    let navs = find_loops(sema, token)?
+        .into_iter()
+        .filter_map(|expr| {
+            let file_id = sema.hir_file_for(expr.syntax());
+            let expr_in_file = InFile::new(file_id, expr.clone());
+            let focus_range = match expr {
+                ast::Expr::LoopExpr(loop_) => loop_.loop_token()?.text_range(),
+                ast::Expr::WhileExpr(while_) => while_.while_token()?.text_range(),
+                ast::Expr::ForExpr(for_) => for_.for_token()?.text_range(),
+                // We garentee that the label exists
+                ast::Expr::BlockExpr(blk) => blk.label().unwrap().syntax().text_range(),
+                _ => return None,
+            };
+            let nav = NavigationTarget::from_expr(db, expr_in_file, Some(focus_range));
+            Some(nav)
+        })
+        .flatten()
+        .collect_vec();
+
+    Some(navs)
+}

 fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
     def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default()
 }
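The navigation side is now split into two stages: shared collectors (`find_fn_or_blocks`, `find_loops`) that descend the cursor token into every macro expansion and walk ancestors for matching nodes, and thin `nav_for_exit_points` / `nav_for_break_points` wrappers that turn those nodes into `NavigationTarget`s. A minimal sketch of that two-stage shape, using hypothetical stand-in types rather than rust-analyzer's `Semantics` and `SyntaxNode` machinery:

```rust
// Hypothetical stand-ins; only the two-stage shape mirrors the commit.
struct TokenCopy {
    // Ancestor node kinds, innermost first, as `token_ancestors_with_macros`
    // would yield them for this macro-descended copy of the cursor token.
    ancestors: Vec<&'static str>,
}

/// Stage 1 (cf. `find_fn_or_blocks` / `find_loops`): for each copy of the
/// token, keep the first enclosing node of a wanted kind.
fn find_enclosing(copies: &[TokenCopy], wanted: &[&str]) -> Vec<&'static str> {
    copies
        .iter()
        .filter_map(|copy| copy.ancestors.iter().copied().find(|k| wanted.contains(k)))
        .collect()
}

/// Stage 2 (cf. `nav_for_exit_points` / `nav_for_break_points`): map each
/// collected node to a navigation target.
fn nav_for(copies: &[TokenCopy]) -> Vec<String> {
    find_enclosing(copies, &["fn", "closure", "async_block"])
        .into_iter()
        .map(|kind| format!("target: {kind}"))
        .collect()
}

fn main() {
    // One copy of the token at the call site, one inside a macro expansion.
    let copies = [
        TokenCopy { ancestors: vec!["block", "fn"] },
        TokenCopy { ancestors: vec!["macro_call", "closure"] },
    ];
    assert_eq!(nav_for(&copies), ["target: fn", "target: closure"]);
}
```

Splitting collection from rendering is what lets `highlight_related.rs` below reuse the same collectors for highlighting.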

crates/ide/src/highlight_related.rs

@ -2,14 +2,10 @@ use std::iter;
use hir::{db, DescendPreference, FilePosition, FileRange, HirFileId, InFile, Semantics}; use hir::{db, DescendPreference, FilePosition, FileRange, HirFileId, InFile, Semantics};
use ide_db::{ use ide_db::{
defs::{Definition, IdentClass}, defs::{Definition, IdentClass}, helpers::pick_best_token, search::{FileReference, ReferenceCategory, SearchScope}, syntax_helpers::node_ext::{
helpers::pick_best_token,
search::{FileReference, ReferenceCategory, SearchScope},
syntax_helpers::node_ext::{
eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif, eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif,
preorder_expr_with_ctx_checker, preorder_expr_with_ctx_checker,
}, }, FxHashMap, FxHashSet, RootDatabase
FxHashSet, RootDatabase,
}; };
use span::EditionedFileId; use span::EditionedFileId;
use syntax::{ use syntax::{
@ -19,7 +15,7 @@ use syntax::{
SyntaxToken, TextRange, WalkEvent, T, SyntaxToken, TextRange, WalkEvent, T,
}; };
use crate::{navigation_target::ToNav, NavigationTarget, TryToNav}; use crate::{goto_definition, navigation_target::ToNav, NavigationTarget, TryToNav};
#[derive(PartialEq, Eq, Hash)] #[derive(PartialEq, Eq, Hash)]
pub struct HighlightedRange { pub struct HighlightedRange {
@@ -73,15 +69,19 @@ pub(crate) fn highlight_related(
     // most if not all of these should be re-implemented with information seeded from hir
     match token.kind() {
         T![?] if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => {
-            highlight_exit_points(sema, token)
+            highlight_exit_points(sema, token).remove(&file_id)
         }
-        T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token),
-        T![await] | T![async] if config.yield_points => highlight_yield_points(sema, token),
+        T![fn] | T![return] | T![->] if config.exit_points => {
+            highlight_exit_points(sema, token).remove(&file_id)
+        }
+        T![await] | T![async] if config.yield_points => {
+            highlight_yield_points(sema, token).remove(&file_id)
+        }
         T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => {
-            highlight_break_points(sema, token)
+            highlight_break_points(sema, token).remove(&file_id)
         }
         T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
-            highlight_break_points(sema, token)
+            highlight_break_points(sema, token).remove(&file_id)
         }
         T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
         T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
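The highlight functions now return their results grouped per file, and the dispatcher keeps only the bucket for the file the cursor is in via `.remove(&file_id)`, which is what restricts keyword highlighting to the current file. A rough sketch of that filtering step with a plain `HashMap` (all names here are hypothetical):

```rust
use std::collections::HashMap;

type FileId = u32;

#[derive(Debug, PartialEq)]
struct HighlightedRange {
    start: u32,
    end: u32,
}

// Stand-in for highlight_exit_points & co.: highlights grouped per file,
// because walking into macro expansions can produce ranges in other files.
fn highlight_all_files() -> HashMap<FileId, Vec<HighlightedRange>> {
    HashMap::from([
        (1, vec![HighlightedRange { start: 10, end: 12 }]),
        (2, vec![HighlightedRange { start: 0, end: 2 }]),
    ])
}

fn main() {
    let current_file: FileId = 1;
    // `.remove(&file_id)` keeps only the current file's bucket and yields it
    // by value, the `Option<Vec<_>>` shape the dispatcher's match arms need.
    let only_here = highlight_all_files().remove(&current_file);
    assert_eq!(only_here, Some(vec![HighlightedRange { start: 10, end: 12 }]));
}
```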
@@ -277,24 +277,35 @@ fn highlight_references(
     }
 }

+// If `file_id` is None,
 pub(crate) fn highlight_exit_points(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
-) -> Option<Vec<HighlightedRange>> {
+) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
     fn hl(
         sema: &Semantics<'_, RootDatabase>,
-        def_range: Option<TextRange>,
+        def_token: Option<SyntaxToken>,
         body: ast::Expr,
-    ) -> Option<Vec<HighlightedRange>> {
-        let mut highlights = Vec::new();
-        if let Some(range) = def_range {
-            highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range });
+    ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
+        let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+
+        let mut push_to_highlights = |file_id, range| {
+            if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
+                let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
+                highlights.entry(file_id).or_default().push(hrange);
+            }
+        };
+
+        if let Some(tok) = def_token {
+            let file_id = sema.hir_file_for(&tok.parent()?);
+            let range = Some(tok.text_range());
+            push_to_highlights(file_id, range);
         }

         WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| {
             let file_id = sema.hir_file_for(expr.syntax());
-            let text_range = match &expr {
+            let range = match &expr {
                 ast::Expr::TryExpr(try_) => {
                     try_.question_mark_token().map(|token| token.text_range())
                 }
@@ -306,29 +317,24 @@ pub(crate) fn highlight_exit_points(
                 _ => None,
             };

-            if let Some(range) = original_range(sema.db, file_id, text_range) {
-                highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range })
-            }
+            push_to_highlights(file_id, range);
         });

-        // We should handle `return` separately because when it is used in `try` block
-        // it will exit the outside function instead of the block it self.
+        // We should handle `return` separately, because when it is used in a `try` block,
+        // it will exit the outside function instead of the block itself.
         WalkExpandedExprCtx::new(sema)
             .with_check_ctx(&WalkExpandedExprCtx::is_async_const_block_or_closure)
             .walk(&body, &mut |_, expr| {
                 let file_id = sema.hir_file_for(expr.syntax());
-                let text_range = match &expr {
+                let range = match &expr {
                     ast::Expr::ReturnExpr(expr) => {
                         expr.return_token().map(|token| token.text_range())
                     }
                     _ => None,
                 };

-                if let Some(range) = original_range(sema.db, file_id, text_range) {
-                    highlights
-                        .push(HighlightedRange { category: ReferenceCategory::empty(), range })
-                }
+                push_to_highlights(file_id, range);
             });

         let tail = match body {
@@ -338,59 +344,74 @@ pub(crate) fn highlight_exit_points(
         if let Some(tail) = tail {
             for_each_tail_expr(&tail, &mut |tail| {
+                let file_id = sema.hir_file_for(tail.syntax());
                 let range = match tail {
                     ast::Expr::BreakExpr(b) => b
                         .break_token()
                         .map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()),
                     _ => tail.syntax().text_range(),
                 };
-                highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range })
+                push_to_highlights(file_id, Some(range));
             });
         }

         Some(highlights)
     }

-    for anc in token.parent_ancestors() {
-        return match_ast! {
-            match anc {
-                ast::Fn(fn_) => hl(sema, fn_.fn_token().map(|it| it.text_range()), ast::Expr::BlockExpr(fn_.body()?)),
-                ast::ClosureExpr(closure) => hl(
-                    sema,
-                    closure.param_list().and_then(|p| p.pipe_token()).map(|tok| tok.text_range()),
-                    closure.body()?
-                ),
+    let mut res = FxHashMap::default();
+    for def in goto_definition::find_fn_or_blocks(sema, &token) {
+        let new_map = match_ast! {
+            match def {
+                ast::Fn(fn_) => fn_.body().and_then(|body| hl(sema, fn_.fn_token(), body.into())),
+                ast::ClosureExpr(closure) => {
+                    let pipe_tok = closure.param_list().and_then(|p| p.pipe_token());
+                    closure.body().and_then(|body| hl(sema, pipe_tok, body))
+                },
                 ast::BlockExpr(blk) => match blk.modifier() {
-                    Some(ast::BlockModifier::Async(t)) => hl(sema, Some(t.text_range()), blk.into()),
+                    Some(ast::BlockModifier::Async(t)) => hl(sema, Some(t), blk.into()),
                     Some(ast::BlockModifier::Try(t)) if token.kind() != T![return] => {
-                        hl(sema, Some(t.text_range()), blk.into())
+                        hl(sema, Some(t), blk.into())
                     },
                     _ => continue,
                 },
                 _ => continue,
             }
         };
+        merge_map(&mut res, new_map);
     }
-    None
+
+    res
 }
 pub(crate) fn highlight_break_points(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
-) -> Option<Vec<HighlightedRange>> {
-    fn hl(
+) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
+    pub(crate) fn hl(
         sema: &Semantics<'_, RootDatabase>,
         cursor_token_kind: SyntaxKind,
         loop_token: Option<SyntaxToken>,
         label: Option<ast::Label>,
         expr: ast::Expr,
-    ) -> Option<Vec<HighlightedRange>> {
-        let mut highlights = Vec::new();
-        let (label_range, label_lt) = label
-            .map_or((None, None), |label| (Some(label.syntax().text_range()), label.lifetime()));
+    ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
+        let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+
+        let mut push_to_highlights = |file_id, range| {
+            if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
+                let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
+                highlights.entry(file_id).or_default().push(hrange);
+            }
+        };

-        if let Some(range) = cover_range(loop_token.map(|tok| tok.text_range()), label_range) {
-            highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range })
+        let label_lt = label.as_ref().and_then(|it| it.lifetime());
+
+        if let Some(range) = cover_range(
+            loop_token.as_ref().map(|tok| tok.text_range()),
+            label.as_ref().map(|it| it.syntax().text_range()),
+        ) {
+            let file_id = loop_token
+                .and_then(|tok| Some(sema.hir_file_for(&tok.parent()?)))
+                .unwrap_or_else(|| sema.hir_file_for(label.unwrap().syntax()));
+            push_to_highlights(file_id, Some(range));
         }

         WalkExpandedExprCtx::new(sema)
@@ -418,68 +439,53 @@ pub(crate) fn highlight_break_points(
                     token_lt.map(|it| it.syntax().text_range()),
                 );

-                if let Some(range) = original_range(sema.db, file_id, text_range) {
-                    highlights
-                        .push(HighlightedRange { category: ReferenceCategory::empty(), range })
-                }
+                push_to_highlights(file_id, text_range);
             });

         Some(highlights)
     }

-    let parent = token.parent()?;
-    let lbl = match_ast! {
-        match parent {
-            ast::BreakExpr(b) => b.lifetime(),
-            ast::ContinueExpr(c) => c.lifetime(),
-            ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()),
-            ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()),
-            ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()),
-            ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?),
-            _ => return None,
-        }
+    let mut res = FxHashMap::default();
+    let token_kind = token.kind();
+    let Some(loops) = goto_definition::find_loops(sema, &token) else {
+        return res;
     };

-    let label_matches = |def_lbl: Option<ast::Label>| match lbl.as_ref() {
-        Some(lbl) => {
-            Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text())
-        }
-        None => true,
-    };
-
-    for anc in token.parent_ancestors().flat_map(ast::Expr::cast) {
-        return match &anc {
-            ast::Expr::LoopExpr(l) if label_matches(l.label()) => {
-                hl(sema, token.kind(), l.loop_token(), l.label(), anc)
-            }
-            ast::Expr::ForExpr(f) if label_matches(f.label()) => {
-                hl(sema, token.kind(), f.for_token(), f.label(), anc)
-            }
-            ast::Expr::WhileExpr(w) if label_matches(w.label()) => {
-                hl(sema, token.kind(), w.while_token(), w.label(), anc)
-            }
-            ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => {
-                hl(sema, token.kind(), None, e.label(), anc)
-            }
+    for expr in loops {
+        let new_map = match &expr {
+            ast::Expr::LoopExpr(l) => hl(sema, token_kind, l.loop_token(), l.label(), expr),
+            ast::Expr::ForExpr(f) => hl(sema, token_kind, f.for_token(), f.label(), expr),
+            ast::Expr::WhileExpr(w) => hl(sema, token_kind, w.while_token(), w.label(), expr),
+            ast::Expr::BlockExpr(e) => hl(sema, token_kind, None, e.label(), expr),
             _ => continue,
         };
+        merge_map(&mut res, new_map);
     }
-    None
+
+    res
 }
 pub(crate) fn highlight_yield_points(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
-) -> Option<Vec<HighlightedRange>> {
+) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
     fn hl(
         sema: &Semantics<'_, RootDatabase>,
         async_token: Option<SyntaxToken>,
         body: Option<ast::Expr>,
-    ) -> Option<Vec<HighlightedRange>> {
-        let mut highlights = vec![HighlightedRange {
-            category: ReferenceCategory::empty(),
-            range: async_token?.text_range(),
-        }];
+    ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
+        let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+
+        let mut push_to_highlights = |file_id, range| {
+            if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
+                let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
+                highlights.entry(file_id).or_default().push(hrange);
+            }
+        };
+
+        let async_token = async_token?;
+        let async_tok_file_id = sema.hir_file_for(&async_token.parent()?);
+        push_to_highlights(async_tok_file_id, Some(async_token.text_range()));

         let Some(body) = body else {
             return Some(highlights);
         };
@@ -487,22 +493,22 @@ pub(crate) fn highlight_yield_points(
         WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| {
             let file_id = sema.hir_file_for(expr.syntax());

-            let token_range = match expr {
+            let text_range = match expr {
                 ast::Expr::AwaitExpr(expr) => expr.await_token(),
                 ast::Expr::ReturnExpr(expr) => expr.return_token(),
                 _ => None,
             }
             .map(|it| it.text_range());

-            if let Some(range) = original_range(sema.db, file_id, token_range) {
-                highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range });
-            }
+            push_to_highlights(file_id, text_range);
         });

         Some(highlights)
     }

-    for anc in token.parent_ancestors() {
-        return match_ast! {
+    let mut res = FxHashMap::default();
+    for anc in goto_definition::find_fn_or_blocks(sema, &token) {
+        let new_map = match_ast! {
             match anc {
                 ast::Fn(fn_) => hl(sema, fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)),
                 ast::BlockExpr(block_expr) => {
@@ -515,8 +521,10 @@ pub(crate) fn highlight_yield_points(
                 _ => continue,
             }
         };
+        merge_map(&mut res, new_map);
     }
-    None
+
+    res
 }

 fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
@@ -536,14 +544,24 @@ fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSe
         .collect()
 }

-fn original_range(
+fn original_frange(
     db: &dyn db::ExpandDatabase,
     file_id: HirFileId,
     text_range: Option<TextRange>,
-) -> Option<TextRange> {
-    InFile::new(file_id, text_range?)
-        .original_node_file_range_opt(db)
-        .map(|(frange, _)| frange.range)
+) -> Option<FileRange> {
+    InFile::new(file_id, text_range?).original_node_file_range_opt(db).map(|(frange, _)| frange)
+}
+
+fn merge_map(
+    res: &mut FxHashMap<EditionedFileId, Vec<HighlightedRange>>,
+    new: Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>>,
+) {
+    let Some(new) = new else {
+        return;
+    };
+    new.into_iter().for_each(|(file_id, ranges)| {
+        res.entry(file_id).or_default().extend(ranges);
+    });
 }

 /// Preorder walk all the expression's child expressions.
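Each `hl` helper now returns an optional per-file map, and the new `merge_map` folds those maps into one accumulator by extending each file's bucket. A minimal standalone equivalent, assuming std's `HashMap` in place of `FxHashMap` and a generic value in place of `HighlightedRange`:

```rust
use std::collections::HashMap;
use std::hash::Hash;

// Same shape as the commit's `merge_map`: a `None` result is skipped,
// a `Some` map is merged bucket by bucket into the accumulator.
fn merge_map<K: Hash + Eq, V>(res: &mut HashMap<K, Vec<V>>, new: Option<HashMap<K, Vec<V>>>) {
    let Some(new) = new else { return };
    for (key, values) in new {
        res.entry(key).or_default().extend(values);
    }
}

fn main() {
    let mut acc: HashMap<u32, Vec<&str>> = HashMap::new();
    merge_map(&mut acc, Some(HashMap::from([(1, vec!["a"])])));
    merge_map(&mut acc, Some(HashMap::from([(1, vec!["b"]), (2, vec!["c"])])));
    merge_map(&mut acc, None); // a `None` from `hl` is simply skipped
    assert_eq!(acc[&1], ["a", "b"]);
    assert_eq!(acc[&2], ["c"]);
}
```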

crates/ide/src/navigation_target.rs

@@ -16,7 +16,7 @@ use ide_db::{
 use stdx::never;
 use syntax::{
     ast::{self, HasName},
-    format_smolstr, AstNode, SmolStr, SyntaxElement, SyntaxNode, TextRange, ToSmolStr,
+    format_smolstr, AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr,
 };

 /// `NavigationTarget` represents an element in the editor's UI which you can
@@ -155,11 +155,10 @@ impl NavigationTarget {
     pub(crate) fn from_expr(
         db: &RootDatabase,
         InFile { file_id, value }: InFile<ast::Expr>,
-        focus_syntax: SyntaxElement,
+        focus_range: Option<TextRange>,
     ) -> UpmappingResult<NavigationTarget> {
         let name: SmolStr = "<expr>".into();
         let kind = SymbolKind::Label;
-        let focus_range = Some(focus_syntax.text_range());

         orig_range_with_focus_r(db, file_id, value.syntax().text_range(), focus_range).map(
             |(FileRange { file_id, range: full_range }, focus_range)| {
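`from_expr` now takes a plain `Option<TextRange>` instead of a `SyntaxElement`, so callers that only have a token's range (for example after upmapping out of a macro) no longer need to hand over a syntax element, and the focus can be omitted entirely. A small sketch of the narrowed signature, using simplified hypothetical types:

```rust
// Simplified stand-ins for rust-analyzer's TextRange and NavigationTarget.
#[derive(Clone, Copy, Debug, PartialEq)]
struct TextRange {
    start: u32,
    end: u32,
}

#[derive(Debug)]
struct NavigationTarget {
    full_range: TextRange,
    focus_range: Option<TextRange>,
}

fn from_expr(full_range: TextRange, focus_range: Option<TextRange>) -> NavigationTarget {
    // Previously the focus range was derived inside from a `SyntaxElement`;
    // now callers such as `nav_for_break_points` pass `Some(tok.text_range())`
    // (or `None`) directly.
    NavigationTarget { full_range, focus_range }
}

fn main() {
    let loop_tok = TextRange { start: 4, end: 8 };
    let nav = from_expr(TextRange { start: 0, end: 20 }, Some(loop_tok));
    assert_eq!(nav.focus_range, Some(loop_tok));
}
```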

crates/ide/src/references.rs

@@ -305,28 +305,31 @@ fn handle_control_flow_keywords(
     sema: &Semantics<'_, RootDatabase>,
     FilePosition { file_id, offset }: FilePosition,
 ) -> Option<ReferenceSearchResult> {
-    let file = sema.parse(file_id);
+    let file = sema.parse_guess_edition(file_id);
     let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword())?;

-    let refs = match token.kind() {
-        T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token)?,
-        T![async] => highlight_related::highlight_yield_points(sema, token)?,
+    let references = match token.kind() {
+        T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token),
+        T![async] => highlight_related::highlight_yield_points(sema, token),
         T![loop] | T![while] | T![break] | T![continue] => {
-            highlight_related::highlight_break_points(sema, token)?
+            highlight_related::highlight_break_points(sema, token)
         }
         T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
-            highlight_related::highlight_break_points(sema, token)?
+            highlight_related::highlight_break_points(sema, token)
         }
         _ => return None,
     }
+    .into_iter()
+    .map(|(file_id, ranges)| {
+        let ranges = ranges
             .into_iter()
             .map(|HighlightedRange { range, category }| (range, category))
             .collect();
+        (file_id.into(), ranges)
+    })
+    .collect();

-    Some(ReferenceSearchResult {
-        declaration: None,
-        references: IntMap::from_iter([(file_id, refs)]),
-    })
+    Some(ReferenceSearchResult { declaration: None, references })
 }

 #[cfg(test)]
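With the highlight functions returning per-file maps, `handle_control_flow_keywords` can report references in every affected file rather than pinning them all to the requesting one: each `(file_id, ranges)` entry of the map becomes one entry of `ReferenceSearchResult::references`. A standalone sketch of that conversion, with the types simplified:

```rust
use std::collections::HashMap;

type FileId = u32;
type RangeWithCategory = (u32, u32); // simplified (range, category) pair

// Simplified ReferenceSearchResult: references may now span several files.
struct ReferenceSearchResult {
    references: HashMap<FileId, Vec<RangeWithCategory>>,
}

fn to_search_result(
    per_file: HashMap<FileId, Vec<RangeWithCategory>>,
) -> ReferenceSearchResult {
    // One entry per file from the highlight map, instead of forcing every
    // range into the file the request came from.
    ReferenceSearchResult { references: per_file.into_iter().collect() }
}

fn main() {
    let mut per_file = HashMap::new();
    per_file.insert(1, vec![(0, 2)]);
    per_file.insert(7, vec![(5, 9)]); // e.g. a range inside a macro's defining file
    let res = to_search_result(per_file);
    assert_eq!(res.references.len(), 2);
}
```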