refactor: simplify functions related to branch_exit_points in highlight_related

This commit is contained in:
roifewu 2025-04-15 11:04:46 +08:00
parent c36758def4
commit 79de21bd9d
3 changed files with 74 additions and 72 deletions

View file

@ -291,14 +291,14 @@ fn handle_control_flow_keywords(
token: &SyntaxToken, token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> { ) -> Option<Vec<NavigationTarget>> {
match token.kind() { match token.kind() {
// For `fn` / `loop` / `while` / `for` / `async`, return the keyword itself, // For `fn` / `loop` / `while` / `for` / `async` / `match`, return the keyword itself,
// so that VSCode will find the references when using `ctrl + click` // so that VSCode will find the references when using `ctrl + click`
T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token), T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token),
T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token), T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token),
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => { T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
nav_for_break_points(sema, token) nav_for_break_points(sema, token)
} }
T![match] | T![=>] | T![if] => nav_for_branches(sema, token), T![match] | T![=>] | T![if] => nav_for_branch_exit_points(sema, token),
_ => None, _ => None,
} }
} }
@ -408,22 +408,66 @@ fn nav_for_exit_points(
Some(navs) Some(navs)
} }
fn nav_for_branches( pub(crate) fn find_branch_root(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
) -> Vec<SyntaxNode> {
fn find_root(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
pred: impl Fn(SyntaxNode) -> Option<SyntaxNode>,
) -> Vec<SyntaxNode> {
let mut result = Vec::new();
for token in sema.descend_into_macros(token.clone()) {
for node in sema.token_ancestors_with_macros(token) {
if ast::MacroCall::can_cast(node.kind()) {
break;
}
if let Some(node) = pred(node) {
result.push(node);
break;
}
}
}
result
}
match token.kind() {
T![match] => {
find_root(sema, token, |node| Some(ast::MatchExpr::cast(node)?.syntax().clone()))
}
T![=>] => find_root(sema, token, |node| Some(ast::MatchArm::cast(node)?.syntax().clone())),
T![if] => find_root(sema, token, |node| {
let if_expr = ast::IfExpr::cast(node)?;
iter::successors(Some(if_expr.clone()), |if_expr| {
let parent_if = if_expr.syntax().parent().and_then(ast::IfExpr::cast)?;
if let ast::ElseBranch::IfExpr(nested_if) = parent_if.else_branch()? {
(nested_if.syntax() == if_expr.syntax()).then_some(parent_if)
} else {
None
}
})
.last()
.map(|if_expr| if_expr.syntax().clone())
}),
_ => vec![],
}
}
fn nav_for_branch_exit_points(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken, token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> { ) -> Option<Vec<NavigationTarget>> {
let db = sema.db; let db = sema.db;
let navs = match token.kind() { let navs = match token.kind() {
T![match] => sema T![match] => find_branch_root(sema, token)
.descend_into_macros(token.clone())
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|node| {
let match_expr = sema let file_id = sema.hir_file_for(&node);
.token_ancestors_with_macros(token) let match_expr = ast::MatchExpr::cast(node)?;
.take_while(|node| !ast::MacroCall::can_cast(node.kind()))
.find_map(ast::MatchExpr::cast)?;
let file_id = sema.hir_file_for(match_expr.syntax());
let focus_range = match_expr.match_token()?.text_range(); let focus_range = match_expr.match_token()?.text_range();
let match_expr_in_file = InFile::new(file_id, match_expr.into()); let match_expr_in_file = InFile::new(file_id, match_expr.into());
Some(expr_to_nav(db, match_expr_in_file, Some(focus_range))) Some(expr_to_nav(db, match_expr_in_file, Some(focus_range)))
@ -431,14 +475,10 @@ fn nav_for_branches(
.flatten() .flatten()
.collect_vec(), .collect_vec(),
T![=>] => sema T![=>] => find_branch_root(sema, token)
.descend_into_macros(token.clone())
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|node| {
let match_arm = sema let match_arm = ast::MatchArm::cast(node)?;
.token_ancestors_with_macros(token)
.take_while(|node| !ast::MacroCall::can_cast(node.kind()))
.find_map(ast::MatchArm::cast)?;
let match_expr = sema let match_expr = sema
.ancestors_with_macros(match_arm.syntax().clone()) .ancestors_with_macros(match_arm.syntax().clone())
.find_map(ast::MatchExpr::cast)?; .find_map(ast::MatchExpr::cast)?;
@ -450,15 +490,11 @@ fn nav_for_branches(
.flatten() .flatten()
.collect_vec(), .collect_vec(),
T![if] => sema T![if] => find_branch_root(sema, token)
.descend_into_macros(token.clone())
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|node| {
let if_expr = sema let file_id = sema.hir_file_for(&node);
.token_ancestors_with_macros(token) let if_expr = ast::IfExpr::cast(node)?;
.take_while(|node| !ast::MacroCall::can_cast(node.kind()))
.find_map(ast::IfExpr::cast)?;
let file_id = sema.hir_file_for(if_expr.syntax());
let focus_range = if_expr.if_token()?.text_range(); let focus_range = if_expr.if_token()?.text_range();
let if_expr_in_file = InFile::new(file_id, if_expr.into()); let if_expr_in_file = InFile::new(file_id, if_expr.into());
Some(expr_to_nav(db, if_expr_in_file, Some(focus_range))) Some(expr_to_nav(db, if_expr_in_file, Some(focus_range)))
@ -3785,9 +3821,9 @@ fn main() {
r#" r#"
fn main() { fn main() {
if true { if true {
// ^^
() ()
} else if$0 false { } else if$0 false {
// ^^
() ()
} else { } else {
() ()

View file

@ -331,17 +331,10 @@ pub(crate) fn highlight_branch_exit_points(
}); });
}; };
let nodes = goto_definition::find_branch_root(sema, &token).into_iter();
match token.kind() { match token.kind() {
T![match] => { T![match] => {
for token in sema.descend_into_macros(token.clone()) { for match_expr in nodes.filter_map(ast::MatchExpr::cast) {
let Some(match_expr) = sema
.token_ancestors_with_macros(token)
.take_while(|node| !ast::MacroCall::can_cast(node.kind()))
.find_map(ast::MatchExpr::cast)
else {
continue;
};
let file_id = sema.hir_file_for(match_expr.syntax()); let file_id = sema.hir_file_for(match_expr.syntax());
let range = match_expr.match_token().map(|token| token.text_range()); let range = match_expr.match_token().map(|token| token.text_range());
push_to_highlights(file_id, range, &mut highlights); push_to_highlights(file_id, range, &mut highlights);
@ -349,22 +342,13 @@ pub(crate) fn highlight_branch_exit_points(
let Some(arm_list) = match_expr.match_arm_list() else { let Some(arm_list) = match_expr.match_arm_list() else {
continue; continue;
}; };
for arm in arm_list.arms() { for arm in arm_list.arms() {
push_tail_expr(arm.expr(), &mut highlights); push_tail_expr(arm.expr(), &mut highlights);
} }
} }
} }
T![=>] => { T![=>] => {
for token in sema.descend_into_macros(token.clone()) { for arm in nodes.filter_map(ast::MatchArm::cast) {
let Some(arm) = sema
.token_ancestors_with_macros(token)
.take_while(|node| !ast::MacroCall::can_cast(node.kind()))
.find_map(ast::MatchArm::cast)
else {
continue;
};
let file_id = sema.hir_file_for(arm.syntax()); let file_id = sema.hir_file_for(arm.syntax());
let range = arm.fat_arrow_token().map(|token| token.text_range()); let range = arm.fat_arrow_token().map(|token| token.text_range());
push_to_highlights(file_id, range, &mut highlights); push_to_highlights(file_id, range, &mut highlights);
@ -373,27 +357,7 @@ pub(crate) fn highlight_branch_exit_points(
} }
} }
T![if] => { T![if] => {
for token in sema.descend_into_macros(token.clone()) { for mut if_to_process in nodes.map(ast::IfExpr::cast) {
let Some(if_expr) = sema
.token_ancestors_with_macros(token)
.take_while(|node| !ast::MacroCall::can_cast(node.kind()))
.find_map(ast::IfExpr::cast)
else {
continue;
};
// Find the root of the if expression
let mut if_to_process = iter::successors(Some(if_expr.clone()), |if_expr| {
let parent_if = if_expr.syntax().parent().and_then(ast::IfExpr::cast)?;
if let ast::ElseBranch::IfExpr(nested_if) = parent_if.else_branch()? {
(nested_if.syntax() == if_expr.syntax()).then_some(parent_if)
} else {
None
}
})
.last()
.or(Some(if_expr));
while let Some(cur_if) = if_to_process.take() { while let Some(cur_if) = if_to_process.take() {
let file_id = sema.hir_file_for(cur_if.syntax()); let file_id = sema.hir_file_for(cur_if.syntax());
@ -415,7 +379,7 @@ pub(crate) fn highlight_branch_exit_points(
} }
} }
} }
_ => unreachable!(), _ => {}
} }
highlights highlights

View file

@ -21,6 +21,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{ use ide_db::{
FileId, RootDatabase, FileId, RootDatabase,
defs::{Definition, NameClass, NameRefClass}, defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
search::{ReferenceCategory, SearchScope, UsageSearchResult}, search::{ReferenceCategory, SearchScope, UsageSearchResult},
}; };
use itertools::Itertools; use itertools::Itertools;
@ -397,10 +398,11 @@ fn handle_control_flow_keywords(
.attach_first_edition(file_id) .attach_first_edition(file_id)
.map(|it| it.edition(sema.db)) .map(|it| it.edition(sema.db))
.unwrap_or(Edition::CURRENT); .unwrap_or(Edition::CURRENT);
let token = file let token = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
.syntax() _ if kind.is_keyword(edition) => 4,
.token_at_offset(offset) T![=>] => 3,
.find(|t| t.kind().is_keyword(edition) || t.kind() == T![=>])?; _ => 1,
})?;
let references = match token.kind() { let references = match token.kind() {
T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token), T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token),