Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 04:44:57 +00:00
Remove lossy Definition::from_token/Definition::from_node methods
parent 68bc12c3b8
commit c5a9985fc5
6 changed files with 91 additions and 92 deletions
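Note: every hunk below follows the same migration. Callers stop using the removed Definition::from_token/Definition::from_node helpers and instead call IdentClass::classify_token(...) and expand the result with IdentClass::definitions(), deciding for themselves what to do when a token names more than one definition. The following is a minimal, self-contained sketch of that calling pattern; Definition, IdentClass, and classify_token here are toy stand-ins (the field-shorthand case is an illustrative assumption), not the real ide_db types.

// Toy stand-ins for ide_db's Definition/IdentClass; only the API shape mirrors the diff.
#[derive(Debug, Clone, Copy)]
enum Definition {
    Local(u32),
    Field(u32),
}

// An identifier can classify to more than one definition at once; the
// field-shorthand variant here is an illustrative assumption, not the real enum.
enum IdentClass {
    Single(Definition),
    FieldShorthand { local: Definition, field: Definition },
}

impl IdentClass {
    // Hypothetical classification entry point with the same shape as
    // IdentClass::classify_token(sema, &token) in the hunks below.
    fn classify_token(token: &str) -> Option<IdentClass> {
        match token {
            "x" => Some(IdentClass::Single(Definition::Local(0))),
            "field" => Some(IdentClass::FieldShorthand {
                local: Definition::Local(1),
                field: Definition::Field(1),
            }),
            _ => None, // token is not an identifier we know how to classify
        }
    }

    // Expand the classification into every definition it names.
    fn definitions(self) -> Vec<Definition> {
        match self {
            IdentClass::Single(def) => vec![def],
            IdentClass::FieldShorthand { local, field } => vec![local, field],
        }
    }
}

fn main() {
    // The calling pattern used throughout the diff: classify, then ask for all
    // definitions, instead of one flattening Definition::from_token call.
    for token in ["x", "field", "42"] {
        let defs = IdentClass::classify_token(token)
            .map(IdentClass::definitions)
            .unwrap_or_default();
        println!("{token}: {defs:?}");
    }
}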
@@ -4,7 +4,7 @@ use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, Ran
 use hir::{AsAssocItem, Semantics};
 use ide_db::{
     base_db::{AnchoredPath, FileId, FileLoader},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase,
 };
@@ -46,20 +46,20 @@ pub(crate) fn goto_definition(
         .filter_map(|token| {
             let parent = token.parent()?;
             if let Some(tt) = ast::TokenTree::cast(parent) {
-                if let x @ Some(_) =
-                    try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
                 {
-                    return x;
+                    return Some(vec![x]);
                 }
             }
             Some(
-                Definition::from_token(sema, &token)
+                IdentClass::classify_token(sema, &token)?
+                    .definitions()
                     .into_iter()
                     .flat_map(|def| {
                         try_find_trait_item_definition(sema.db, &def)
                             .unwrap_or_else(|| def_to_nav(sema.db, def))
                     })
-                    .collect::<Vec<_>>(),
+                    .collect(),
             )
         })
         .flatten()
@@ -74,7 +74,7 @@ fn try_lookup_include_path(
     tt: ast::TokenTree,
     token: SyntaxToken,
     file_id: FileId,
-) -> Option<Vec<NavigationTarget>> {
+) -> Option<NavigationTarget> {
     let token = ast::String::cast(token)?;
     let path = token.value()?.into_owned();
     let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
@@ -84,7 +84,7 @@ fn try_lookup_include_path(
     }
     let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
     let size = sema.db.file_text(file_id).len().try_into().ok()?;
-    Some(vec![NavigationTarget {
+    Some(NavigationTarget {
         file_id,
         full_range: TextRange::new(0.into(), size),
         name: path.into(),
@@ -93,7 +93,7 @@ fn try_lookup_include_path(
         container_name: None,
         description: None,
         docs: None,
-    }])
+    })
 }
 
 /// finds the trait definition of an impl'd item
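The goto_definition hunks above change two things: try_lookup_include_path now returns a single Option<NavigationTarget> that the caller wraps in a Vec, and classification happens inside the filter_map closure, where the ? operator only exits that closure. Below is a minimal sketch of that closure's control flow, using plain strings instead of rust-analyzer's token and NavigationTarget types (the classify helper is a toy stand-in).

// Toy classification: a token maps to zero or more "navigation targets" (plain strings).
fn classify(token: &str) -> Option<Vec<&'static str>> {
    match token {
        "foo" => Some(vec!["def of foo"]),
        "bar" => Some(vec!["trait def of bar", "impl def of bar"]),
        _ => None,
    }
}

fn main() {
    let tokens = ["foo", "+", "bar"];
    let navs: Vec<_> = tokens
        .iter()
        .filter_map(|token| {
            // In the real code this line is IdentClass::classify_token(sema, &token)?;
            // `?` returns None from this closure only, so an unclassifiable token is skipped.
            let defs = classify(token)?;
            Some(defs)
        })
        .flatten() // merge each token's Vec into one stream, as the diff does
        .collect();
    assert_eq!(navs, ["def of foo", "trait def of bar", "impl def of bar"]);
    println!("{navs:?}");
}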
@@ -1,7 +1,7 @@
 use hir::Semantics;
 use ide_db::{
     base_db::{FileId, FilePosition},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::{for_each_break_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token},
     search::{FileReference, ReferenceCategory, SearchScope},
     RootDatabase,
@@ -293,7 +293,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
 fn find_defs(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
     sema.descend_into_macros(token)
         .into_iter()
-        .flat_map(|token| Definition::from_token(sema, &token))
+        .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+        .flatten()
         .collect()
 }
 
@@ -9,7 +9,7 @@ use either::Either;
 use hir::{HasSource, Semantics};
 use ide_db::{
     base_db::FileRange,
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::{pick_best_token, FamousDefs},
     FxIndexSet, RootDatabase,
 };
@@ -129,8 +129,8 @@ pub(crate) fn hover(
         .iter()
         .filter_map(|token| {
             let node = token.parent()?;
-            let defs = Definition::from_token(sema, token);
-            Some(defs.into_iter().zip(iter::once(node).cycle()))
+            let class = IdentClass::classify_token(sema, token)?;
+            Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
         })
         .flatten()
         .unique_by(|&(def, _)| def)
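In the hover hunk, every definition produced by the classified token is paired with the same parent node via zip(iter::once(node).cycle()). A tiny stand-alone illustration of that pairing trick, with strings standing in for Definition and SyntaxNode:

use std::iter;

fn main() {
    // Strings stand in for rust-analyzer's SyntaxNode and Definition here.
    let node = "parent node";
    let definitions = vec!["local `x`", "field `x`"];
    // iter::once(node).cycle() repeats the same node forever, so zipping it
    // attaches that node to every definition the token classified to.
    let pairs: Vec<(&str, &str)> =
        definitions.into_iter().zip(iter::once(node).cycle()).collect();
    assert_eq!(pairs, [("local `x`", "parent node"), ("field `x`", "parent node")]);
    println!("{pairs:?}");
}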
@@ -4,7 +4,7 @@
 use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
 use ide_db::{
     base_db::{CrateOrigin, FileId, FileLoader, FilePosition},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase,
 };
@@ -82,11 +82,10 @@ pub(crate) fn moniker(
     let navs = sema
         .descend_into_macros(original_token.clone())
         .into_iter()
-        .map(|token| {
-            Definition::from_token(sema, &token)
-                .into_iter()
-                .flat_map(|def| def_to_moniker(sema.db, def, current_crate))
-                .collect::<Vec<_>>()
+        .filter_map(|token| {
+            IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+                it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+            })
         })
         .flatten()
         .unique()
@@ -6,7 +6,7 @@ use std::collections::HashMap;
 use hir::{db::HirDatabase, Crate, Module, Semantics};
 use ide_db::{
     base_db::{FileId, FileRange, SourceDatabaseExt},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     RootDatabase,
 };
 use rustc_hash::FxHashSet;
@@ -195,9 +195,9 @@ impl StaticIndex<'_> {
 
 fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
     for token in sema.descend_into_macros(token) {
-        let def = Definition::from_token(sema, &token);
-        if let [x] = def.as_slice() {
-            return Some(*x);
+        let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+        if let Some(&[x]) = def.as_deref() {
+            return Some(x);
         } else {
             continue;
         };
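get_definition above now only succeeds when a token classifies to exactly one definition: as_deref() turns the Option<Vec<Definition>> into Option<&[Definition]>, and the slice pattern Some(&[x]) matches only a one-element slice. A self-contained sketch of the same check, with u32 standing in for Definition:

// u32 stands in for ide_db's Definition type.
fn single(defs: Option<Vec<u32>>) -> Option<u32> {
    // as_deref(): Option<Vec<u32>> -> Option<&[u32]>;
    // the slice pattern &[x] only matches a slice with exactly one element.
    if let Some(&[x]) = defs.as_deref() {
        Some(x)
    } else {
        None
    }
}

fn main() {
    assert_eq!(single(Some(vec![7])), Some(7)); // exactly one definition
    assert_eq!(single(Some(vec![1, 2])), None); // ambiguous, skipped
    assert_eq!(single(None), None);             // token did not classify
    println!("ok");
}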