Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-27 12:29:21 +00:00)
Remove lossy Definition::from_token/Definition::from_node methods

parent 68bc12c3b8
commit c5a9985fc5

6 changed files with 91 additions and 92 deletions
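Call sites that previously collected definitions with `Definition::from_token` now classify the token first and then ask the classification for its definitions. A minimal sketch of the replacement pattern, assuming the items shown in the hunks below (`IdentClass`, `Definition`, `Semantics`, `RootDatabase`); the `syntax::SyntaxToken` import path and the helper name `defs_for_token` are assumptions for illustration, not part of the commit:

use hir::Semantics;
use ide_db::{
    defs::{Definition, IdentClass},
    RootDatabase,
};
use syntax::SyntaxToken;

/// Sketch: every definition an identifier token may refer to, mirroring the
/// `classify_token(..).definitions()` pattern this commit applies at call sites.
fn defs_for_token(sema: &Semantics<RootDatabase>, token: &SyntaxToken) -> Vec<Definition> {
    IdentClass::classify_token(sema, token) // None for tokens that are not identifiers/lifetimes
        .map(IdentClass::definitions)       // ArrayVec<Definition, 2>
        .into_iter()
        .flatten()
        .collect()
}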
@@ -4,7 +4,7 @@ use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, Ran
 use hir::{AsAssocItem, Semantics};
 use ide_db::{
     base_db::{AnchoredPath, FileId, FileLoader},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase,
 };
@@ -46,20 +46,20 @@ pub(crate) fn goto_definition(
         .filter_map(|token| {
             let parent = token.parent()?;
             if let Some(tt) = ast::TokenTree::cast(parent) {
-                if let x @ Some(_) =
-                    try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
                 {
-                    return x;
+                    return Some(vec![x]);
                 }
             }
             Some(
-                Definition::from_token(sema, &token)
+                IdentClass::classify_token(sema, &token)?
+                    .definitions()
                     .into_iter()
                     .flat_map(|def| {
                         try_find_trait_item_definition(sema.db, &def)
                             .unwrap_or_else(|| def_to_nav(sema.db, def))
                     })
-                    .collect::<Vec<_>>(),
+                    .collect(),
             )
         })
         .flatten()
@@ -74,7 +74,7 @@ fn try_lookup_include_path(
     tt: ast::TokenTree,
     token: SyntaxToken,
     file_id: FileId,
-) -> Option<Vec<NavigationTarget>> {
+) -> Option<NavigationTarget> {
     let token = ast::String::cast(token)?;
     let path = token.value()?.into_owned();
     let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
@@ -84,7 +84,7 @@ fn try_lookup_include_path(
     }
     let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
     let size = sema.db.file_text(file_id).len().try_into().ok()?;
-    Some(vec![NavigationTarget {
+    Some(NavigationTarget {
         file_id,
         full_range: TextRange::new(0.into(), size),
         name: path.into(),
@@ -93,7 +93,7 @@ fn try_lookup_include_path(
         container_name: None,
         description: None,
         docs: None,
-    }])
+    })
 }

 /// finds the trait definition of an impl'd item
@@ -1,7 +1,7 @@
 use hir::Semantics;
 use ide_db::{
     base_db::{FileId, FilePosition},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::{for_each_break_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token},
     search::{FileReference, ReferenceCategory, SearchScope},
     RootDatabase,
@@ -293,7 +293,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
 fn find_defs(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
     sema.descend_into_macros(token)
         .into_iter()
-        .flat_map(|token| Definition::from_token(sema, &token))
+        .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+        .flatten()
         .collect()
 }

@@ -9,7 +9,7 @@ use either::Either;
 use hir::{HasSource, Semantics};
 use ide_db::{
     base_db::FileRange,
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::{pick_best_token, FamousDefs},
     FxIndexSet, RootDatabase,
 };
@@ -129,8 +129,8 @@ pub(crate) fn hover(
         .iter()
         .filter_map(|token| {
             let node = token.parent()?;
-            let defs = Definition::from_token(sema, token);
-            Some(defs.into_iter().zip(iter::once(node).cycle()))
+            let class = IdentClass::classify_token(sema, token)?;
+            Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
         })
         .flatten()
         .unique_by(|&(def, _)| def)
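In the hover hunk above, each definition produced by `IdentClass::definitions` is paired with the same parent node by zipping against `iter::once(node).cycle()`. A self-contained illustration of that pairing idiom (the names and values are made up for the example):

use std::iter;

fn main() {
    let node = "the-node";
    let defs = vec!["def-a", "def-b"];
    // `iter::once(node).cycle()` repeats `node` forever, so zipping pairs
    // every definition with the same node.
    let pairs: Vec<_> = defs.into_iter().zip(iter::once(node).cycle()).collect();
    assert_eq!(pairs, vec![("def-a", "the-node"), ("def-b", "the-node")]);
}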
@@ -4,7 +4,7 @@
 use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
 use ide_db::{
     base_db::{CrateOrigin, FileId, FileLoader, FilePosition},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase,
 };
@@ -82,11 +82,10 @@ pub(crate) fn moniker(
     let navs = sema
         .descend_into_macros(original_token.clone())
         .into_iter()
-        .map(|token| {
-            Definition::from_token(sema, &token)
-                .into_iter()
-                .flat_map(|def| def_to_moniker(sema.db, def, current_crate))
-                .collect::<Vec<_>>()
+        .filter_map(|token| {
+            IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+                it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+            })
         })
         .flatten()
         .unique()
@@ -6,7 +6,7 @@ use std::collections::HashMap;
 use hir::{db::HirDatabase, Crate, Module, Semantics};
 use ide_db::{
     base_db::{FileId, FileRange, SourceDatabaseExt},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     RootDatabase,
 };
 use rustc_hash::FxHashSet;
@@ -195,9 +195,9 @@ impl StaticIndex<'_> {

 fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
     for token in sema.descend_into_macros(token) {
-        let def = Definition::from_token(sema, &token);
-        if let [x] = def.as_slice() {
-            return Some(*x);
+        let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+        if let Some(&[x]) = def.as_deref() {
+            return Some(x);
         } else {
             continue;
         };
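In the hunk above, `get_definition` now succeeds only when classification yields exactly one definition: `as_deref` views the `Option<ArrayVec<Definition, 2>>` as an `Option<&[Definition]>`, and the slice pattern `&[x]` matches only a one-element slice. A standalone sketch of the same idiom, using a plain `Vec` instead of `ArrayVec`:

fn single(values: Option<Vec<u32>>) -> Option<u32> {
    // `as_deref` turns Option<Vec<u32>> into Option<&[u32]>;
    // `&[x]` only matches a slice of length one, and `x` is copied out.
    if let Some(&[x]) = values.as_deref() {
        Some(x)
    } else {
        None
    }
}

fn main() {
    assert_eq!(single(Some(vec![7])), Some(7));
    assert_eq!(single(Some(vec![1, 2])), None);
    assert_eq!(single(None), None);
}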
@@ -42,74 +42,6 @@ pub enum Definition {
 }

 impl Definition {
-    pub fn from_token(
-        sema: &Semantics<RootDatabase>,
-        token: &SyntaxToken,
-    ) -> ArrayVec<Definition, 2> {
-        let parent = match token.parent() {
-            Some(parent) => parent,
-            None => return Default::default(),
-        };
-        // resolve derives if possible
-        if let Some(ident) = ast::Ident::cast(token.clone()) {
-            let attr = ast::TokenTree::cast(parent.clone())
-                .and_then(|tt| tt.parent_meta())
-                .and_then(|meta| meta.parent_attr());
-            if let Some(attr) = attr {
-                return sema
-                    .resolve_derive_ident(&attr, &ident)
-                    .map(Into::into)
-                    .into_iter()
-                    .collect();
-            }
-        }
-        Self::from_node(sema, &parent)
-    }
-
-    pub fn from_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> ArrayVec<Definition, 2> {
-        let mut res = ArrayVec::new();
-        (|| {
-            match_ast! {
-                match node {
-                    ast::Name(name) => {
-                        match NameClass::classify(&sema, &name)? {
-                            NameClass::Definition(it) | NameClass::ConstReference(it) => res.push(it),
-                            NameClass::PatFieldShorthand { local_def, field_ref } => {
-                                res.push(Definition::Local(local_def));
-                                res.push(Definition::Field(field_ref));
-                            }
-                        }
-                    },
-                    ast::NameRef(name_ref) => {
-                        match NameRefClass::classify(sema, &name_ref)? {
-                            NameRefClass::Definition(it) => res.push(it),
-                            NameRefClass::FieldShorthand { local_ref, field_ref } => {
-                                res.push(Definition::Local(local_ref));
-                                res.push(Definition::Field(field_ref));
-                            }
-                        }
-                    },
-                    ast::Lifetime(lifetime) => {
-                        let def = if let Some(x) = NameClass::classify_lifetime(&sema, &lifetime) {
-                            NameClass::defined(x)
-                        } else {
-                            NameRefClass::classify_lifetime(&sema, &lifetime).and_then(|class| match class {
-                                NameRefClass::Definition(it) => Some(it),
-                                _ => None,
-                            })
-                        };
-                        if let Some(def) = def {
-                            res.push(def);
-                        }
-                    },
-                    _ => (),
-                }
-            }
-            Some(())
-        })();
-        res
-    }
-
     pub fn canonical_module_path(&self, db: &RootDatabase) -> Option<impl Iterator<Item = Module>> {
         self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
     }
@@ -184,6 +116,65 @@ impl Definition {
     }
 }

+pub enum IdentClass {
+    NameClass(NameClass),
+    NameRefClass(NameRefClass),
+}
+
+impl IdentClass {
+    pub fn classify_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> Option<IdentClass> {
+        match_ast! {
+            match node {
+                ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
+                ast::NameRef(name_ref) => NameRefClass::classify(sema, &name_ref).map(IdentClass::NameRefClass),
+                ast::Lifetime(lifetime) => {
+                    NameClass::classify_lifetime(sema, &lifetime)
+                        .map(IdentClass::NameClass)
+                        .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
+                },
+                _ => None,
+            }
+        }
+    }
+
+    pub fn classify_token(
+        sema: &Semantics<RootDatabase>,
+        token: &SyntaxToken,
+    ) -> Option<IdentClass> {
+        let parent = token.parent()?;
+        // resolve derives if possible
+        if let Some(ident) = ast::Ident::cast(token.clone()) {
+            let attr = ast::TokenTree::cast(parent.clone())
+                .and_then(|tt| tt.parent_meta())
+                .and_then(|meta| meta.parent_attr());
+            if let Some(attr) = attr {
+                return NameRefClass::classify_derive(sema, &attr, &ident)
+                    .map(IdentClass::NameRefClass);
+            }
+        }
+        Self::classify_node(sema, &parent)
+    }
+
+    pub fn definitions(self) -> ArrayVec<Definition, 2> {
+        let mut res = ArrayVec::new();
+        match self {
+            IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+                res.push(it)
+            }
+            IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+                res.push(Definition::Local(local_def));
+                res.push(Definition::Field(field_ref));
+            }
+            IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+            IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+                res.push(Definition::Local(local_ref));
+                res.push(Definition::Field(field_ref));
+            }
+        }
+        res
+    }
+}
+
 /// On a first blush, a single `ast::Name` defines a single definition at some
 /// scope. That is, that, by just looking at the syntactical category, we can
 /// unambiguously define the semantic category.
@@ -465,6 +456,14 @@ impl NameRefClass {
             _ => None,
         }
     }
+
+    pub fn classify_derive(
+        sema: &Semantics<RootDatabase>,
+        attr: &ast::Attr,
+        ident: &ast::Ident,
+    ) -> Option<NameRefClass> {
+        sema.resolve_derive_ident(&attr, &ident).map(Definition::from).map(NameRefClass::Definition)
+    }
 }

 impl_from!(
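The `ArrayVec<Definition, 2>` returned by `IdentClass::definitions` reflects the shorthand cases in the diff above, where a single identifier names two things at once (a local and a field). A small piece of ordinary Rust showing the struct-literal and pattern shorthand that `FieldShorthand`/`PatFieldShorthand` classify; the `Foo` type is invented for illustration:

struct Foo { bar: u32 }

fn demo(bar: u32) -> u32 {
    // `bar` in the struct literal is field shorthand: it refers to the field
    // `Foo::bar` and to the local `bar` at the same time.
    let foo = Foo { bar };
    // Same in a pattern: this `bar` both matches the field and binds a new local.
    let Foo { bar } = foo;
    bar
}

fn main() {
    assert_eq!(demo(3), 3);
}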