Refactor primary IDE API

This introduces a new type: Semantics.
Semantics maps SyntaxNodes to various kinds of semantic info, such as
types, name resolution, or macro expansions.

To do so, Semantics maintains a HashMap that maps every node it has
seen to the file from which the node originated. This is enough to get
all the necessary hir bits from syntax alone.

Aleksey Kladov 2020-02-18 18:35:10 +01:00
parent 04deae3dba
commit c3a4c4429d
49 changed files with 1026 additions and 978 deletions
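
Not part of the diff: a minimal sketch of how a caller is expected to use
the new API after this change, pieced together from the call sites updated
below. Semantics::new, parse, descend_into_macros, and type_of_expr all
appear in this commit; the surrounding setup (a RootDatabase plus a
FilePosition, as in the ra_ide entry points) is assumed.

use hir::Semantics;
use ra_db::FilePosition;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, AstNode};

fn type_at(db: &RootDatabase, position: FilePosition) -> Option<String> {
    // One Semantics per analysis session: it records every syntax tree it
    // hands out, so later queries can map any node back to its file.
    let sema = Semantics::new(db);
    let file = sema.parse(position.file_id);
    let token = file.syntax().token_at_offset(position.offset).next()?;
    // Step transparently into macro expansions; expanded trees are cached too.
    let token = sema.descend_into_macros(token);
    let expr = token.parent().ancestors().find_map(ast::Expr::cast)?;
    // This one call replaces the SourceAnalyzer/SourceBinder plumbing that
    // the hunks below delete from each call site.
    let ty = sema.type_of_expr(&expr)?;
    Some(ty.display(db).to_string())
}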

===== file 1 =====

@@ -1,6 +1,6 @@
//! This module defines `AssistCtx` -- the API surface that is exposed to assists.
use hir::{InFile, SourceAnalyzer, SourceBinder};
use ra_db::{FileRange, SourceDatabase};
use hir::Semantics;
use ra_db::FileRange;
use ra_fmt::{leading_indent, reindent};
use ra_ide_db::RootDatabase;
use ra_syntax::{
@@ -74,29 +74,23 @@ pub(crate) type AssistHandler = fn(AssistCtx) -> Option<Assist>;
/// Note, however, that we don't actually use such two-phase logic at the
/// moment, because the LSP API is pretty awkward in this place, and it's much
/// easier to just compute the edit eagerly :-)
#[derive(Debug)]
#[derive(Clone)]
pub(crate) struct AssistCtx<'a> {
pub(crate) sema: &'a Semantics<'a, RootDatabase>,
pub(crate) db: &'a RootDatabase,
pub(crate) frange: FileRange,
source_file: SourceFile,
should_compute_edit: bool,
}
impl Clone for AssistCtx<'_> {
fn clone(&self) -> Self {
AssistCtx {
db: self.db,
frange: self.frange,
source_file: self.source_file.clone(),
should_compute_edit: self.should_compute_edit,
}
}
}
impl<'a> AssistCtx<'a> {
pub fn new(db: &RootDatabase, frange: FileRange, should_compute_edit: bool) -> AssistCtx {
let parse = db.parse(frange.file_id);
AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit }
pub fn new(
sema: &'a Semantics<'a, RootDatabase>,
frange: FileRange,
should_compute_edit: bool,
) -> AssistCtx<'a> {
let source_file = sema.parse(frange.file_id);
AssistCtx { sema, db: sema.db, frange, source_file, should_compute_edit }
}
pub(crate) fn add_assist(
@@ -138,18 +132,6 @@ impl<'a> AssistCtx<'a> {
pub(crate) fn covering_element(&self) -> SyntaxElement {
find_covering_element(self.source_file.syntax(), self.frange.range)
}
pub(crate) fn source_binder(&self) -> SourceBinder<'a, RootDatabase> {
SourceBinder::new(self.db)
}
pub(crate) fn source_analyzer(
&self,
node: &SyntaxNode,
offset: Option<TextUnit>,
) -> SourceAnalyzer {
let src = InFile::new(self.frange.file_id.into(), node);
self.source_binder().analyze(src, offset)
}
pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
find_covering_element(self.source_file.syntax(), range)
}

===== file 2 =====

@@ -1,15 +1,12 @@
//! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined.
use rustc_hash::FxHashMap;
use hir::{InFile, PathResolution};
use hir::{PathResolution, SemanticsScope};
use ra_ide_db::RootDatabase;
use ra_syntax::ast::{self, AstNode};
pub trait AstTransform<'a> {
fn get_substitution(
&self,
node: InFile<&ra_syntax::SyntaxNode>,
) -> Option<ra_syntax::SyntaxNode>;
fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a>;
fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a>
@@ -23,10 +20,7 @@ pub trait AstTransform<'a> {
struct NullTransformer;
impl<'a> AstTransform<'a> for NullTransformer {
fn get_substitution(
&self,
_node: InFile<&ra_syntax::SyntaxNode>,
) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution(&self, _node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
None
}
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -35,14 +29,16 @@ impl<'a> AstTransform<'a> for NullTransformer {
}
pub struct SubstituteTypeParams<'a> {
db: &'a RootDatabase,
source_scope: &'a SemanticsScope<'a, RootDatabase>,
substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
previous: Box<dyn AstTransform<'a> + 'a>,
}
impl<'a> SubstituteTypeParams<'a> {
pub fn for_trait_impl(
source_scope: &'a SemanticsScope<'a, RootDatabase>,
db: &'a RootDatabase,
// FIXME: there's implicit invariant that `trait_` and `source_scope` match...
trait_: hir::Trait,
impl_block: ast::ImplBlock,
) -> SubstituteTypeParams<'a> {
@@ -56,7 +52,7 @@ impl<'a> SubstituteTypeParams<'a> {
.zip(substs.into_iter())
.collect();
return SubstituteTypeParams {
db,
source_scope,
substs: substs_by_param,
previous: Box::new(NullTransformer),
};
@@ -80,15 +76,15 @@ impl<'a> SubstituteTypeParams<'a> {
}
fn get_substitution_inner(
&self,
node: InFile<&ra_syntax::SyntaxNode>,
node: &ra_syntax::SyntaxNode,
) -> Option<ra_syntax::SyntaxNode> {
let type_ref = ast::TypeRef::cast(node.value.clone())?;
let type_ref = ast::TypeRef::cast(node.clone())?;
let path = match &type_ref {
ast::TypeRef::PathType(path_type) => path_type.path()?,
_ => return None,
};
let analyzer = hir::SourceAnalyzer::new(self.db, node, None);
let resolution = analyzer.resolve_path(self.db, &path)?;
let path = hir::Path::from_ast(path)?;
let resolution = self.source_scope.resolve_hir_path(&path)?;
match resolution {
hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()),
_ => None,
@@ -97,10 +93,7 @@ impl<'a> SubstituteTypeParams<'a> {
}
impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
fn get_substitution(
&self,
node: InFile<&ra_syntax::SyntaxNode>,
) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
}
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -109,29 +102,34 @@ impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
}
pub struct QualifyPaths<'a> {
target_scope: &'a SemanticsScope<'a, RootDatabase>,
source_scope: &'a SemanticsScope<'a, RootDatabase>,
db: &'a RootDatabase,
from: Option<hir::Module>,
previous: Box<dyn AstTransform<'a> + 'a>,
}
impl<'a> QualifyPaths<'a> {
pub fn new(db: &'a RootDatabase, from: Option<hir::Module>) -> Self {
Self { db, from, previous: Box::new(NullTransformer) }
pub fn new(
target_scope: &'a SemanticsScope<'a, RootDatabase>,
source_scope: &'a SemanticsScope<'a, RootDatabase>,
db: &'a RootDatabase,
) -> Self {
Self { target_scope, source_scope, db, previous: Box::new(NullTransformer) }
}
fn get_substitution_inner(
&self,
node: InFile<&ra_syntax::SyntaxNode>,
node: &ra_syntax::SyntaxNode,
) -> Option<ra_syntax::SyntaxNode> {
// FIXME handle value ns?
let from = self.from?;
let p = ast::Path::cast(node.value.clone())?;
let from = self.target_scope.module()?;
let p = ast::Path::cast(node.clone())?;
if p.segment().and_then(|s| s.param_list()).is_some() {
// don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
return None;
}
let analyzer = hir::SourceAnalyzer::new(self.db, node, None);
let resolution = analyzer.resolve_path(self.db, &p)?;
let hir_path = hir::Path::from_ast(p.clone());
let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
match resolution {
PathResolution::Def(def) => {
let found_path = from.find_use_path(self.db, def)?;
@@ -140,7 +138,7 @@ impl<'a> QualifyPaths<'a> {
let type_args = p
.segment()
.and_then(|s| s.type_arg_list())
.map(|arg_list| apply(self, node.with_value(arg_list)));
.map(|arg_list| apply(self, arg_list));
if let Some(type_args) = type_args {
let last_segment = path.segment().unwrap();
path = path.with_segment(last_segment.with_type_args(type_args))
@@ -157,11 +155,11 @@ impl<'a> QualifyPaths<'a> {
}
}
pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>) -> N {
let syntax = node.value.syntax();
pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
let syntax = node.syntax();
let result = ra_syntax::algo::replace_descendants(syntax, &|element| match element {
ra_syntax::SyntaxElement::Node(n) => {
let replacement = transformer.get_substitution(node.with_value(&n))?;
let replacement = transformer.get_substitution(&n)?;
Some(replacement.into())
}
_ => None,
@@ -170,10 +168,7 @@ pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>
}
impl<'a> AstTransform<'a> for QualifyPaths<'a> {
fn get_substitution(
&self,
node: InFile<&ra_syntax::SyntaxNode>,
) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
}
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {

===== file 3 =====

@@ -51,14 +51,13 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> {
}
}
// Infer type
let db = ctx.db;
let analyzer = ctx.source_analyzer(stmt.syntax(), None);
let ty = analyzer.type_of(db, &expr)?;
let ty = ctx.sema.type_of_expr(&expr)?;
// Assist not applicable if the type is unknown
if ty.contains_unknown() {
return None;
}
let db = ctx.db;
ctx.add_assist(
AssistId("add_explicit_type"),
format!("Insert explicit type '{}'", ty.display(db)),

===== file 4 =====

@@ -1,4 +1,4 @@
use hir::{HasSource, InFile};
use hir::HasSource;
use ra_syntax::{
ast::{self, edit, make, AstNode, NameOwner},
SmolStr,
@@ -104,9 +104,7 @@ fn add_missing_impl_members_inner(
let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
let impl_item_list = impl_node.item_list()?;
let analyzer = ctx.source_analyzer(impl_node.syntax(), None);
let trait_ = resolve_target_trait(ctx.db, &analyzer, &impl_node)?;
let trait_ = resolve_target_trait(&ctx.sema, &impl_node)?;
let def_name = |item: &ast::ImplItem| -> Option<SmolStr> {
match item {
@@ -117,7 +115,7 @@ fn add_missing_impl_members_inner(
.map(|it| it.text().clone())
};
let missing_items = get_missing_impl_items(ctx.db, &analyzer, &impl_node)
let missing_items = get_missing_impl_items(&ctx.sema, &impl_node)
.iter()
.map(|i| match i {
hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value),
@@ -138,23 +136,17 @@ fn add_missing_impl_members_inner(
return None;
}
let db = ctx.db;
let file_id = ctx.frange.file_id;
let trait_file_id = trait_.source(db).file_id;
let sema = ctx.sema;
ctx.add_assist(AssistId(assist_id), label, |edit| {
let n_existing_items = impl_item_list.impl_items().count();
let module = hir::SourceAnalyzer::new(
db,
hir::InFile::new(file_id.into(), impl_node.syntax()),
None,
)
.module();
let ast_transform = QualifyPaths::new(db, module)
.or(SubstituteTypeParams::for_trait_impl(db, trait_, impl_node));
let source_scope = sema.scope_for_def(trait_);
let target_scope = sema.scope(impl_item_list.syntax());
let ast_transform = QualifyPaths::new(&target_scope, &source_scope, sema.db)
.or(SubstituteTypeParams::for_trait_impl(&source_scope, sema.db, trait_, impl_node));
let items = missing_items
.into_iter()
.map(|it| ast_transform::apply(&*ast_transform, InFile::new(trait_file_id, it)))
.map(|it| ast_transform::apply(&*ast_transform, it))
.map(|it| match it {
ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)),
_ => it,
@@ -181,9 +173,10 @@ fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn test_add_missing_impl_members() {
check_assist(

===== file 5 =====

@@ -1,5 +1,5 @@
use format_buf::format;
use hir::{Adt, InFile};
use hir::Adt;
use join_to_string::join;
use ra_syntax::{
ast::{
@@ -133,16 +133,11 @@ fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<a
let module = strukt.syntax().ancestors().find(|node| {
ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
})?;
let mut sb = ctx.source_binder();
let struct_def = {
let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
sb.to_def(src)?
};
let struct_def = ctx.sema.to_def(strukt)?;
let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
let blk = sb.to_def(src)?;
let blk = ctx.sema.to_def(&impl_blk)?;
// FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
// (we currently use the wrong type parameter)

===== file 6 =====

@@ -3,8 +3,8 @@ use crate::{
insert_use_statement, AssistId,
};
use hir::{
db::HirDatabase, AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution,
SourceAnalyzer, Trait, Type,
AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
Type,
};
use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
use ra_prof::profile;
@@ -78,14 +78,9 @@ impl AutoImportAssets {
fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> {
let syntax_under_caret = method_call.syntax().to_owned();
let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
let module_with_name_to_import = source_analyzer.module()?;
let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
Some(Self {
import_candidate: ImportCandidate::for_method_call(
&method_call,
&source_analyzer,
ctx.db,
)?,
import_candidate: ImportCandidate::for_method_call(&ctx.sema, &method_call)?,
module_with_name_to_import,
syntax_under_caret,
})
@@ -97,14 +92,9 @@ impl AutoImportAssets {
return None;
}
let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
let module_with_name_to_import = source_analyzer.module()?;
let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
Some(Self {
import_candidate: ImportCandidate::for_regular_path(
&path_under_caret,
&source_analyzer,
ctx.db,
)?,
import_candidate: ImportCandidate::for_regular_path(&ctx.sema, &path_under_caret)?,
module_with_name_to_import,
syntax_under_caret,
})
@@ -229,25 +219,23 @@ enum ImportCandidate {
impl ImportCandidate {
fn for_method_call(
sema: &Semantics<RootDatabase>,
method_call: &ast::MethodCallExpr,
source_analyzer: &SourceAnalyzer,
db: &impl HirDatabase,
) -> Option<Self> {
if source_analyzer.resolve_method_call(method_call).is_some() {
if sema.resolve_method_call(method_call).is_some() {
return None;
}
Some(Self::TraitMethod(
source_analyzer.type_of(db, &method_call.expr()?)?,
sema.type_of_expr(&method_call.expr()?)?,
method_call.name_ref()?.syntax().to_string(),
))
}
fn for_regular_path(
sema: &Semantics<RootDatabase>,
path_under_caret: &ast::Path,
source_analyzer: &SourceAnalyzer,
db: &impl HirDatabase,
) -> Option<Self> {
if source_analyzer.resolve_path(db, path_under_caret).is_some() {
if sema.resolve_path(path_under_caret).is_some() {
return None;
}
@@ -256,17 +244,15 @@ impl ImportCandidate {
let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
let qualifier_start_path =
qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
if let Some(qualifier_start_resolution) =
source_analyzer.resolve_path(db, &qualifier_start_path)
{
if let Some(qualifier_start_resolution) = sema.resolve_path(&qualifier_start_path) {
let qualifier_resolution = if qualifier_start_path == qualifier {
qualifier_start_resolution
} else {
source_analyzer.resolve_path(db, &qualifier)?
sema.resolve_path(&qualifier)?
};
if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution {
Some(ImportCandidate::TraitAssocItem(
assoc_item_path.ty(db),
assoc_item_path.ty(sema.db),
segment.syntax().to_string(),
))
} else {

===== file 7 =====

@@ -2,10 +2,11 @@
use std::iter;
use hir::{db::HirDatabase, Adt, HasSource};
use hir::{db::HirDatabase, Adt, HasSource, Semantics};
use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner};
use crate::{Assist, AssistCtx, AssistId};
use ra_ide_db::RootDatabase;
// Assist: fill_match_arms
//
@@ -46,10 +47,9 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> {
};
let expr = match_expr.expr()?;
let (enum_def, module) = {
let analyzer = ctx.source_analyzer(expr.syntax(), None);
(resolve_enum_def(ctx.db, &analyzer, &expr)?, analyzer.module()?)
};
let enum_def = resolve_enum_def(&ctx.sema, &expr)?;
let module = ctx.sema.scope(expr.syntax()).module()?;
let variants = enum_def.variants(ctx.db);
if variants.is_empty() {
return None;
@@ -81,18 +81,11 @@ fn is_trivial(arm: &ast::MatchArm) -> bool {
}
}
fn resolve_enum_def(
db: &impl HirDatabase,
analyzer: &hir::SourceAnalyzer,
expr: &ast::Expr,
) -> Option<hir::Enum> {
let expr_ty = analyzer.type_of(db, &expr)?;
let result = expr_ty.autoderef(db).find_map(|ty| match ty.as_adt() {
fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<hir::Enum> {
sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
Some(Adt::Enum(e)) => Some(e),
_ => None,
});
result
})
}
fn build_pat(

===== file 8 =====

@@ -44,8 +44,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
} else {
let_stmt.syntax().text_range()
};
let analyzer = ctx.source_analyzer(bind_pat.syntax(), None);
let refs = analyzer.find_all_refs(&bind_pat);
let refs = ctx.sema.find_all_refs(&bind_pat);
if refs.is_empty() {
return None;
};

===== file 9 =====

@@ -19,6 +19,7 @@ use ra_text_edit::TextEdit;
pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
pub use crate::handlers::replace_qualified_name_with_use::insert_use_statement;
use hir::Semantics;
/// Unique identifier of the assist, should not be shown to the user
/// directly.
@@ -63,7 +64,8 @@ pub struct ResolvedAssist {
/// Assists are returned in the "unresolved" state, that is only labels are
/// returned, without actual edits.
pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> {
let ctx = AssistCtx::new(db, range, false);
let sema = Semantics::new(db);
let ctx = AssistCtx::new(&sema, range, false);
handlers::all()
.iter()
.filter_map(|f| f(ctx.clone()))
@@ -77,7 +79,8 @@ pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabe
/// Assists are returned in the "resolved" state, that is with edit fully
/// computed.
pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> {
let ctx = AssistCtx::new(db, range, true);
let sema = Semantics::new(db);
let ctx = AssistCtx::new(&sema, range, true);
let mut a = handlers::all()
.iter()
.filter_map(|f| f(ctx.clone()))
@@ -165,6 +168,7 @@ mod helpers {
use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset};
use crate::{AssistCtx, AssistHandler};
use hir::Semantics;
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
let (mut db, file_id) = RootDatabase::with_single_file(text);
@@ -202,7 +206,8 @@ mod helpers {
let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range };
let assist_ctx = AssistCtx::new(&db, frange, true);
let sema = Semantics::new(&db);
let assist_ctx = AssistCtx::new(&sema, frange, true);
match (assist(assist_ctx), expected) {
(Some(assist), ExpectedResult::After(after)) => {

===== file 10 =====

@@ -1,16 +1,15 @@
//! Assorted functions shared by several assists.
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{
ast::{self, make, NameOwner},
AstNode, T,
};
use hir::db::HirDatabase;
use rustc_hash::FxHashSet;
pub fn get_missing_impl_items(
db: &impl HirDatabase,
analyzer: &hir::SourceAnalyzer,
sema: &Semantics<RootDatabase>,
impl_block: &ast::ImplBlock,
) -> Vec<hir::AssocItem> {
// Names must be unique between constants and functions. However, type aliases
@@ -42,15 +41,17 @@ pub fn get_missing_impl_items(
}
}
resolve_target_trait(db, analyzer, impl_block).map_or(vec![], |target_trait| {
resolve_target_trait(sema, impl_block).map_or(vec![], |target_trait| {
target_trait
.items(db)
.items(sema.db)
.iter()
.filter(|i| match i {
hir::AssocItem::Function(f) => !impl_fns_consts.contains(&f.name(db).to_string()),
hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(db).to_string()),
hir::AssocItem::Function(f) => {
!impl_fns_consts.contains(&f.name(sema.db).to_string())
}
hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()),
hir::AssocItem::Const(c) => c
.name(db)
.name(sema.db)
.map(|n| !impl_fns_consts.contains(&n.to_string()))
.unwrap_or_default(),
})
@@ -60,8 +61,7 @@ pub fn get_missing_impl_items(
}
pub(crate) fn resolve_target_trait(
db: &impl HirDatabase,
analyzer: &hir::SourceAnalyzer,
sema: &Semantics<RootDatabase>,
impl_block: &ast::ImplBlock,
) -> Option<hir::Trait> {
let ast_path = impl_block
@@ -70,7 +70,7 @@ pub(crate) fn resolve_target_trait(
.and_then(ast::PathType::cast)?
.path()?;
match analyzer.resolve_path(db, &ast_path) {
match sema.resolve_path(&ast_path) {
Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
_ => None,
}

===== file 11 =====

@@ -26,6 +26,7 @@ macro_rules! impl_froms {
}
}
mod semantics;
pub mod db;
pub mod source_analyzer;
pub mod source_binder;
@@ -45,8 +46,8 @@ pub use crate::{
StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
},
has_source::HasSource,
source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
source_binder::SourceBinder,
semantics::{original_range, Semantics, SemanticsScope},
source_analyzer::{PathResolution, ScopeEntryWithSyntax},
};
pub use hir_def::{

===== file 12 =====

@@ -0,0 +1,335 @@
//! See `Semantics`.
use std::{cell::RefCell, fmt, iter::successors};
use hir_def::{
resolver::{self, HasResolver, Resolver},
TraitId,
};
use ra_db::{FileId, FileRange};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextUnit};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
db::HirDatabase,
source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
source_binder::{ChildContainer, SourceBinder, ToDef},
Function, HirFileId, InFile, Local, MacroDef, Module, Name, Origin, Path, PathResolution,
ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
};
use ra_prof::profile;
/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
pub db: &'db DB,
pub(crate) sb: RefCell<SourceBinder>,
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Semantics {{ ... }}")
}
}
impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn new(db: &DB) -> Semantics<DB> {
let sb = RefCell::new(SourceBinder::new());
Semantics { db, sb, cache: RefCell::default() }
}
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
tree
}
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
let file_id = sa.expand(self.db, macro_call)?;
let node = self.db.parse_or_expand(file_id)?;
self.cache(node.clone(), file_id);
Some(node)
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
let parent = token.parent();
let parent = self.find_file(parent);
let sa = self.analyze2(parent.as_ref(), None);
let token = successors(Some(parent.with_value(token)), |token| {
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
let tt = macro_call.token_tree()?;
if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
return None;
}
let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
self.cache(find_root(&token.value.parent()), token.file_id);
Some(token)
})
.last()
.unwrap();
token.value
}
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
let node = self.find_file(node.clone());
original_range(self.db, node.as_ref())
}
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
let node = self.find_file(node);
node.ancestors_with_macros(self.db).map(|it| it.value)
}
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
self.analyze(expr.syntax()).type_of(self.db, &expr)
}
pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
}
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.analyze(call.syntax()).resolve_method_call(call)
}
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> {
self.analyze(field.syntax()).resolve_field(field)
}
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> {
self.analyze(field.syntax()).resolve_record_field(field)
}
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
self.analyze(record_lit.syntax()).resolve_record_literal(record_lit)
}
pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
let sa = self.analyze(macro_call.syntax());
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
sa.resolve_macro_call(self.db, macro_call)
}
pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.analyze(path.syntax()).resolve_path(self.db, path)
}
// FIXME: use this instead?
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax().clone()).with_value(src.clone());
let mut sb = self.sb.borrow_mut();
T::to_def(self.db, &mut sb, src)
}
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
let mut sb = self.sb.borrow_mut();
sb.to_module_def(self.db, file)
}
pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
let node = self.find_file(node.clone());
let resolver = self.analyze2(node.as_ref(), None).resolver;
SemanticsScope { db: self.db, resolver }
}
pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
let node = self.find_file(node.clone());
let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
SemanticsScope { db: self.db, resolver }
}
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
let resolver = def.id.resolver(self.db);
SemanticsScope { db: self.db, resolver }
}
// FIXME: we only use this in `inline_local_variable` assist, ideally, we
// should switch to general reference search infra there.
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
self.analyze(pat.syntax()).find_all_refs(pat)
}
fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
let src = self.find_file(node.clone());
self.analyze2(src.as_ref(), None)
}
fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
let _p = profile("Semantics::analyze2");
let container = match self.sb.borrow_mut().find_container(self.db, src) {
Some(it) => it,
None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
};
let resolver = match container {
ChildContainer::DefWithBodyId(def) => {
return SourceAnalyzer::new_for_body(self.db, def, src, offset)
}
ChildContainer::TraitId(it) => it.resolver(self.db),
ChildContainer::ImplId(it) => it.resolver(self.db),
ChildContainer::ModuleId(it) => it.resolver(self.db),
ChildContainer::EnumId(it) => it.resolver(self.db),
ChildContainer::VariantId(it) => it.resolver(self.db),
ChildContainer::GenericDefId(it) => it.resolver(self.db),
};
SourceAnalyzer::new_for_resolver(resolver, src)
}
fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
assert!(root_node.parent().is_none());
let mut cache = self.cache.borrow_mut();
let prev = cache.insert(root_node, file_id);
assert!(prev == None || prev == Some(file_id))
}
pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.find_file(node.clone());
}
fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
let cache = self.cache.borrow();
cache.get(root_node).copied()
}
fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
let root_node = find_root(&node);
let file_id = self.lookup(&root_node).unwrap_or_else(|| {
panic!(
"\n\nFailed to lookup {:?} in this Semantics.\n\
Make sure to use only query nodes, derived from this instance of Semantics.\n\
root node: {:?}\n\
known nodes: {}\n\n",
node,
root_node,
self.cache
.borrow()
.keys()
.map(|it| format!("{:?}", it))
.collect::<Vec<_>>()
.join(", ")
)
});
InFile::new(file_id, node)
}
}
fn find_root(node: &SyntaxNode) -> SyntaxNode {
node.ancestors().last().unwrap()
}
pub struct SemanticsScope<'a, DB> {
pub db: &'a DB,
resolver: Resolver,
}
impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
pub fn module(&self) -> Option<Module> {
Some(Module { id: self.resolver.module()? })
}
/// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
// FIXME: rename to visible_traits to not repeat scope?
pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
let resolver = &self.resolver;
resolver.traits_in_scope(self.db)
}
pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
let resolver = &self.resolver;
resolver.process_all_names(self.db, &mut |name, def| {
let def = match def {
resolver::ScopeDef::PerNs(it) => it.into(),
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
resolver::ScopeDef::Local(pat_id) => {
let parent = resolver.body_owner().unwrap().into();
ScopeDef::Local(Local { parent, pat_id })
}
};
f(name, def)
})
}
pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
resolve_hir_path(self.db, &self.resolver, path)
}
}
// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
return range;
}
if let Some(expansion) = node.file_id.expansion_info(db) {
if let Some(call_node) = expansion.call_node() {
return FileRange {
file_id: call_node.file_id.original_file(db),
range: call_node.value.text_range(),
};
}
}
FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
}
fn original_range_and_origin(
db: &impl HirDatabase,
node: InFile<&SyntaxNode>,
) -> Option<(FileRange, Origin)> {
let expansion = node.file_id.expansion_info(db)?;
// the input node has only one token ?
let single = node.value.first_token()? == node.value.last_token()?;
// FIXME: We should handle recursive macro expansions
let (range, origin) = node.value.descendants().find_map(|it| {
let first = it.first_token()?;
let last = it.last_token()?;
if !single && first == last {
return None;
}
// Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;
if first.file_id != last.file_id || first_origin != last_origin {
return None;
}
// FIXME: Add union method in TextRange
Some((
first.with_value(union_range(first.value.text_range(), last.value.text_range())),
first_origin,
))
})?;
return Some((
FileRange { file_id: range.file_id.original_file(db), range: range.value },
origin,
));
fn union_range(a: TextRange, b: TextRange) -> TextRange {
let start = a.start().min(b.start());
let end = a.end().max(b.end());
TextRange::from_to(start, end)
}
}

===== file 13 =====

@@ -14,29 +14,27 @@ use hir_def::{
BodySourceMap,
},
expr::{ExprId, PatId},
resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
AsMacroCall, DefWithBodyId, TraitId,
resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
AsMacroCall, DefWithBodyId,
};
use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
use ra_syntax::{
ast::{self, AstNode},
AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
};
use rustc_hash::FxHashSet;
use crate::{
db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
db::HirDatabase, Adt, Const, EnumVariant, Function, Local, MacroDef, Name, Path, Static,
Struct, Trait, Type, TypeAlias, TypeParam,
};
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself.
#[derive(Debug)]
pub struct SourceAnalyzer {
pub(crate) struct SourceAnalyzer {
file_id: HirFileId,
resolver: Resolver,
body_owner: Option<DefWithBody>,
pub(crate) resolver: Resolver,
body_source_map: Option<Arc<BodySourceMap>>,
infer: Option<Arc<InferenceResult>>,
scopes: Option<Arc<ExprScopes>>,
@@ -77,35 +75,7 @@ pub struct ReferenceDescriptor {
pub name: String,
}
#[derive(Debug)]
pub struct Expansion {
macro_call_id: MacroCallId,
}
impl Expansion {
pub fn map_token_down(
&self,
db: &impl HirDatabase,
token: InFile<&SyntaxToken>,
) -> Option<InFile<SyntaxToken>> {
let exp_info = self.file_id().expansion_info(db)?;
exp_info.map_token_down(token)
}
pub fn file_id(&self) -> HirFileId {
self.macro_call_id.as_file()
}
}
impl SourceAnalyzer {
pub fn new(
db: &impl HirDatabase,
node: InFile<&SyntaxNode>,
offset: Option<TextUnit>,
) -> SourceAnalyzer {
crate::source_binder::SourceBinder::new(db).analyze(node, offset)
}
pub(crate) fn new_for_body(
db: &impl HirDatabase,
def: DefWithBodyId,
@@ -121,7 +91,6 @@ impl SourceAnalyzer {
let resolver = resolver_for_scope(db, def, scope);
SourceAnalyzer {
resolver,
body_owner: Some(def.into()),
body_source_map: Some(source_map),
infer: Some(db.infer(def)),
scopes: Some(scopes),
@@ -135,7 +104,6 @@ impl SourceAnalyzer {
) -> SourceAnalyzer {
SourceAnalyzer {
resolver,
body_owner: None,
body_source_map: None,
infer: None,
scopes: None,
@@ -143,10 +111,6 @@ impl SourceAnalyzer {
}
}
pub fn module(&self) -> Option<crate::code_model::Module> {
Some(crate::code_model::Module { id: self.resolver.module()? })
}
fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
let src = InFile { file_id: self.file_id, value: expr };
self.body_source_map.as_ref()?.node_expr(src)
@@ -180,7 +144,7 @@ impl SourceAnalyzer {
TraitEnvironment::lower(db, &self.resolver)
}
pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
pub(crate) fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) {
self.body_source_map.as_ref()?.node_expr(expr.as_ref())?
} else {
@@ -192,24 +156,27 @@ impl SourceAnalyzer {
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
}
pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
pub(crate) fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
let pat_id = self.pat_id(pat)?;
let ty = self.infer.as_ref()?[pat_id].clone();
let environment = self.trait_env(db);
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
}
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
pub(crate) fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
let expr_id = self.expr_id(&call.clone().into())?;
self.infer.as_ref()?.method_resolution(expr_id).map(Function::from)
}
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
pub(crate) fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
let expr_id = self.expr_id(&field.clone().into())?;
self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
}
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
pub(crate) fn resolve_record_field(
&self,
field: &ast::RecordField,
) -> Option<crate::StructField> {
let expr_id = match field.expr() {
Some(it) => self.expr_id(&it)?,
None => {
@@ -220,17 +187,23 @@ impl SourceAnalyzer {
self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into())
}
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
pub(crate) fn resolve_record_literal(
&self,
record_lit: &ast::RecordLit,
) -> Option<crate::VariantDef> {
let expr_id = self.expr_id(&record_lit.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
}
pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> {
pub(crate) fn resolve_record_pattern(
&self,
record_pat: &ast::RecordPat,
) -> Option<crate::VariantDef> {
let pat_id = self.pat_id(&record_pat.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
}
pub fn resolve_macro_call(
pub(crate) fn resolve_macro_call(
&self,
db: &impl HirDatabase,
macro_call: InFile<&ast::MacroCall>,
@@ -240,52 +213,11 @@ impl SourceAnalyzer {
self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into())
}
pub fn resolve_hir_path(
pub(crate) fn resolve_path(
&self,
db: &impl HirDatabase,
path: &crate::Path,
path: &ast::Path,
) -> Option<PathResolution> {
let types =
self.resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
PathResolution::Def(Adt::from(it).into())
}
TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
});
let values =
self.resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: self.body_owner?, pat_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
};
Some(res)
});
let items = self
.resolver
.resolve_module_path_in_items(db, path.mod_path())
.take_types()
.map(|it| PathResolution::Def(it.into()));
types.or(values).or(items).or_else(|| {
self.resolver
.resolve_path_as_macro(db, path.mod_path())
.map(|def| PathResolution::Macro(def.into()))
})
}
pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
let expr_id = self.expr_id(&path_expr.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
@@ -300,7 +232,7 @@ impl SourceAnalyzer {
}
// This must be a normal source file rather than macro file.
let hir_path = crate::Path::from_ast(path.clone())?;
self.resolve_hir_path(db, &hir_path)
resolve_hir_path(db, &self.resolver, &hir_path)
}
fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
@@ -315,25 +247,9 @@ impl SourceAnalyzer {
})
}
pub fn process_all_names(&self, db: &impl HirDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
self.resolver.process_all_names(db, &mut |name, def| {
let def = match def {
resolver::ScopeDef::PerNs(it) => it.into(),
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
resolver::ScopeDef::Local(pat_id) => {
let parent = self.resolver.body_owner().unwrap().into();
ScopeDef::Local(Local { parent, pat_id })
}
};
f(name, def)
})
}
// FIXME: we only use this in `inline_local_variable` assist, ideally, we
// should switch to general reference search infra there.
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone())));
fn_def
@@ -351,19 +267,14 @@ impl SourceAnalyzer {
.collect()
}
/// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
pub fn traits_in_scope(&self, db: &impl HirDatabase) -> FxHashSet<TraitId> {
self.resolver.traits_in_scope(db)
}
pub fn expand(
pub(crate) fn expand(
&self,
db: &impl HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<Expansion> {
) -> Option<HirFileId> {
let macro_call_id =
macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?;
Some(Expansion { macro_call_id })
Some(macro_call_id.as_file())
}
}
@@ -409,6 +320,47 @@ fn scope_for_offset(
})
}
pub(crate) fn resolve_hir_path(
db: &impl HirDatabase,
resolver: &Resolver,
path: &crate::Path,
) -> Option<PathResolution> {
let types = resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()),
TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
});
let body_owner = resolver.body_owner();
let values = resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: body_owner?.into(), pat_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
};
Some(res)
});
let items = resolver
.resolve_module_path_in_items(db, path.mod_path())
.take_types()
.map(|it| PathResolution::Def(it.into()));
types.or(values).or(items).or_else(|| {
resolver
.resolve_path_as_macro(db, path.mod_path())
.map(|def| PathResolution::Macro(def.into()))
})
}
// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
fn adjust(

===== file 14 =====

@@ -5,112 +5,85 @@ use hir_def::{
child_by_source::ChildBySource,
dyn_map::DynMap,
keys::{self, Key},
resolver::{HasResolver, Resolver},
ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId,
StaticId, StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
};
use hir_expand::{name::AsName, AstId, InFile, MacroDefId, MacroDefKind};
use ra_db::FileId;
use ra_prof::profile;
use ra_syntax::{
ast::{self, NameOwner},
match_ast, AstNode, SyntaxNode, TextUnit,
match_ast, AstNode, SyntaxNode,
};
use rustc_hash::FxHashMap;
use crate::{db::HirDatabase, Local, Module, SourceAnalyzer, TypeParam};
use ra_db::FileId;
use crate::{db::HirDatabase, Local, Module, TypeParam};
pub struct SourceBinder<'a, DB> {
pub db: &'a DB,
pub struct SourceBinder {
child_by_source_cache: FxHashMap<ChildContainer, DynMap>,
}
impl<DB: HirDatabase> SourceBinder<'_, DB> {
pub fn new(db: &DB) -> SourceBinder<DB> {
SourceBinder { db, child_by_source_cache: FxHashMap::default() }
impl SourceBinder {
pub(crate) fn new() -> SourceBinder {
SourceBinder { child_by_source_cache: FxHashMap::default() }
}
pub fn analyze(
&mut self,
src: InFile<&SyntaxNode>,
offset: Option<TextUnit>,
) -> SourceAnalyzer {
let _p = profile("SourceBinder::analyzer");
let container = match self.find_container(src) {
Some(it) => it,
None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
};
let resolver = match container {
ChildContainer::DefWithBodyId(def) => {
return SourceAnalyzer::new_for_body(self.db, def, src, offset)
}
ChildContainer::TraitId(it) => it.resolver(self.db),
ChildContainer::ImplId(it) => it.resolver(self.db),
ChildContainer::ModuleId(it) => it.resolver(self.db),
ChildContainer::EnumId(it) => it.resolver(self.db),
ChildContainer::VariantId(it) => it.resolver(self.db),
ChildContainer::GenericDefId(it) => it.resolver(self.db),
};
SourceAnalyzer::new_for_resolver(resolver, src)
}
pub fn to_def<T: ToDef>(&mut self, src: InFile<T>) -> Option<T::Def> {
T::to_def(self, src)
}
pub fn to_module_def(&mut self, file: FileId) -> Option<Module> {
pub(crate) fn to_module_def(&mut self, db: &impl HirDatabase, file: FileId) -> Option<Module> {
let _p = profile("SourceBinder::to_module_def");
let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| {
let crate_def_map = self.db.crate_def_map(crate_id);
let (krate, local_id) = db.relevant_crates(file).iter().find_map(|&crate_id| {
let crate_def_map = db.crate_def_map(crate_id);
let local_id = crate_def_map.modules_for_file(file).next()?;
Some((crate_id, local_id))
})?;
Some(Module { id: ModuleId { krate, local_id } })
}
fn to_id<T: ToId>(&mut self, src: InFile<T>) -> Option<T::ID> {
T::to_id(self, src)
fn to_id<T: ToId>(&mut self, db: &impl HirDatabase, src: InFile<T>) -> Option<T::ID> {
T::to_id(db, self, src)
}
fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
for container in src.cloned().ancestors_with_macros(self.db).skip(1) {
pub(crate) fn find_container(
&mut self,
db: &impl HirDatabase,
src: InFile<&SyntaxNode>,
) -> Option<ChildContainer> {
for container in src.cloned().ancestors_with_macros(db).skip(1) {
let res: ChildContainer = match_ast! {
match (container.value) {
ast::TraitDef(it) => {
let def: TraitId = self.to_id(container.with_value(it))?;
let def: TraitId = self.to_id(db, container.with_value(it))?;
def.into()
},
ast::ImplBlock(it) => {
let def: ImplId = self.to_id(container.with_value(it))?;
let def: ImplId = self.to_id(db, container.with_value(it))?;
def.into()
},
ast::FnDef(it) => {
let def: FunctionId = self.to_id(container.with_value(it))?;
let def: FunctionId = self.to_id(db, container.with_value(it))?;
DefWithBodyId::from(def).into()
},
ast::StaticDef(it) => {
let def: StaticId = self.to_id(container.with_value(it))?;
let def: StaticId = self.to_id(db, container.with_value(it))?;
DefWithBodyId::from(def).into()
},
ast::ConstDef(it) => {
let def: ConstId = self.to_id(container.with_value(it))?;
let def: ConstId = self.to_id(db, container.with_value(it))?;
DefWithBodyId::from(def).into()
},
ast::EnumDef(it) => {
let def: EnumId = self.to_id(container.with_value(it))?;
let def: EnumId = self.to_id(db, container.with_value(it))?;
def.into()
},
ast::StructDef(it) => {
let def: StructId = self.to_id(container.with_value(it))?;
let def: StructId = self.to_id(db, container.with_value(it))?;
VariantId::from(def).into()
},
ast::UnionDef(it) => {
let def: UnionId = self.to_id(container.with_value(it))?;
let def: UnionId = self.to_id(db, container.with_value(it))?;
VariantId::from(def).into()
},
ast::Module(it) => {
let def: ModuleId = self.to_id(container.with_value(it))?;
let def: ModuleId = self.to_id(db, container.with_value(it))?;
def.into()
},
_ => { continue },
@@ -119,12 +92,11 @@ impl<DB: HirDatabase> SourceBinder<'_, DB> {
return Some(res);
}
let c = self.to_module_def(src.file_id.original_file(self.db))?;
let c = self.to_module_def(db, src.file_id.original_file(db))?;
Some(c.id.into())
}
fn child_by_source(&mut self, container: ChildContainer) -> &DynMap {
let db = self.db;
fn child_by_source(&mut self, db: &impl HirDatabase, container: ChildContainer) -> &DynMap {
self.child_by_source_cache.entry(container).or_insert_with(|| match container {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
ChildContainer::ModuleId(it) => it.child_by_source(db),
@@ -137,16 +109,20 @@ impl<DB: HirDatabase> SourceBinder<'_, DB> {
}
}
pub trait ToId: Sized {
pub(crate) trait ToId: Sized {
type ID: Sized + Copy + 'static;
fn to_id<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>)
-> Option<Self::ID>;
fn to_id<DB: HirDatabase>(
db: &DB,
sb: &mut SourceBinder,
src: InFile<Self>,
) -> Option<Self::ID>;
}
pub trait ToDef: Sized + AstNode + 'static {
type Def;
fn to_def<DB: HirDatabase>(
sb: &mut SourceBinder<'_, DB>,
db: &DB,
sb: &mut SourceBinder,
src: InFile<Self>,
) -> Option<Self::Def>;
}
@@ -155,9 +131,9 @@ macro_rules! to_def_impls {
($(($def:path, $ast:path)),* ,) => {$(
impl ToDef for $ast {
type Def = $def;
fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>)
fn to_def<DB: HirDatabase>(db: &DB, sb: &mut SourceBinder, src: InFile<Self>)
-> Option<Self::Def>
{ sb.to_id(src).map(Into::into) }
{ sb.to_id(db, src).map(Into::into) }
}
)*}
}
@@ -179,7 +155,7 @@ to_def_impls![
];
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum ChildContainer {
pub(crate) enum ChildContainer {
DefWithBodyId(DefWithBodyId),
ModuleId(ModuleId),
TraitId(TraitId),
@@ -201,7 +177,7 @@ impl_froms! {
GenericDefId
}
pub trait ToIdByKey: Sized + AstNode + 'static {
pub(crate) trait ToIdByKey: Sized + AstNode + 'static {
type ID: Sized + Copy + 'static;
const KEY: Key<Self, Self::ID>;
}
@@ -209,11 +185,11 @@ pub trait ToIdByKey: Sized + AstNode + 'static {
impl<T: ToIdByKey> ToId for T {
type ID = <T as ToIdByKey>::ID;
fn to_id<DB: HirDatabase>(
sb: &mut SourceBinder<'_, DB>,
db: &DB,
sb: &mut SourceBinder,
src: InFile<Self>,
) -> Option<Self::ID> {
let container = sb.find_container(src.as_ref().map(|it| it.syntax()))?;
let db = sb.db;
let container = sb.find_container(db, src.as_ref().map(|it| it.syntax()))?;
let dyn_map =
&*sb.child_by_source_cache.entry(container).or_insert_with(|| match container {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
@@ -255,15 +231,15 @@ to_id_key_impls![
impl ToId for ast::MacroCall {
type ID = MacroDefId;
fn to_id<DB: HirDatabase>(
sb: &mut SourceBinder<'_, DB>,
db: &DB,
sb: &mut SourceBinder,
src: InFile<Self>,
) -> Option<Self::ID> {
let kind = MacroDefKind::Declarative;
let krate = sb.to_module_def(src.file_id.original_file(sb.db))?.id.krate;
let krate = sb.to_module_def(db, src.file_id.original_file(db))?.id.krate;
let ast_id =
Some(AstId::new(src.file_id, sb.db.ast_id_map(src.file_id).ast_id(&src.value)));
let ast_id = Some(AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value)));
Some(MacroDefId { krate: Some(krate), ast_id, kind })
}
@@ -272,20 +248,20 @@ impl ToId for ast::MacroCall {
impl ToDef for ast::BindPat {
type Def = Local;
fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) -> Option<Local> {
fn to_def<DB: HirDatabase>(db: &DB, sb: &mut SourceBinder, src: InFile<Self>) -> Option<Local> {
let file_id = src.file_id;
let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| {
let res = match_ast! {
match it {
ast::ConstDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::StaticDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::ConstDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::StaticDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
_ => return None,
}
};
Some(res)
})?;
let (_body, source_map) = sb.db.body_with_source_map(parent);
let (_body, source_map) = db.body_with_source_map(parent);
let src = src.map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?;
Some(Local { parent: parent.into(), pat_id })
@@ -296,26 +272,26 @@ impl ToDef for ast::TypeParam {
type Def = TypeParam;
fn to_def<DB: HirDatabase>(
sb: &mut SourceBinder<'_, DB>,
db: &DB,
sb: &mut SourceBinder,
src: InFile<ast::TypeParam>,
) -> Option<TypeParam> {
let mut sb = SourceBinder::new(sb.db);
let file_id = src.file_id;
let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| {
let res = match_ast! {
match it {
ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::StructDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::EnumDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::TraitDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::TypeAliasDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::ImplBlock(value) => { sb.to_id(InFile { value, file_id})?.into() },
ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::StructDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::EnumDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::TraitDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::TypeAliasDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
ast::ImplBlock(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
_ => return None,
}
};
Some(res)
})?;
let &id = sb.child_by_source(parent.into())[keys::TYPE_PARAM].get(&src)?;
let &id = sb.child_by_source(db, parent.into())[keys::TYPE_PARAM].get(&src)?;
Some(TypeParam { id })
}
}
@@ -324,7 +300,8 @@ impl ToId for ast::Module {
type ID = ModuleId;
fn to_id<DB: HirDatabase>(
sb: &mut SourceBinder<'_, DB>,
db: &DB,
sb: &mut SourceBinder,
src: InFile<ast::Module>,
) -> Option<ModuleId> {
{
@@ -333,7 +310,7 @@ impl ToId for ast::Module {
.as_ref()
.map(|it| it.syntax())
.cloned()
.ancestors_with_macros(sb.db)
.ancestors_with_macros(db)
.skip(1)
.find_map(|it| {
let m = ast::Module::cast(it.value.clone())?;
@@ -341,15 +318,15 @@ impl ToId for ast::Module {
});
let parent_module = match parent_declaration {
Some(parent_declaration) => sb.to_id(parent_declaration)?,
Some(parent_declaration) => sb.to_id(db, parent_declaration)?,
None => {
let file_id = src.file_id.original_file(sb.db);
sb.to_module_def(file_id)?.id
let file_id = src.file_id.original_file(db);
sb.to_module_def(db, file_id)?.id
}
};
let child_name = src.value.name()?.as_name();
let def_map = sb.db.crate_def_map(parent_module.krate);
let def_map = db.crate_def_map(parent_module.krate);
let child_id = *def_map[parent_module.local_id].children.get(&child_name)?;
Some(ModuleId { krate: parent_module.krate, local_id: child_id })
}

===== file 15 =====

@@ -2,13 +2,13 @@
use indexmap::IndexMap;
use hir::db::AstDatabase;
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, match_ast, AstNode, TextRange};
use crate::{
call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition,
references, FilePosition, NavigationTarget, RangeInfo,
call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition,
NavigationTarget, RangeInfo,
};
#[derive(Debug, Clone)]
@@ -38,30 +38,31 @@
}
pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
let sema = Semantics::new(db);
// 1. Find all refs
// 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply.
// 3. Add ranges relative to the start of the fndef.
let refs = references::find_all_refs(db, position, None)?;
let mut calls = CallLocations::default();
let mut sb = hir::SourceBinder::new(db);
for reference in refs.info.references() {
let file_id = reference.file_range.file_id;
let file = db.parse_or_expand(file_id.into())?;
let file = sema.parse(file_id);
let file = file.syntax();
let token = file.token_at_offset(reference.file_range.range.start()).next()?;
let token = descend_into_macros(db, file_id, token);
let syntax = token.value.parent();
let token = sema.descend_into_macros(token);
let syntax = token.parent();
// This target is the containing function
if let Some(nav) = syntax.ancestors().find_map(|node| {
match_ast! {
match node {
ast::FnDef(it) => {
let def = sb.to_def(token.with_value(it))?;
Some(def.to_nav(sb.db))
let def = sema.to_def(&it)?;
Some(def.to_nav(sema.db))
},
_ => { None },
_ => None,
}
}
}) {
@@ -74,11 +75,13 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
}
pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
let sema = Semantics::new(db);
let file_id = position.file_id;
let file = db.parse_or_expand(file_id.into())?;
let file = sema.parse(file_id);
let file = file.syntax();
let token = file.token_at_offset(position.offset).next()?;
let token = descend_into_macros(db, file_id, token);
let syntax = token.value.parent();
let token = sema.descend_into_macros(token);
let syntax = token.parent();
let mut calls = CallLocations::default();
@ -87,14 +90,11 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
.filter_map(|node| FnCallNode::with_node_exact(&node))
.filter_map(|call_node| {
let name_ref = call_node.name_ref()?;
let name_ref = token.with_value(name_ref.syntax());
let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
if let Some(func_target) = match &call_node {
FnCallNode::CallExpr(expr) => {
//FIXME: Type::as_callable is broken
let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
match callable_def {
hir::CallableDef::FunctionId(it) => {
let fn_def: hir::Function = it.into();
@ -105,15 +105,15 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
}
}
FnCallNode::MethodCallExpr(expr) => {
let function = analyzer.resolve_method_call(&expr)?;
let function = sema.resolve_method_call(&expr)?;
Some(function.to_nav(db))
}
FnCallNode::MacroCallExpr(expr) => {
let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
FnCallNode::MacroCallExpr(macro_call) => {
let macro_def = sema.resolve_macro_call(&macro_call)?;
Some(macro_def.to_nav(db))
}
} {
Some((func_target, name_ref.value.text_range()))
Some((func_target, name_ref.syntax().text_range()))
} else {
None
}
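
Both functions above now follow one Semantics workflow: parse the file, pick the token at the offset, descend into macro expansions, then walk ancestors. A hedged sketch of that flow, using only calls that appear in this diff (`Semantics::new`, `parse`, `descend_into_macros`, `to_def`):

use hir::Semantics;
use ra_db::FileId;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, AstNode, TextUnit};

// Sketch: find the hir::Function containing an offset, looking through
// macro expansions the way incoming_calls/outgoing_calls do above.
fn containing_fn(db: &RootDatabase, file_id: FileId, offset: TextUnit) -> Option<hir::Function> {
    let sema = Semantics::new(db);
    let file = sema.parse(file_id);
    let token = file.syntax().token_at_offset(offset).next()?;
    let token = sema.descend_into_macros(token);
    let fn_def = token.parent().ancestors().find_map(ast::FnDef::cast)?;
    sema.to_def(&fn_def)
}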

View file

@ -1,5 +1,5 @@
//! FIXME: write short doc here
use hir::db::AstDatabase;
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{
ast::{self, ArgListOwner},
@ -7,24 +7,23 @@ use ra_syntax::{
};
use test_utils::tested_by;
use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature};
use crate::{CallInfo, FilePosition, FunctionSignature};
/// Computes parameter information for the given call expression.
pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
let file = db.parse_or_expand(position.file_id.into())?;
let sema = Semantics::new(db);
let file = sema.parse(position.file_id);
let file = file.syntax();
let token = file.token_at_offset(position.offset).next()?;
let token = descend_into_macros(db, position.file_id, token);
let token = sema.descend_into_macros(token);
// Find the calling expression and its NameRef
let calling_node = FnCallNode::with_node(&token.value.parent())?;
let name_ref = calling_node.name_ref()?;
let name_ref = token.with_value(name_ref.syntax());
let calling_node = FnCallNode::with_node(&token.parent())?;
let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
let (mut call_info, has_self) = match &calling_node {
FnCallNode::CallExpr(expr) => {
FnCallNode::CallExpr(call) => {
//FIXME: Type::as_callable is broken
let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?;
match callable_def {
hir::CallableDef::FunctionId(it) => {
let fn_def = it.into();
@ -36,12 +35,12 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
}
}
}
FnCallNode::MethodCallExpr(expr) => {
let function = analyzer.resolve_method_call(&expr)?;
FnCallNode::MethodCallExpr(method_call) => {
let function = sema.resolve_method_call(&method_call)?;
(CallInfo::with_fn(db, function), function.has_self_param(db))
}
FnCallNode::MacroCallExpr(expr) => {
let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
FnCallNode::MacroCallExpr(macro_call) => {
let macro_def = sema.resolve_macro_call(&macro_call)?;
(CallInfo::with_macro(db, macro_def)?, false)
}
};
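
call_info and outgoing_calls share the callable dispatch above. A condensed, hedged sketch of just that part; the closure caveat is the diff's own FIXME:

use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::ast;

// Sketch: classify what a plain call expression invokes, as in the
// FnCallNode::CallExpr arms above.
fn callable_kind(sema: &Semantics<RootDatabase>, call: &ast::CallExpr) -> Option<&'static str> {
    // FIXME (from the diff): Type::as_callable is broken for closures.
    let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?;
    Some(match callable_def {
        hir::CallableDef::FunctionId(_) => "free function or method",
        hir::CallableDef::StructId(_) => "tuple-struct constructor",
        hir::CallableDef::EnumVariantId(_) => "tuple-variant constructor",
    })
}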

View file

@ -17,7 +17,6 @@ mod complete_postfix;
mod complete_macro_in_item_position;
mod complete_trait_impl;
use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
#[cfg(test)]
@ -57,8 +56,7 @@ pub use crate::completion::completion_item::{
/// identifier prefix/fuzzy match should be done higher in the stack, together
/// with ordering of completions (currently this is done by the client).
pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> {
let original_parse = db.parse(position.file_id);
let ctx = CompletionContext::new(db, &original_parse, position)?;
let ctx = CompletionContext::new(db, position)?;
let mut acc = Completions::default();

View file

@ -16,7 +16,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
_ => return,
};
let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
Some(ty) => ty,
_ => return,
};
@ -55,7 +55,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
if let Some(krate) = ctx.module.map(|it| it.krate()) {
let mut seen_methods = FxHashSet::default();
let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db);
let traits_in_scope = ctx.scope().traits_in_scope();
receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
acc.add_function(ctx, func);

View file

@ -5,7 +5,7 @@ use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) {
// Show only macros at the top level.
if ctx.is_new_item {
ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
ctx.scope().process_all_names(&mut |name, res| {
if let hir::ScopeDef::MacroDef(mac) = res {
acc.add_macro(ctx, Some(name.to_string()), mac);
}

View file

@ -11,7 +11,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
Some(path) => path.clone(),
_ => return,
};
let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) {
let def = match ctx.scope().resolve_hir_path(&path) {
Some(PathResolution::Def(def)) => def,
_ => return,
};
@ -49,7 +49,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
// FIXME: complete T::AssocType
let krate = ctx.module.map(|m| m.krate());
if let Some(krate) = krate {
let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db);
let traits_in_scope = ctx.scope().traits_in_scope();
ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
match item {
hir::AssocItem::Function(func) => {

View file

@ -9,7 +9,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
}
// FIXME: ideally, we should look at the type we are matching against and
// suggest variants + auto-imports
ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
ctx.scope().process_all_names(&mut |name, res| {
let def = match &res {
hir::ScopeDef::ModuleDef(def) => def,
_ => return,

View file

@ -29,7 +29,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
dot_receiver.syntax().text().to_string()
};
let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
Some(it) => it,
None => return,
};

View file

@ -5,10 +5,7 @@ use crate::completion::{CompletionContext, Completions};
/// Complete fields in record literals.
pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) {
let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| {
Some((
ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
ctx.analyzer.resolve_record_literal(it)?,
))
Some((ctx.sema.type_of_expr(&it.clone().into())?, ctx.sema.resolve_record_literal(it)?))
}) {
Some(it) => it,
_ => return,

View file

@ -4,10 +4,7 @@ use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) {
let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| {
Some((
ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
ctx.analyzer.resolve_record_pattern(it)?,
))
Some((ctx.sema.type_of_pat(&it.clone().into())?, ctx.sema.resolve_record_pattern(it)?))
}) {
Some(it) => it,
_ => return,

View file

@ -7,9 +7,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
return;
}
ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
acc.add_resolution(ctx, name.to_string(), &res)
});
ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res));
}
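
The completion hunks above all swap `ctx.analyzer.*` for `ctx.scope().*`; `CompletionContext::scope`, shown later in this diff, builds a `SemanticsScope` from the token's parent node and the offset. A hedged sketch of name collection through that API:

use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{SyntaxNode, TextUnit};

// Sketch: gather every name visible at `offset`, the way the
// process_all_names callers above do.
fn visible_names(sema: &Semantics<RootDatabase>, node: &SyntaxNode, offset: TextUnit) -> Vec<String> {
    let scope = sema.scope_at_offset(node, offset);
    let mut names = Vec::new();
    scope.process_all_names(&mut |name, _res| names.push(name.to_string()));
    names
}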
#[cfg(test)]

View file

@ -64,11 +64,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) {
match trigger.kind() {
SyntaxKind::FN_DEF => {
for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
.iter()
.filter_map(|item| match item {
for missing_fn in
get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
match item {
hir::AssocItem::Function(fn_item) => Some(fn_item),
_ => None,
}
})
{
add_function_impl(&trigger, acc, ctx, &missing_fn);
@ -76,11 +77,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
}
SyntaxKind::TYPE_ALIAS_DEF => {
for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
.iter()
.filter_map(|item| match item {
for missing_fn in
get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
match item {
hir::AssocItem::TypeAlias(type_item) => Some(type_item),
_ => None,
}
})
{
add_type_alias_impl(&trigger, acc, ctx, &missing_fn);
@ -88,11 +90,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
}
SyntaxKind::CONST_DEF => {
for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
.iter()
.filter_map(|item| match item {
for missing_fn in
get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
match item {
hir::AssocItem::Const(const_item) => Some(const_item),
_ => None,
}
})
{
add_const_impl(&trigger, acc, ctx, &missing_fn);

View file

@ -1,9 +1,11 @@
//! FIXME: write short doc here
use hir::{Semantics, SemanticsScope};
use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::{find_covering_element, find_node_at_offset},
ast, AstNode, Parse, SourceFile,
ast, AstNode, SourceFile,
SyntaxKind::*,
SyntaxNode, SyntaxToken, TextRange, TextUnit,
};
@ -15,8 +17,8 @@ use crate::FilePosition;
/// exactly is the cursor, syntax-wise.
#[derive(Debug)]
pub(crate) struct CompletionContext<'a> {
pub(super) sema: Semantics<'a, RootDatabase>,
pub(super) db: &'a RootDatabase,
pub(super) analyzer: hir::SourceAnalyzer,
pub(super) offset: TextUnit,
pub(super) token: SyntaxToken,
pub(super) module: Option<hir::Module>,
@ -51,20 +53,26 @@ pub(crate) struct CompletionContext<'a> {
impl<'a> CompletionContext<'a> {
pub(super) fn new(
db: &'a RootDatabase,
original_parse: &'a Parse<ast::SourceFile>,
position: FilePosition,
) -> Option<CompletionContext<'a>> {
let mut sb = hir::SourceBinder::new(db);
let module = sb.to_module_def(position.file_id);
let token =
original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
let analyzer = sb.analyze(
hir::InFile::new(position.file_id.into(), &token.parent()),
Some(position.offset),
);
let sema = Semantics::new(db);
let original_file = sema.parse(position.file_id);
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file_with_fake_ident = {
let parse = db.parse(position.file_id);
let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
parse.reparse(&edit).tree()
};
let module = sema.to_module_def(position.file_id);
let token = original_file.syntax().token_at_offset(position.offset).left_biased()?;
let mut ctx = CompletionContext {
sema,
db,
analyzer,
token,
offset: position.offset,
module,
@ -87,7 +95,7 @@ impl<'a> CompletionContext<'a> {
has_type_args: false,
dot_receiver_is_ambiguous_float_literal: false,
};
ctx.fill(&original_parse, position.offset);
ctx.fill(&original_file, file_with_fake_ident, position.offset);
Some(ctx)
}
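
The "intellijRulezz" insertion above is what makes completion work mid-typing: reparsing with a fake identifier at the cursor yields a syntactically valid tree for context detection, while completions themselves are computed against the original file. A hedged sketch of just that step; `db.parse` and `Parse::reparse` are as used above, and the `ra_text_edit` crate path for `AtomTextEdit` is an assumption:

use ra_db::{FileId, SourceDatabase};
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, TextUnit};
use ra_text_edit::AtomTextEdit; // assumed import path

// Sketch: build the file_with_fake_ident used for context analysis.
fn parse_with_fake_ident(db: &RootDatabase, file_id: FileId, offset: TextUnit) -> ast::SourceFile {
    let parse = db.parse(file_id);
    // A plain identifier keeps the tree valid even when the user has
    // typed only a partial expression at `offset`.
    let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
    parse.reparse(&edit).tree()
}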
@ -100,29 +108,33 @@ impl<'a> CompletionContext<'a> {
}
}
fn fill(&mut self, original_parse: &'a Parse<ast::SourceFile>, offset: TextUnit) {
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file = {
let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
original_parse.reparse(&edit).tree()
};
pub(crate) fn scope(&self) -> SemanticsScope<'_, RootDatabase> {
self.sema.scope_at_offset(&self.token.parent(), self.offset)
}
fn fill(
&mut self,
original_file: &ast::SourceFile,
file_with_fake_ident: ast::SourceFile,
offset: TextUnit,
) {
// First, let's try to complete a reference to some declaration.
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
if let Some(name_ref) =
find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
{
// Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
// See RFC#1685.
if is_node::<ast::Param>(name_ref.syntax()) {
self.is_param = true;
return;
}
self.classify_name_ref(original_parse.tree(), name_ref);
self.classify_name_ref(original_file, name_ref);
}
// Otherwise, see if this is a declaration. We can use heuristics to
// suggest declaration names, see `CompletionKind::Magic`.
if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset)
{
if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
let parent = bind_pat.syntax().parent();
if parent.clone().and_then(ast::MatchArm::cast).is_some()
@ -136,13 +148,12 @@ impl<'a> CompletionContext<'a> {
return;
}
if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
self.record_lit_pat =
find_node_at_offset(original_parse.tree().syntax(), self.offset);
self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset);
}
}
}
fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) {
self.name_ref_syntax =
find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
let name_range = name_ref.syntax().text_range();

View file

@ -2,7 +2,10 @@
use std::cell::RefCell;
use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink};
use hir::{
diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink},
Semantics,
};
use itertools::Itertools;
use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt};
use ra_ide_db::RootDatabase;
@ -24,7 +27,7 @@ pub enum Severity {
pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
let _p = profile("diagnostics");
let mut sb = hir::SourceBinder::new(db);
let sema = Semantics::new(db);
let parse = db.parse(file_id);
let mut res = Vec::new();
@ -110,7 +113,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
fix: Some(fix),
})
});
if let Some(m) = sb.to_module_def(file_id) {
if let Some(m) = sema.to_module_def(file_id) {
m.diagnostics(db, &mut sink);
};
drop(sink);

View file

@ -1,7 +1,7 @@
//! FIXME: write short doc here
use either::Either;
use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource};
use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource};
use ra_db::{FileId, SourceDatabase};
use ra_ide_db::RootDatabase;
use ra_syntax::{
@ -11,7 +11,11 @@ use ra_syntax::{
TextRange,
};
use crate::{expand::original_range, references::NameDefinition, FileSymbol};
use crate::{
// expand::original_range,
references::NameDefinition,
FileSymbol,
};
use super::short_label::ShortLabel;

View file

@ -1,102 +0,0 @@
//! Utilities to work with files produced by macros.
use std::iter::successors;
use hir::{InFile, Origin};
use ra_db::FileId;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange};
use crate::FileRange;
pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange {
if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
return range;
}
if let Some(expansion) = node.file_id.expansion_info(db) {
if let Some(call_node) = expansion.call_node() {
return FileRange {
file_id: call_node.file_id.original_file(db),
range: call_node.value.text_range(),
};
}
}
FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
}
fn original_range_and_origin(
db: &RootDatabase,
node: InFile<&SyntaxNode>,
) -> Option<(FileRange, Origin)> {
let expansion = node.file_id.expansion_info(db)?;
// does the input node have only one token?
let single = node.value.first_token()? == node.value.last_token()?;
// FIXME: We should handle recursive macro expansions
let (range, origin) = node.value.descendants().find_map(|it| {
let first = it.first_token()?;
let last = it.last_token()?;
if !single && first == last {
return None;
}
// Try to map the first and last tokens of the node and, if successful, return the union range of the mapped tokens
let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;
if first.file_id != last.file_id || first_origin != last_origin {
return None;
}
// FIXME: Add union method in TextRange
Some((
first.with_value(union_range(first.value.text_range(), last.value.text_range())),
first_origin,
))
})?;
return Some((
FileRange { file_id: range.file_id.original_file(db), range: range.value },
origin,
));
fn union_range(a: TextRange, b: TextRange) -> TextRange {
let start = a.start().min(b.start());
let end = a.end().max(b.end());
TextRange::from_to(start, end)
}
}
pub(crate) fn descend_into_macros(
db: &RootDatabase,
file_id: FileId,
token: SyntaxToken,
) -> InFile<SyntaxToken> {
let src = InFile::new(file_id.into(), token);
let source_analyzer =
hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None);
descend_into_macros_with_analyzer(db, &source_analyzer, src)
}
pub(crate) fn descend_into_macros_with_analyzer(
db: &RootDatabase,
source_analyzer: &hir::SourceAnalyzer,
src: InFile<SyntaxToken>,
) -> InFile<SyntaxToken> {
successors(Some(src), |token| {
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
let tt = macro_call.token_tree()?;
if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
return None;
}
let exp = source_analyzer.expand(db, token.with_value(&macro_call))?;
exp.map_token_down(db, token.as_ref())
})
.last()
.unwrap()
}
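
The deleted helper above is a fixed-point iteration: keep mapping the token down into macro expansions while an enclosing macro call covers it, then keep the last successful step. The same iterator shape in isolation, as a runnable illustration on plain numbers (not the rust-analyzer API):

use std::iter::successors;

// Apply `step` while it returns Some and keep the last value, exactly
// like `successors(...).last().unwrap()` in the removed helper.
fn descend<T: Copy>(start: T, step: impl Fn(T) -> Option<T>) -> T {
    successors(Some(start), |&v| step(v)).last().unwrap()
}

fn main() {
    // halve while even: 40 -> 20 -> 10 -> 5
    assert_eq!(descend(40u32, |n| if n % 2 == 0 { Some(n / 2) } else { None }), 5);
}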

View file

@ -1,7 +1,6 @@
//! This modules implements "expand macro" functionality in the IDE
use hir::db::AstDatabase;
use ra_db::SourceDatabase;
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::{find_node_at_offset, replace_descendants},
@ -17,13 +16,12 @@ pub struct ExpandedMacro {
}
pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
let parse = db.parse(position.file_id);
let file = parse.tree();
let sema = Semantics::new(db);
let file = sema.parse(position.file_id);
let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?;
let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;
let source = hir::InFile::new(position.file_id.into(), mac.syntax());
let expanded = expand_macro_recur(db, source, source.with_value(&mac))?;
let expanded = expand_macro_recur(&sema, &mac)?;
// FIXME:
// macro expansion may lose all whitespace information
@ -33,21 +31,16 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
}
fn expand_macro_recur(
db: &RootDatabase,
source: hir::InFile<&SyntaxNode>,
macro_call: hir::InFile<&ast::MacroCall>,
sema: &Semantics<RootDatabase>,
macro_call: &ast::MacroCall,
) -> Option<SyntaxNode> {
let analyzer = hir::SourceAnalyzer::new(db, source, None);
let expansion = analyzer.expand(db, macro_call)?;
let macro_file_id = expansion.file_id();
let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
let mut expanded = sema.expand(macro_call)?;
let children = expanded.descendants().filter_map(ast::MacroCall::cast);
let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default();
for child in children.into_iter() {
let node = hir::InFile::new(macro_file_id, &child);
if let Some(new_node) = expand_macro_recur(db, source, node) {
if let Some(new_node) = expand_macro_recur(sema, &child) {
// Replace the whole node if it is the root:
// `replace_descendants` will not replace the parent node,
// but `SyntaxNode::descendants` includes itself
@ -120,10 +113,12 @@ fn insert_whitespaces(syn: SyntaxNode) -> String {
#[cfg(test)]
mod tests {
use super::*;
use crate::mock_analysis::analysis_and_position;
use insta::assert_snapshot;
use crate::mock_analysis::analysis_and_position;
use super::*;
fn check_expand_macro(fixture: &str) -> ExpandedMacro {
let (analysis, pos) = analysis_and_position(fixture);
analysis.expand_macro(pos).unwrap().unwrap()

View file

@ -2,26 +2,26 @@
use std::iter::successors;
use hir::db::AstDatabase;
use ra_db::SourceDatabase;
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::find_covering_element,
algo::{self, find_covering_element},
ast::{self, AstNode, AstToken},
Direction, NodeOrToken, SyntaxElement,
Direction, NodeOrToken,
SyntaxKind::{self, *},
SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
};
use crate::{expand::descend_into_macros, FileId, FileRange};
use crate::FileRange;
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
let src = db.parse(frange.file_id).tree();
try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range)
let sema = Semantics::new(db);
let src = sema.parse(frange.file_id);
try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
}
fn try_extend_selection(
db: &RootDatabase,
sema: &Semantics<RootDatabase>,
root: &SyntaxNode,
frange: FileRange,
) -> Option<TextRange> {
@ -86,7 +86,7 @@ fn try_extend_selection(
// if we are in a single token tree, we may be inside a macro call or attribute
if node.kind() == TOKEN_TREE {
if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) {
if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
return Some(range);
}
}
@ -96,7 +96,7 @@ fn try_extend_selection(
return Some(node.text_range());
}
let node = shallowest_node(&node.into()).unwrap();
let node = shallowest_node(&node.into());
if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
if let Some(range) = extend_list_item(&node) {
@ -108,8 +108,7 @@ fn try_extend_selection(
}
fn extend_tokens_from_range(
db: &RootDatabase,
file_id: FileId,
sema: &Semantics<RootDatabase>,
macro_call: ast::MacroCall,
original_range: TextRange,
) -> Option<TextRange> {
@ -130,25 +129,21 @@ fn extend_tokens_from_range(
}
// compute original mapped token range
let expanded = {
let first_node = descend_into_macros(db, file_id, first_token.clone());
let first_node = first_node.map(|it| it.text_range());
let last_node = descend_into_macros(db, file_id, last_token.clone());
if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
return None;
let extended = {
let fst_expanded = sema.descend_into_macros(first_token.clone());
let lst_expanded = sema.descend_into_macros(last_token.clone());
let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
lca = shallowest_node(&lca);
if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
lca = lca.parent()?;
}
first_node.map(|it| union_range(it, last_node.value.text_range()))
lca
};
// Compute parent node range
let src = db.parse_or_expand(expanded.file_id)?;
let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
let validate = |token: &SyntaxToken| {
let node = descend_into_macros(db, file_id, token.clone());
node.file_id == expanded.file_id
&& node.value.text_range().is_subrange(&parent.text_range())
let expanded = sema.descend_into_macros(token.clone());
algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
};
// Find the first and last text range under the expanded parent
@ -191,8 +186,8 @@ fn union_range(range: TextRange, r: TextRange) -> TextRange {
}
/// Find the shallowest node with the same range, which allows us to traverse siblings.
fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> {
node.ancestors().take_while(|n| n.text_range() == node.text_range()).last()
fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
}
fn extend_single_word_in_comment_or_string(
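
The rewritten `extended`/`validate` logic above leans on `algo::least_common_ancestor`: a token (after descending into macros) belongs to the extended selection iff the LCA of its parent and the extended node is the extended node itself. A small runnable check of that primitive, assuming standard `ra_syntax` text comparisons:

use ra_syntax::{algo, ast, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("fn f() { 1 + 2; }").tree();
    let exprs: Vec<ast::Expr> = file.syntax().descendants().filter_map(ast::Expr::cast).collect();
    let one = exprs.iter().find(|e| e.syntax().text() == "1").unwrap();
    let two = exprs.iter().find(|e| e.syntax().text() == "2").unwrap();
    // The least common ancestor of the two literals is the binary expression.
    let lca = algo::least_common_ancestor(one.syntax(), two.syntax()).unwrap();
    assert_eq!(lca.text().to_string(), "1 + 2");
}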

View file

@ -1,7 +1,7 @@
//! FIXME: write short doc here
use hir::{db::AstDatabase, InFile, SourceBinder};
use ra_ide_db::{symbol_index, RootDatabase};
use hir::Semantics;
use ra_ide_db::{defs::classify_name, symbol_index, RootDatabase};
use ra_syntax::{
ast::{self},
match_ast, AstNode,
@ -11,8 +11,7 @@ use ra_syntax::{
use crate::{
display::{ToNav, TryToNav},
expand::descend_into_macros,
references::{classify_name, classify_name_ref},
references::classify_name_ref,
FilePosition, NavigationTarget, RangeInfo,
};
@ -20,18 +19,18 @@ pub(crate) fn goto_definition(
db: &RootDatabase,
position: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
let file = db.parse_or_expand(position.file_id.into())?;
let sema = Semantics::new(db);
let file = sema.parse(position.file_id).syntax().clone();
let original_token = pick_best(file.token_at_offset(position.offset))?;
let token = descend_into_macros(db, position.file_id, original_token.clone());
let token = sema.descend_into_macros(original_token.clone());
let mut sb = SourceBinder::new(db);
let nav_targets = match_ast! {
match (token.value.parent()) {
match (token.parent()) {
ast::NameRef(name_ref) => {
reference_definition(&mut sb, token.with_value(&name_ref)).to_vec()
reference_definition(&sema, &name_ref).to_vec()
},
ast::Name(name) => {
name_definition(&mut sb, token.with_value(&name))?
name_definition(&sema, &name)?
},
_ => return None,
}
@ -68,33 +67,33 @@ impl ReferenceResult {
}
pub(crate) fn reference_definition(
sb: &mut SourceBinder<RootDatabase>,
name_ref: InFile<&ast::NameRef>,
sema: &Semantics<RootDatabase>,
name_ref: &ast::NameRef,
) -> ReferenceResult {
use self::ReferenceResult::*;
let name_kind = classify_name_ref(sb, name_ref);
let name_kind = classify_name_ref(sema, name_ref);
if let Some(def) = name_kind {
return match def.try_to_nav(sb.db) {
return match def.try_to_nav(sema.db) {
Some(nav) => ReferenceResult::Exact(nav),
None => ReferenceResult::Approximate(Vec::new()),
};
}
// Fallback index based approach:
let navs = symbol_index::index_resolve(sb.db, name_ref.value)
let navs = symbol_index::index_resolve(sema.db, name_ref)
.into_iter()
.map(|s| s.to_nav(sb.db))
.map(|s| s.to_nav(sema.db))
.collect();
Approximate(navs)
}
fn name_definition(
sb: &mut SourceBinder<RootDatabase>,
name: InFile<&ast::Name>,
sema: &Semantics<RootDatabase>,
name: &ast::Name,
) -> Option<Vec<NavigationTarget>> {
let def = classify_name(sb, name)?;
let nav = def.try_to_nav(sb.db)?;
let def = classify_name(sema, name)?;
let nav = def.try_to_nav(sema.db)?;
Some(vec![nav])
}

View file

@ -1,31 +1,31 @@
//! FIXME: write short doc here
use hir::db::AstDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
use crate::{
display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo,
};
use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo};
pub(crate) fn goto_type_definition(
db: &RootDatabase,
position: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
let file = db.parse_or_expand(position.file_id.into())?;
let token = pick_best(file.token_at_offset(position.offset))?;
let token = descend_into_macros(db, position.file_id, token);
let sema = hir::Semantics::new(db);
let node = token
.value
.ancestors()
.find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
let file: ast::SourceFile = sema.parse(position.file_id);
let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
let token: SyntaxToken = sema.descend_into_macros(token);
let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None);
let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
let ty = match_ast! {
match node {
ast::Expr(expr) => { sema.type_of_expr(&expr)? },
ast::Pat(pat) => { sema.type_of_pat(&pat)? },
_ => { return None },
}
};
let ty: hir::Type = ast::Expr::cast(node.clone())
.and_then(|e| analyzer.type_of(db, &e))
.or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?;
Some((ty, node))
})?;
let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?;
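
The last line above walks the deref chain: for an expression of type `&&String`, `autoderef` visits `&&String`, `&String`, and `String`, and `as_adt` first succeeds at the `String` struct, which becomes the navigation target. In sketch form, with names as in the diff:

use ra_ide_db::RootDatabase;

// Sketch: the target of goto-type-definition is the first ADT along
// the autoderef chain of the type under the cursor.
fn target_adt(db: &RootDatabase, ty: hir::Type) -> Option<hir::Adt> {
    ty.autoderef(db).find_map(|ty| ty.as_adt())
}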

View file

@ -1,8 +1,10 @@
//! FIXME: write short doc here
use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder};
use ra_db::SourceDatabase;
use ra_ide_db::{defs::NameDefinition, RootDatabase};
use hir::{Adt, HasSource, HirDisplay, Semantics};
use ra_ide_db::{
defs::{classify_name, NameDefinition},
RootDatabase,
};
use ra_syntax::{
algo::find_covering_element,
ast::{self, DocCommentsOwner},
@ -13,8 +15,7 @@ use ra_syntax::{
use crate::{
display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel},
expand::{descend_into_macros, original_range},
references::{classify_name, classify_name_ref},
references::classify_name_ref,
FilePosition, FileRange, RangeInfo,
};
@ -143,25 +144,25 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: NameDefinition) -> Option<S
}
pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
let file = db.parse_or_expand(position.file_id.into())?;
let sema = Semantics::new(db);
let file = sema.parse(position.file_id).syntax().clone();
let token = pick_best(file.token_at_offset(position.offset))?;
let token = descend_into_macros(db, position.file_id, token);
let token = sema.descend_into_macros(token);
let mut res = HoverResult::new();
let mut sb = SourceBinder::new(db);
if let Some((node, name_kind)) = match_ast! {
match (token.value.parent()) {
match (token.parent()) {
ast::NameRef(name_ref) => {
classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().clone(), d))
classify_name_ref(&sema, &name_ref).map(|d| (name_ref.syntax().clone(), d))
},
ast::Name(name) => {
classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().clone(), d))
classify_name(&sema, &name).map(|d| (name.syntax().clone(), d))
},
_ => None,
}
} {
let range = original_range(db, token.with_value(&node)).range;
let range = sema.original_range(&node).range;
res.extend(hover_text_from_name_kind(db, name_kind));
if !res.is_empty() {
@ -170,11 +171,10 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
}
let node = token
.value
.ancestors()
.find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
let frange = original_range(db, token.with_value(&node));
let frange = sema.original_range(&node);
res.extend(type_of(db, frange).map(rust_code_markup));
if res.is_empty() {
return None;
@ -197,19 +197,17 @@ fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
}
pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
let parse = db.parse(frange.file_id);
let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
let sema = Semantics::new(db);
let source_file = sema.parse(frange.file_id);
let leaf_node = find_covering_element(source_file.syntax(), frange.range);
// if we picked an identifier, expand to the enclosing pattern/expression
let node = leaf_node
.ancestors()
.take_while(|it| it.text_range() == leaf_node.text_range())
.find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
let analyzer =
hir::SourceAnalyzer::new(db, hir::InFile::new(frange.file_id.into(), &node), None);
let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
{
let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| sema.type_of_expr(&e)) {
ty
} else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
} else if let Some(ty) = ast::Pat::cast(node).and_then(|p| sema.type_of_pat(&p)) {
ty
} else {
return None;
@ -219,11 +217,12 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
#[cfg(test)]
mod tests {
use ra_db::FileLoader;
use ra_syntax::TextRange;
use crate::mock_analysis::{
analysis_and_position, single_file_with_position, single_file_with_range,
};
use ra_db::FileLoader;
use ra_syntax::TextRange;
fn trim_markup(s: &str) -> &str {
s.trim_start_matches("```rust\n").trim_end_matches("\n```")

View file

@ -1,7 +1,6 @@
//! FIXME: write short doc here
use hir::{Crate, ImplBlock, SourceBinder};
use ra_db::SourceDatabase;
use hir::{Crate, ImplBlock, Semantics};
use ra_ide_db::RootDatabase;
use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
@ -11,21 +10,21 @@ pub(crate) fn goto_implementation(
db: &RootDatabase,
position: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
let parse = db.parse(position.file_id);
let syntax = parse.tree().syntax().clone();
let mut sb = SourceBinder::new(db);
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
let syntax = source_file.syntax().clone();
let krate = sb.to_module_def(position.file_id)?.krate();
let krate = sema.to_module_def(position.file_id)?.krate();
if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
return Some(RangeInfo::new(
nominal_def.syntax().text_range(),
impls_for_def(&mut sb, position, &nominal_def, krate)?,
impls_for_def(&sema, &nominal_def, krate)?,
));
} else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
return Some(RangeInfo::new(
trait_def.syntax().text_range(),
impls_for_trait(&mut sb, position, &trait_def, krate)?,
impls_for_trait(&sema, &trait_def, krate)?,
));
}
@ -33,49 +32,37 @@ pub(crate) fn goto_implementation(
}
fn impls_for_def(
sb: &mut SourceBinder<RootDatabase>,
position: FilePosition,
sema: &Semantics<RootDatabase>,
node: &ast::NominalDef,
krate: Crate,
) -> Option<Vec<NavigationTarget>> {
let ty = match node {
ast::NominalDef::StructDef(def) => {
let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
sb.to_def(src)?.ty(sb.db)
}
ast::NominalDef::EnumDef(def) => {
let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
sb.to_def(src)?.ty(sb.db)
}
ast::NominalDef::UnionDef(def) => {
let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
sb.to_def(src)?.ty(sb.db)
}
ast::NominalDef::StructDef(def) => sema.to_def(def)?.ty(sema.db),
ast::NominalDef::EnumDef(def) => sema.to_def(def)?.ty(sema.db),
ast::NominalDef::UnionDef(def) => sema.to_def(def)?.ty(sema.db),
};
let impls = ImplBlock::all_in_crate(sb.db, krate);
let impls = ImplBlock::all_in_crate(sema.db, krate);
Some(
impls
.into_iter()
.filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sb.db)))
.map(|imp| imp.to_nav(sb.db))
.filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db)))
.map(|imp| imp.to_nav(sema.db))
.collect(),
)
}
fn impls_for_trait(
sb: &mut SourceBinder<RootDatabase>,
position: FilePosition,
sema: &Semantics<RootDatabase>,
node: &ast::TraitDef,
krate: Crate,
) -> Option<Vec<NavigationTarget>> {
let src = hir::InFile { file_id: position.file_id.into(), value: node.clone() };
let tr = sb.to_def(src)?;
let tr = sema.to_def(node)?;
let impls = ImplBlock::for_trait(sb.db, krate, tr);
let impls = ImplBlock::for_trait(sema.db, krate, tr);
Some(impls.into_iter().map(|imp| imp.to_nav(sb.db)).collect())
Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect())
}
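
The three arms collapsed above show the new calling convention: previously each node was wrapped in `hir::InFile { file_id, value }` before `sb.to_def(src)`; `Semantics::to_def` now takes the bare AST node. A hedged before/after sketch for one nominal kind:

use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::ast;

fn struct_ty(sema: &Semantics<RootDatabase>, def: &ast::StructDef) -> Option<hir::Type> {
    // Before (removed above):
    //   let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
    //   sb.to_def(src)?.ty(sb.db)
    Some(sema.to_def(def)?.ty(sema.db))
}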
#[cfg(test)]

View file

@ -1,12 +1,11 @@
//! FIXME: write short doc here
use hir::{Adt, HirDisplay, SourceAnalyzer, SourceBinder, Type};
use once_cell::unsync::Lazy;
use hir::{Adt, HirDisplay, Semantics, Type};
use ra_ide_db::RootDatabase;
use ra_prof::profile;
use ra_syntax::{
ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner},
match_ast, SmolStr, SourceFile, SyntaxNode, TextRange,
match_ast, SmolStr, SyntaxNode, TextRange,
};
use crate::{FileId, FunctionSignature};
@ -27,38 +26,36 @@ pub struct InlayHint {
pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
file: &SourceFile,
max_inlay_hint_length: Option<usize>,
) -> Vec<InlayHint> {
let mut sb = SourceBinder::new(db);
let sema = Semantics::new(db);
let file = sema.parse(file_id);
let mut res = Vec::new();
for node in file.syntax().descendants() {
get_inlay_hints(&mut res, &mut sb, file_id, &node, max_inlay_hint_length);
get_inlay_hints(&mut res, &sema, &node, max_inlay_hint_length);
}
res
}
fn get_inlay_hints(
acc: &mut Vec<InlayHint>,
sb: &mut SourceBinder<RootDatabase>,
file_id: FileId,
sema: &Semantics<RootDatabase>,
node: &SyntaxNode,
max_inlay_hint_length: Option<usize>,
) -> Option<()> {
let _p = profile("get_inlay_hints");
let db = sb.db;
let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None));
let db = sema.db;
match_ast! {
match node {
ast::CallExpr(it) => {
get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it));
get_param_name_hints(acc, sema, ast::Expr::from(it));
},
ast::MethodCallExpr(it) => {
get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it));
get_param_name_hints(acc, sema, ast::Expr::from(it));
},
ast::BindPat(it) => {
let pat = ast::Pat::from(it.clone());
let ty = analyzer.type_of_pat(db, &pat)?;
let ty = sema.type_of_pat(&pat)?;
if should_not_display_type_hint(db, &it, &ty) {
return None;
@ -125,8 +122,7 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_
fn get_param_name_hints(
acc: &mut Vec<InlayHint>,
db: &RootDatabase,
analyzer: &SourceAnalyzer,
sema: &Semantics<RootDatabase>,
expr: ast::Expr,
) -> Option<()> {
let args = match &expr {
@ -138,7 +134,7 @@ fn get_param_name_hints(
// we need the argument count to determine whether or not to skip the &self parameter
.collect::<Vec<_>>();
let fn_signature = get_fn_signature(db, analyzer, &expr)?;
let fn_signature = get_fn_signature(sema, &expr)?;
let n_params_to_skip =
if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() {
1
@ -184,28 +180,26 @@ fn should_show_param_hint(
true
}
fn get_fn_signature(
db: &RootDatabase,
analyzer: &SourceAnalyzer,
expr: &ast::Expr,
) -> Option<FunctionSignature> {
fn get_fn_signature(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<FunctionSignature> {
match expr {
ast::Expr::CallExpr(expr) => {
// FIXME: Type::as_callable is broken for closures
let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
match callable_def {
hir::CallableDef::FunctionId(it) => {
Some(FunctionSignature::from_hir(db, it.into()))
Some(FunctionSignature::from_hir(sema.db, it.into()))
}
hir::CallableDef::StructId(it) => {
FunctionSignature::from_struct(sema.db, it.into())
}
hir::CallableDef::StructId(it) => FunctionSignature::from_struct(db, it.into()),
hir::CallableDef::EnumVariantId(it) => {
FunctionSignature::from_enum_variant(db, it.into())
FunctionSignature::from_enum_variant(sema.db, it.into())
}
}
}
ast::Expr::MethodCallExpr(expr) => {
let fn_def = analyzer.resolve_method_call(&expr)?;
Some(FunctionSignature::from_hir(db, fn_def))
let fn_def = sema.resolve_method_call(&expr)?;
Some(FunctionSignature::from_hir(sema.db, fn_def))
}
_ => None,
}

View file

@ -35,7 +35,6 @@ mod typing;
mod matching_brace;
mod display;
mod inlay_hints;
mod expand;
mod expand_macro;
mod ssr;
@ -319,9 +318,7 @@ impl Analysis {
file_id: FileId,
max_inlay_hint_length: Option<usize>,
) -> Cancelable<Vec<InlayHint>> {
self.with_db(|db| {
inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length)
})
self.with_db(|db| inlay_hints::inlay_hints(db, file_id, max_inlay_hint_length))
}
/// Returns the set of folding ranges.

View file

@ -11,4 +11,5 @@ test_utils::marks!(
call_info_bad_offset
dont_complete_current_use
test_resolve_parent_module_on_module_decl
search_filters_by_range
);

View file

@ -1,6 +1,7 @@
//! FIXME: write short doc here
use ra_db::{CrateId, FileId, FilePosition, SourceDatabase};
use hir::Semantics;
use ra_db::{CrateId, FileId, FilePosition};
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::find_node_at_offset,
@ -13,10 +14,10 @@ use crate::NavigationTarget;
/// This returns `Vec` because a module may be included from several places. We
/// don't handle this case yet though, so the Vec has length at most one.
pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
let mut sb = hir::SourceBinder::new(db);
let parse = db.parse(position.file_id);
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
let mut module = find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset);
let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
// If cursor is literally on `mod foo`, go to the grandpa.
if let Some(m) = &module {
@ -30,8 +31,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
}
let module = match module {
Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)),
None => sb.to_module_def(position.file_id),
Some(module) => sema.to_def(&module),
None => sema.to_module_def(position.file_id),
};
let module = match module {
None => return Vec::new(),
@ -43,8 +44,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
/// Returns `Vec` for the same reason as `parent_module`
pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
let mut sb = hir::SourceBinder::new(db);
let module = match sb.to_module_def(file_id) {
let sema = Semantics::new(db);
let module = match sema.to_module_def(file_id) {
Some(it) => it,
None => return Vec::new(),
};

View file

@ -13,25 +13,22 @@ mod classify;
mod rename;
mod search_scope;
use crate::expand::descend_into_macros_with_analyzer;
use hir::{InFile, SourceBinder};
use hir::Semantics;
use once_cell::unsync::Lazy;
use ra_db::{SourceDatabase, SourceDatabaseExt};
use ra_db::SourceDatabaseExt;
use ra_ide_db::RootDatabase;
use ra_prof::profile;
use ra_syntax::{
algo::find_node_at_offset,
ast::{self, NameOwner},
match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset,
match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset,
};
use test_utils::tested_by;
use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo};
pub(crate) use self::{
classify::{classify_name, classify_name_ref},
rename::rename,
};
pub(crate) use ra_ide_db::defs::NameDefinition;
pub(crate) use self::{classify::classify_name_ref, rename::rename};
pub(crate) use ra_ide_db::defs::{classify_name, NameDefinition};
pub use self::search_scope::SearchScope;
@ -114,8 +111,8 @@ pub(crate) fn find_all_refs(
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Option<RangeInfo<ReferenceSearchResult>> {
let parse = db.parse(position.file_id);
let syntax = parse.tree().syntax().clone();
let sema = Semantics::new(db);
let syntax = sema.parse(position.file_id).syntax().clone();
let (opt_name, search_kind) =
if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) {
@ -124,7 +121,7 @@ pub(crate) fn find_all_refs(
(find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other)
};
let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?;
let RangeInfo { range, info: (name, def) } = find_name(&sema, &syntax, position, opt_name)?;
let declaration = def.try_to_nav(db)?;
let search_scope = {
@ -152,19 +149,18 @@ pub(crate) fn find_all_refs(
}
fn find_name(
db: &RootDatabase,
sema: &Semantics<RootDatabase>,
syntax: &SyntaxNode,
position: FilePosition,
opt_name: Option<ast::Name>,
) -> Option<RangeInfo<(String, NameDefinition)>> {
let mut sb = SourceBinder::new(db);
if let Some(name) = opt_name {
let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?;
let def = classify_name(sema, &name)?;
let range = name.syntax().text_range();
return Some(RangeInfo::new(range, (name.text().to_string(), def)));
}
let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?;
let def = classify_name_ref(sema, &name_ref)?;
let range = name_ref.syntax().text_range();
Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
}
@ -182,64 +178,53 @@ fn process_definition(
for (file_id, search_range) in scope {
let text = db.file_text(file_id);
let search_range =
search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text)));
let parse = Lazy::new(|| SourceFile::parse(&text));
let mut sb = Lazy::new(|| SourceBinder::new(db));
let mut analyzer = None;
let sema = Semantics::new(db);
let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
for (idx, _) in text.match_indices(pat) {
let offset = TextUnit::from_usize(idx);
if !search_range.contains_inclusive(offset) {
tested_by!(search_filters_by_range);
continue;
}
let (name_ref, range) = if let Some(name_ref) =
find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset)
{
let range = name_ref.syntax().text_range();
(InFile::new(file_id.into(), name_ref), range)
let name_ref =
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) {
name_ref
} else {
// Handle macro token cases
let t = match parse.tree().syntax().token_at_offset(offset) {
let token = match tree.token_at_offset(offset) {
TokenAtOffset::None => continue,
TokenAtOffset::Single(t) => t,
TokenAtOffset::Between(_, t) => t,
};
let range = t.text_range();
let analyzer = analyzer.get_or_insert_with(|| {
sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None)
});
let expanded = descend_into_macros_with_analyzer(
db,
&analyzer,
InFile::new(file_id.into(), t),
);
if let Some(token) = ast::NameRef::cast(expanded.value.parent()) {
(expanded.with_value(token), range)
} else {
continue;
let expanded = sema.descend_into_macros(token);
match ast::NameRef::cast(expanded.parent()) {
Some(name_ref) => name_ref,
_ => continue,
}
};
if let Some(search_range) = search_range {
if !range.is_subrange(&search_range) {
continue;
}
}
// FIXME: reuse sb
// See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) {
if let Some(d) = classify_name_ref(&sema, &name_ref) {
if d == def {
let kind = if is_record_lit_name_ref(&name_ref.value)
|| is_call_expr_name_ref(&name_ref.value)
{
let kind =
if is_record_lit_name_ref(&name_ref) || is_call_expr_name_ref(&name_ref) {
ReferenceKind::StructLiteral
} else {
ReferenceKind::Other
};
let file_range = sema.original_range(name_ref.syntax());
refs.push(Reference {
file_range: FileRange { file_id, range },
file_range,
kind,
access: reference_access(&d, &name_ref.value),
access: reference_access(&d, &name_ref),
});
}
}
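
The search loop above now finds a `NameRef` in two steps: a direct node lookup at the match offset, with a macro-token fallback through `descend_into_macros`; `file_range` then comes from `sema.original_range`, so hits inside expansions map back to real source. A condensed, hedged sketch of the lookup (`right_biased` is assumed to mirror the `TokenAtOffset::Between(_, t) => t` arm above):

use hir::Semantics;
use ra_db::FileId;
use ra_ide_db::RootDatabase;
use ra_syntax::{algo::find_node_at_offset, ast, AstNode, TextUnit};

// Sketch: offset -> NameRef, with the macro fallback from the loop above.
fn name_ref_at(sema: &Semantics<RootDatabase>, file_id: FileId, offset: TextUnit) -> Option<ast::NameRef> {
    let tree = sema.parse(file_id).syntax().clone();
    find_node_at_offset::<ast::NameRef>(&tree, offset).or_else(|| {
        // Handle tokens produced by macro expansion.
        let token = tree.token_at_offset(offset).right_biased()?;
        let expanded = sema.descend_into_macros(token);
        ast::NameRef::cast(expanded.parent())
    })
}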
@ -348,6 +333,8 @@ fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool {
#[cfg(test)]
mod tests {
use test_utils::covers;
use crate::{
mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis},
Declaration, Reference, ReferenceSearchResult, SearchScope,
@ -455,6 +442,27 @@ mod tests {
);
}
#[test]
fn search_filters_by_range() {
covers!(search_filters_by_range);
let code = r#"
fn foo() {
let spam<|> = 92;
spam + spam
}
fn bar() {
let spam = 92;
spam + spam
}
"#;
let refs = get_all_refs(code);
check_result(
refs,
"spam BIND_PAT FileId(1) [44; 48) Other Write",
&["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"],
);
}
#[test]
fn test_find_all_refs_for_param_inside() {
let code = r#"

View file

@ -1,34 +1,32 @@
//! Functions that are used to classify an element from its definition or reference.
use hir::{InFile, PathResolution, SourceBinder};
use hir::{PathResolution, Semantics};
use ra_ide_db::defs::NameDefinition;
use ra_ide_db::RootDatabase;
use ra_prof::profile;
use ra_syntax::{ast, AstNode};
use test_utils::tested_by;
use super::NameDefinition;
use ra_ide_db::RootDatabase;
pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field};
pub use ra_ide_db::defs::{from_module_def, from_struct_field};
pub(crate) fn classify_name_ref(
sb: &mut SourceBinder<RootDatabase>,
name_ref: InFile<&ast::NameRef>,
sema: &Semantics<RootDatabase>,
name_ref: &ast::NameRef,
) -> Option<NameDefinition> {
let _p = profile("classify_name_ref");
let parent = name_ref.value.syntax().parent()?;
let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None);
let parent = name_ref.syntax().parent()?;
if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
tested_by!(goto_def_for_methods);
if let Some(func) = analyzer.resolve_method_call(&method_call) {
if let Some(func) = sema.resolve_method_call(&method_call) {
return Some(from_module_def(func.into()));
}
}
if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
tested_by!(goto_def_for_fields);
if let Some(field) = analyzer.resolve_field(&field_expr) {
if let Some(field) = sema.resolve_field(&field_expr) {
return Some(from_struct_field(field));
}
}
@ -36,22 +34,20 @@ pub(crate) fn classify_name_ref(
if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
tested_by!(goto_def_for_record_fields);
tested_by!(goto_def_for_field_init_shorthand);
if let Some(field_def) = analyzer.resolve_record_field(&record_field) {
if let Some(field_def) = sema.resolve_record_field(&record_field) {
return Some(from_struct_field(field_def));
}
}
if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
tested_by!(goto_def_for_macros);
if let Some(macro_def) =
analyzer.resolve_macro_call(sb.db, name_ref.with_value(&macro_call))
{
if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
return Some(NameDefinition::Macro(macro_def));
}
}
let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
let resolved = analyzer.resolve_path(sb.db, &path)?;
let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
let resolved = sema.resolve_path(&path)?;
let res = match resolved {
PathResolution::Def(def) => from_module_def(def),
PathResolution::AssocItem(item) => {
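
classify_name_ref tries increasingly general contexts in a fixed order: method call, field access, record field, enclosing macro call, and finally plain path resolution. A condensed sketch of the first and last steps, with the middle cases elided (all calls as shown above):

use hir::{PathResolution, Semantics};
use ra_ide_db::defs::{from_module_def, NameDefinition};
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, AstNode};

fn classify(sema: &Semantics<RootDatabase>, name_ref: &ast::NameRef) -> Option<NameDefinition> {
    let parent = name_ref.syntax().parent()?;
    if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
        if let Some(func) = sema.resolve_method_call(&method_call) {
            return Some(from_module_def(func.into()));
        }
    }
    // ... field exprs, record fields and macro calls, as in the diff ...
    let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
    match sema.resolve_path(&path)? {
        PathResolution::Def(def) => Some(from_module_def(def)),
        _ => None, // remaining arms as in the diff
    }
}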

View file

@ -1,7 +1,7 @@
//! FIXME: write short doc here
use hir::ModuleSource;
use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt};
use hir::{ModuleSource, Semantics};
use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt};
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode,
@ -24,15 +24,16 @@ pub(crate) fn rename(
_ => return None,
}
let parse = db.parse(position.file_id);
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
if let Some((ast_name, ast_module)) =
find_name_and_module_at_offset(parse.tree().syntax(), position)
find_name_and_module_at_offset(source_file.syntax(), position)
{
let range = ast_name.syntax().text_range();
rename_mod(db, &ast_name, &ast_module, position, new_name)
rename_mod(&sema, &ast_name, &ast_module, position, new_name)
.map(|info| RangeInfo::new(range, info))
} else {
rename_reference(db, position, new_name)
rename_reference(sema.db, position, new_name)
}
}
@ -54,7 +55,7 @@ fn source_edit_from_file_id_range(
}
fn rename_mod(
db: &RootDatabase,
sema: &Semantics<RootDatabase>,
ast_name: &ast::Name,
ast_module: &ast::Module,
position: FilePosition,
@ -62,13 +63,12 @@ fn rename_mod(
) -> Option<SourceChange> {
let mut source_file_edits = Vec::new();
let mut file_system_edits = Vec::new();
let module_src = hir::InFile { file_id: position.file_id.into(), value: ast_module.clone() };
if let Some(module) = hir::SourceBinder::new(db).to_def(module_src) {
let src = module.definition_source(db);
let file_id = src.file_id.original_file(db);
if let Some(module) = sema.to_def(ast_module) {
let src = module.definition_source(sema.db);
let file_id = src.file_id.original_file(sema.db);
match src.value {
ModuleSource::SourceFile(..) => {
let mod_path: RelativePathBuf = db.file_relative_path(file_id);
let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id);
// mod is defined in path/to/dir/mod.rs
let dst_path = if mod_path.file_stem() == Some("mod") {
mod_path
@ -82,7 +82,7 @@ fn rename_mod(
if let Some(path) = dst_path {
let move_file = FileSystemEdit::MoveFile {
src: file_id,
dst_source_root: db.file_source_root(position.file_id),
dst_source_root: sema.db.file_source_root(position.file_id),
dst_path: path,
};
file_system_edits.push(move_file);
@ -98,7 +98,7 @@ fn rename_mod(
};
source_file_edits.push(edit);
if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(db, position, None) {
if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) {
let ref_edits = refs.references.into_iter().map(|reference| {
source_edit_from_file_id_range(
reference.file_range.file_id,

View file

@ -1,8 +1,7 @@
//! FIXME: write short doc here
use hir::{InFile, SourceBinder};
use hir::Semantics;
use itertools::Itertools;
use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{
ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner},
@ -42,46 +41,33 @@ pub enum RunnableKind {
}
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
let parse = db.parse(file_id);
let mut sb = SourceBinder::new(db);
parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect()
let sema = Semantics::new(db);
let source_file = sema.parse(file_id);
source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
}
fn runnable(
db: &RootDatabase,
source_binder: &mut SourceBinder<RootDatabase>,
file_id: FileId,
item: SyntaxNode,
) -> Option<Runnable> {
fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
match_ast! {
match item {
ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) },
ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) },
_ => { None },
ast::FnDef(it) => { runnable_fn(sema, it) },
ast::Module(it) => { runnable_mod(sema, it) },
_ => None,
}
}
}
fn runnable_fn(
db: &RootDatabase,
source_binder: &mut SourceBinder<RootDatabase>,
file_id: FileId,
fn_def: ast::FnDef,
) -> Option<Runnable> {
fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
let name_string = fn_def.name()?.text().to_string();
let kind = if name_string == "main" {
RunnableKind::Bin
} else {
let test_id = if let Some(module) = source_binder
.to_def(InFile::new(file_id.into(), fn_def.clone()))
.map(|def| def.module(db))
{
let test_id = if let Some(module) = sema.to_def(&fn_def).map(|def| def.module(sema.db)) {
let path = module
.path_to_root(db)
.path_to_root(sema.db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
.filter_map(|it| it.name(sema.db))
.map(|name| name.to_string())
.chain(std::iter::once(name_string))
.join("::");
@ -115,12 +101,7 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
.any(|attribute_text| attribute_text.contains("test"))
}
fn runnable_mod(
db: &RootDatabase,
source_binder: &mut SourceBinder<RootDatabase>,
file_id: FileId,
module: ast::Module,
) -> Option<Runnable> {
fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
let has_test_function = module
.item_list()?
.items()
@ -133,9 +114,10 @@ fn runnable_mod(
return None;
}
let range = module.syntax().text_range();
let module = source_binder.to_def(InFile::new(file_id.into(), module))?;
let module = sema.to_def(&module)?;
let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
let path =
module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
Some(Runnable { range, kind: RunnableKind::TestMod { path } })
}
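
runnable_fn and runnable_mod assemble the module path the same way (runnable_fn additionally chains the function name): walk from the module to the crate root, reverse, and join the names with "::". Factored out as a hedged sketch:

use hir::Semantics;
use itertools::Itertools;
use ra_ide_db::RootDatabase;

// Sketch: "crate_root::inner_mod::test_name", as assembled above.
fn test_path(sema: &Semantics<RootDatabase>, module: hir::Module, test_name: &str) -> String {
    module
        .path_to_root(sema.db)
        .into_iter()
        .rev()
        .filter_map(|it| it.name(sema.db))
        .map(|name| name.to_string())
        .chain(std::iter::once(test_name.to_string()))
        .join("::")
}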

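Taken together, these hunks show the migration shape this commit applies everywhere: build one Semantics per entry point, parse through it so each node is associated with its originating file, then pass &sema down instead of the (db, source_binder, file_id) triple. A hedged sketch of the same shape on a toy query; only Semantics::new, Semantics::parse, and Semantics::to_def are taken from this diff, the helper itself is illustrative:

    use hir::Semantics;
    use ra_db::FileId;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode};

    // Collect the resolved `hir::Module` for every module item in a file.
    fn modules_in_file(db: &RootDatabase, file_id: FileId) -> Vec<hir::Module> {
        let sema = Semantics::new(db);
        // Parsing through `sema` records node -> file in its internal map,
        // which is what lets `to_def` below accept a bare syntax node.
        let source_file = sema.parse(file_id);
        source_file
            .syntax()
            .descendants()
            .filter_map(ast::Module::cast)
            .filter_map(|it| sema.to_def(&it))
            .collect()
    }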
View file

@ -25,14 +25,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.keyword\.control { color: #F0DFAF; font-weight: bold; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function">main</span>() {
<span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>;
<span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string();
<span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string();
<span class="keyword">let</span> <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string">"hello"</span>;
<span class="keyword">let</span> <span class="variable" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
<span class="keyword">let</span> <span class="variable" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
<span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>;
<span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string();
<span class="keyword">let</span> <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string">"other color please!"</span>;
<span class="keyword">let</span> <span class="variable" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string();
}
<span class="keyword">fn</span> <span class="function">bar</span>() {
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>;
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string">"hello"</span>;
}</code></pre>

View file

@ -1,8 +1,11 @@
//! FIXME: write short doc here
use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder};
use hir::{Name, Semantics};
use ra_db::SourceDatabase;
use ra_ide_db::{defs::NameDefinition, RootDatabase};
use ra_ide_db::{
defs::{classify_name, NameDefinition},
RootDatabase,
};
use ra_prof::profile;
use ra_syntax::{
ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
@ -10,11 +13,7 @@ use ra_syntax::{
};
use rustc_hash::FxHashMap;
use crate::{
expand::descend_into_macros_with_analyzer,
references::{classify_name, classify_name_ref},
FileId,
};
use crate::{references::classify_name_ref, FileId};
pub mod tags {
pub const FIELD: &str = "field";
@ -73,14 +72,11 @@ pub(crate) fn highlight(
range: Option<TextRange>,
) -> Vec<HighlightedRange> {
let _p = profile("highlight");
let sema = Semantics::new(db);
let root = sema.parse(file_id).syntax().clone();
let parse = db.parse(file_id);
let root = parse.tree().syntax().clone();
let mut sb = SourceBinder::new(db);
let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
let mut res = Vec::new();
let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None);
let mut in_macro_call = None;
@ -105,7 +101,7 @@ pub(crate) fn highlight(
match node.kind() {
MACRO_CALL => {
in_macro_call = Some(node.clone());
if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
if let Some(range) = highlight_macro(node) {
res.push(HighlightedRange {
range,
tag: tags::MACRO,
@ -116,10 +112,9 @@ pub(crate) fn highlight(
_ if in_macro_call.is_some() => {
if let Some(token) = node.as_token() {
if let Some((tag, binding_hash)) = highlight_token_tree(
&mut sb,
&analyzer,
&sema,
&mut bindings_shadow_count,
InFile::new(file_id.into(), token.clone()),
token.clone(),
) {
res.push(HighlightedRange {
range: node.text_range(),
@ -130,11 +125,9 @@ pub(crate) fn highlight(
}
}
_ => {
if let Some((tag, binding_hash)) = highlight_node(
&mut sb,
&mut bindings_shadow_count,
InFile::new(file_id.into(), node.clone()),
) {
if let Some((tag, binding_hash)) =
highlight_node(&sema, &mut bindings_shadow_count, node.clone())
{
res.push(HighlightedRange {
range: node.text_range(),
tag,
@ -161,8 +154,8 @@ pub(crate) fn highlight(
res
}
fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> {
let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?;
fn highlight_macro(node: SyntaxElement) -> Option<TextRange> {
let macro_call = ast::MacroCall::cast(node.as_node()?.clone())?;
let path = macro_call.path()?;
let name_ref = path.segment()?.name_ref()?;
@ -179,35 +172,34 @@ fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> {
}
fn highlight_token_tree(
sb: &mut SourceBinder<RootDatabase>,
analyzer: &SourceAnalyzer,
sema: &Semantics<RootDatabase>,
bindings_shadow_count: &mut FxHashMap<Name, u32>,
token: InFile<SyntaxToken>,
token: SyntaxToken,
) -> Option<(&'static str, Option<u64>)> {
if token.value.parent().kind() != TOKEN_TREE {
if token.parent().kind() != TOKEN_TREE {
return None;
}
let token = descend_into_macros_with_analyzer(sb.db, analyzer, token);
let token = sema.descend_into_macros(token.clone());
let expanded = {
let parent = token.value.parent();
let parent = token.parent();
// We only care about Name and NameRef
match (token.value.kind(), parent.kind()) {
(IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()),
_ => token.map(|it| it.into()),
match (token.kind(), parent.kind()) {
(IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
_ => token.into(),
}
};
highlight_node(sb, bindings_shadow_count, expanded)
highlight_node(sema, bindings_shadow_count, expanded)
}
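descend_into_macros is the Semantics counterpart of the old descend_into_macros_with_analyzer: it maps a token written inside a macro call to the corresponding token inside the expansion, with Semantics caching the file mapping as it goes. A small sketch of the call; the surrounding helper is purely illustrative:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{SyntaxKind, SyntaxToken};

    // Classify a token that was written inside a macro call by looking at
    // the corresponding token in the macro's expansion.
    fn expanded_kind(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> SyntaxKind {
        let expanded = sema.descend_into_macros(token);
        expanded.kind()
    }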
fn highlight_node(
sb: &mut SourceBinder<RootDatabase>,
sema: &Semantics<RootDatabase>,
bindings_shadow_count: &mut FxHashMap<Name, u32>,
node: InFile<SyntaxElement>,
node: SyntaxElement,
) -> Option<(&'static str, Option<u64>)> {
let db = sb.db;
let db = sema.db;
let mut binding_hash = None;
let tag = match node.value.kind() {
let tag = match node.kind() {
FN_DEF => {
bindings_shadow_count.clear();
return None;
@ -216,19 +208,18 @@ fn highlight_node(
STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING,
ATTR => tags::LITERAL_ATTRIBUTE,
// Special-case field init shorthand
NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD,
NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None,
NAME_REF if node.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD,
NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => return None,
NAME_REF => {
let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap();
let name_kind = classify_name_ref(sb, node.with_value(&name_ref));
let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap();
let name_kind = classify_name_ref(sema, &name_ref);
match name_kind {
Some(name_kind) => {
if let NameDefinition::Local(local) = &name_kind {
if let Some(name) = local.name(db) {
let shadow_count =
bindings_shadow_count.entry(name.clone()).or_default();
binding_hash =
Some(calc_binding_hash(node.file_id, &name, *shadow_count))
binding_hash = Some(calc_binding_hash(&name, *shadow_count))
}
};
@ -238,14 +229,14 @@ fn highlight_node(
}
}
NAME => {
let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap();
let name_kind = classify_name(sb, node.with_value(&name));
let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap();
let name_kind = classify_name(sema, &name);
if let Some(NameDefinition::Local(local)) = &name_kind {
if let Some(name) = local.name(db) {
let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
*shadow_count += 1;
binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count))
binding_hash = Some(calc_binding_hash(&name, *shadow_count))
}
};
@ -272,7 +263,7 @@ fn highlight_node(
return Some((tag, binding_hash));
fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 {
fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 {
fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
use std::{collections::hash_map::DefaultHasher, hash::Hasher};
@ -281,7 +272,7 @@ fn highlight_node(
hasher.finish()
}
hash((file_id, name, shadow_count))
hash((name, shadow_count))
}
}

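The calc_binding_hash change also explains the wholesale churn in the rainbow-highlighting HTML fixture earlier in this commit: the HirFileId is no longer mixed into the hash, so every recorded data-binding-hash value shifts. A self-contained model of the new computation, using &str in place of hir::Name so the snippet stands alone:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Hash only the binding's name and how many times it has been
    // shadowed; the originating file no longer participates.
    fn calc_binding_hash(name: &str, shadow_count: u32) -> u64 {
        let mut hasher = DefaultHasher::new();
        (name, shadow_count).hash(&mut hasher);
        hasher.finish()
    }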
View file

@ -6,8 +6,8 @@
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
use hir::{
Adt, FieldSource, HasSource, ImplBlock, InFile, Local, MacroDef, Module, ModuleDef,
SourceBinder, StructField, TypeParam,
Adt, FieldSource, HasSource, ImplBlock, Local, MacroDef, Module, ModuleDef, Semantics,
StructField, TypeParam,
};
use ra_prof::profile;
use ra_syntax::{
@ -68,78 +68,62 @@ impl NameDefinition {
}
}
pub fn classify_name(
sb: &mut SourceBinder<RootDatabase>,
name: InFile<&ast::Name>,
) -> Option<NameDefinition> {
pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<NameDefinition> {
let _p = profile("classify_name");
let parent = name.value.syntax().parent()?;
let parent = name.syntax().parent()?;
match_ast! {
match parent {
ast::BindPat(it) => {
let src = name.with_value(it);
let local = sb.to_def(src)?;
let local = sema.to_def(&it)?;
Some(NameDefinition::Local(local))
},
ast::RecordFieldDef(it) => {
let src = name.with_value(it);
let field: hir::StructField = sb.to_def(src)?;
let field: hir::StructField = sema.to_def(&it)?;
Some(from_struct_field(field))
},
ast::Module(it) => {
let def = sb.to_def(name.with_value(it))?;
let def = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::StructDef(it) => {
let src = name.with_value(it);
let def: hir::Struct = sb.to_def(src)?;
let def: hir::Struct = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::EnumDef(it) => {
let src = name.with_value(it);
let def: hir::Enum = sb.to_def(src)?;
let def: hir::Enum = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::TraitDef(it) => {
let src = name.with_value(it);
let def: hir::Trait = sb.to_def(src)?;
let def: hir::Trait = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::StaticDef(it) => {
let src = name.with_value(it);
let def: hir::Static = sb.to_def(src)?;
let def: hir::Static = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::EnumVariant(it) => {
let src = name.with_value(it);
let def: hir::EnumVariant = sb.to_def(src)?;
let def: hir::EnumVariant = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::FnDef(it) => {
let src = name.with_value(it);
let def: hir::Function = sb.to_def(src)?;
let def: hir::Function = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::ConstDef(it) => {
let src = name.with_value(it);
let def: hir::Const = sb.to_def(src)?;
let def: hir::Const = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::TypeAliasDef(it) => {
let src = name.with_value(it);
let def: hir::TypeAlias = sb.to_def(src)?;
let def: hir::TypeAlias = sema.to_def(&it)?;
Some(from_module_def(def.into()))
},
ast::MacroCall(it) => {
let src = name.with_value(it);
let def = sb.to_def(src.clone())?;
let def = sema.to_def(&it)?;
Some(NameDefinition::Macro(def))
},
ast::TypeParam(it) => {
let src = name.with_value(it);
let def = sb.to_def(src)?;
let def = sema.to_def(&it)?;
Some(NameDefinition::TypeParam(def))
},
_ => None,

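With Semantics tracking node origins, classify_name drops both the SourceBinder parameter and the InFile wrapper around every to_def call. A hedged sketch of a caller: the offset-based lookup is illustrative, and only classify_name, Semantics::new, and Semantics::parse come from this diff:

    use hir::Semantics;
    use ra_db::FileId;
    use ra_ide_db::{defs::classify_name, RootDatabase};
    use ra_syntax::{algo::find_node_at_offset, ast, AstNode, TextUnit};

    // Resolve whatever Name sits at `offset` to its NameDefinition.
    fn classify_at(db: &RootDatabase, file_id: FileId, offset: TextUnit) {
        let sema = Semantics::new(db);
        let source_file = sema.parse(file_id);
        if let Some(name) = find_node_at_offset::<ast::Name>(source_file.syntax(), offset) {
            // No InFile::new(...) wrapping: `sema` already knows which
            // file `name` came from.
            let _def = classify_name(&sema, &name);
        }
    }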
View file

@ -1,7 +1,7 @@
//! This module contains import search functionality that is provided to the ra_assists module.
//! Later, this should be moved out into a separate crate that is accessible from the ra_assists module.
use hir::{db::HirDatabase, ModuleDef, SourceBinder};
use hir::{ModuleDef, Semantics};
use ra_prof::profile;
use ra_syntax::{ast, AstNode, SyntaxKind::NAME};
@ -12,17 +12,17 @@ use crate::{
};
pub struct ImportsLocator<'a> {
source_binder: SourceBinder<'a, RootDatabase>,
sema: Semantics<'a, RootDatabase>,
}
impl<'a> ImportsLocator<'a> {
pub fn new(db: &'a RootDatabase) -> Self {
Self { source_binder: SourceBinder::new(db) }
Self { sema: Semantics::new(db) }
}
pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> {
let _p = profile("search_for_imports");
let db = self.source_binder.db;
let db = self.sema.db;
let project_results = {
let mut query = Query::new(name_to_import.to_string());
@ -41,7 +41,7 @@ impl<'a> ImportsLocator<'a> {
project_results
.into_iter()
.chain(lib_results.into_iter())
.filter_map(|import_candidate| self.get_name_definition(db, &import_candidate))
.filter_map(|import_candidate| self.get_name_definition(&import_candidate))
.filter_map(|name_definition_to_import| match name_definition_to_import {
NameDefinition::ModuleDef(module_def) => Some(module_def),
_ => None,
@ -49,22 +49,16 @@ impl<'a> ImportsLocator<'a> {
.collect()
}
fn get_name_definition(
&mut self,
db: &impl HirDatabase,
import_candidate: &FileSymbol,
) -> Option<NameDefinition> {
fn get_name_definition(&mut self, import_candidate: &FileSymbol) -> Option<NameDefinition> {
let _p = profile("get_name_definition");
let file_id = import_candidate.file_id.into();
let candidate_node = import_candidate.ptr.to_node(&db.parse_or_expand(file_id)?);
let file_id = import_candidate.file_id;
let candidate_node = import_candidate.ptr.to_node(self.sema.parse(file_id).syntax());
let candidate_name_node = if candidate_node.kind() != NAME {
candidate_node.children().find(|it| it.kind() == NAME)?
} else {
candidate_node
};
classify_name(
&mut self.source_binder,
hir::InFile { file_id, value: &ast::Name::cast(candidate_name_node)? },
)
classify_name(&self.sema, &ast::Name::cast(candidate_name_node)?)
}
}

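From the outside, usage of the reworked locator is unchanged; only the internals switched from SourceBinder to Semantics. A sketch of the intended call pattern, assuming this module's ImportsLocator is in scope and with the caller itself hypothetical:

    use hir::ModuleDef;
    use ra_ide_db::RootDatabase;

    // Find everything importable under `name`.
    fn importable_defs(db: &RootDatabase, name: &str) -> Vec<ModuleDef> {
        let mut locator = ImportsLocator::new(db);
        // find_imports funnels symbol-index hits through classify_name,
        // as the body above shows, keeping only module-level definitions.
        locator.find_imports(name)
    }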
View file

@ -4,7 +4,7 @@ use std::ops::RangeInclusive;
use itertools::Itertools;
use ra_text_edit::TextEditBuilder;
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
@ -56,6 +56,11 @@ pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxEleme
root.covering_element(range)
}
pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
let u_ancestors = u.ancestors().collect::<FxHashSet<SyntaxNode>>();
v.ancestors().find(|it| u_ancestors.contains(it))
}
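The new least_common_ancestor materializes one ancestor chain (which, in rowan, includes the node itself) as a hash set, then walks the other chain until it hits that set, so the cost is linear in tree depth. A self-contained model of the same algorithm over plain parent indices standing in for SyntaxNodes:

    use std::collections::HashSet;

    // `parents[n]` is the parent index of node n, or None for the root.
    fn least_common_ancestor(parents: &[Option<usize>], u: usize, v: usize) -> Option<usize> {
        let mut u_ancestors = HashSet::new();
        let mut cur = Some(u);
        while let Some(n) = cur {
            u_ancestors.insert(n); // the chain includes `u` itself
            cur = parents[n];
        }
        let mut cur = Some(v);
        while let Some(n) = cur {
            if u_ancestors.contains(&n) {
                return Some(n); // first shared node is the LCA
            }
            cur = parents[n];
        }
        None
    }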
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InsertPosition<T> {
First,