Introduce SourceBinder

This commit is contained in:
Aleksey Kladov 2020-01-14 15:27:05 +01:00
parent c0661ce744
commit ccfe53376a
5 changed files with 217 additions and 101 deletions

View file

@ -8,7 +8,7 @@
#![recursion_limit = "512"] #![recursion_limit = "512"]
macro_rules! impl_froms { macro_rules! impl_froms {
($e:ident: $($v:ident $(($($sv:ident),*))?),*) => { ($e:ident: $($v:ident $(($($sv:ident),*))?),*$(,)?) => {
$( $(
impl From<$v> for $e { impl From<$v> for $e {
fn from(it: $v) -> $e { fn from(it: $v) -> $e {
@ -28,6 +28,7 @@ macro_rules! impl_froms {
pub mod db; pub mod db;
pub mod source_analyzer; pub mod source_analyzer;
pub mod source_binder;
pub mod diagnostics; pub mod diagnostics;

View file

@ -14,30 +14,26 @@ use hir_def::{
BodySourceMap, BodySourceMap,
}, },
expr::{ExprId, PatId}, expr::{ExprId, PatId},
nameres::ModuleSource, resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
DefWithBodyId, TraitId, DefWithBodyId, TraitId,
}; };
use hir_expand::{ use hir_expand::{
hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind, hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
}; };
use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
use ra_prof::profile;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
match_ast, AstPtr, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
SyntaxKind::*,
SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
}; };
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use crate::{ use crate::{
db::HirDatabase, Adt, Const, DefWithBody, Enum, EnumVariant, FromSource, Function, ImplBlock, db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
}; };
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself. /// original source files. It should not be used inside the HIR itself.
#[derive(Debug)] #[derive(Debug)]
pub struct SourceAnalyzer { pub struct SourceAnalyzer {
file_id: HirFileId, file_id: HirFileId,
@ -109,37 +105,43 @@ impl SourceAnalyzer {
node: InFile<&SyntaxNode>, node: InFile<&SyntaxNode>,
offset: Option<TextUnit>, offset: Option<TextUnit>,
) -> SourceAnalyzer { ) -> SourceAnalyzer {
let _p = profile("SourceAnalyzer::new"); crate::source_binder::SourceBinder::default().analyze(db, node, offset)
let def_with_body = def_with_body_from_child_node(db, node); }
if let Some(def) = def_with_body {
let (_body, source_map) = db.body_with_source_map(def.into()); pub(crate) fn new_for_body(
let scopes = db.expr_scopes(def.into()); db: &impl HirDatabase,
let scope = match offset { def: DefWithBodyId,
None => scope_for(&scopes, &source_map, node), node: InFile<&SyntaxNode>,
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)), offset: Option<TextUnit>,
}; ) -> SourceAnalyzer {
let resolver = resolver_for_scope(db, def.into(), scope); let (_body, source_map) = db.body_with_source_map(def);
SourceAnalyzer { let scopes = db.expr_scopes(def);
resolver, let scope = match offset {
body_owner: Some(def), None => scope_for(&scopes, &source_map, node),
body_source_map: Some(source_map), Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
infer: Some(db.infer(def.into())), };
scopes: Some(scopes), let resolver = resolver_for_scope(db, def, scope);
file_id: node.file_id, SourceAnalyzer {
} resolver,
} else { body_owner: Some(def.into()),
SourceAnalyzer { body_source_map: Some(source_map),
resolver: node infer: Some(db.infer(def)),
.value scopes: Some(scopes),
.ancestors() file_id: node.file_id,
.find_map(|it| try_get_resolver_for_node(db, node.with_value(&it))) }
.unwrap_or_default(), }
body_owner: None,
body_source_map: None, pub(crate) fn new_for_resolver(
infer: None, resolver: Resolver,
scopes: None, node: InFile<&SyntaxNode>,
file_id: node.file_id, ) -> SourceAnalyzer {
} SourceAnalyzer {
resolver,
body_owner: None,
body_source_map: None,
infer: None,
scopes: None,
file_id: node.file_id,
} }
} }
@ -366,64 +368,6 @@ impl SourceAnalyzer {
} }
} }
fn try_get_resolver_for_node(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> Option<Resolver> {
match_ast! {
match (node.value) {
ast::Module(it) => {
let src = node.with_value(it);
Some(crate::Module::from_declaration(db, src)?.id.resolver(db))
},
ast::SourceFile(it) => {
let src = node.with_value(ModuleSource::SourceFile(it));
Some(crate::Module::from_definition(db, src)?.id.resolver(db))
},
ast::StructDef(it) => {
let src = node.with_value(it);
Some(Struct::from_source(db, src)?.id.resolver(db))
},
ast::EnumDef(it) => {
let src = node.with_value(it);
Some(Enum::from_source(db, src)?.id.resolver(db))
},
ast::ImplBlock(it) => {
let src = node.with_value(it);
Some(ImplBlock::from_source(db, src)?.id.resolver(db))
},
ast::TraitDef(it) => {
let src = node.with_value(it);
Some(Trait::from_source(db, src)?.id.resolver(db))
},
_ => match node.value.kind() {
FN_DEF | CONST_DEF | STATIC_DEF => {
let def = def_with_body_from_child_node(db, node)?;
let def = DefWithBodyId::from(def);
Some(def.resolver(db))
}
// FIXME add missing cases
_ => None
}
}
}
}
fn def_with_body_from_child_node(
db: &impl HirDatabase,
child: InFile<&SyntaxNode>,
) -> Option<DefWithBody> {
let _p = profile("def_with_body_from_child_node");
child.cloned().ancestors_with_macros(db).find_map(|node| {
let n = &node.value;
match_ast! {
match n {
ast::FnDef(def) => { return Function::from_source(db, node.with_value(def)).map(DefWithBody::from); },
ast::ConstDef(def) => { return Const::from_source(db, node.with_value(def)).map(DefWithBody::from); },
ast::StaticDef(def) => { return Static::from_source(db, node.with_value(def)).map(DefWithBody::from); },
_ => { None },
}
}
})
}
fn scope_for( fn scope_for(
scopes: &ExprScopes, scopes: &ExprScopes,
source_map: &BodySourceMap, source_map: &BodySourceMap,

View file

@ -0,0 +1,171 @@
//! `SourceBinder` should be the main entry point for getting info about source code.
//! Its main task is to map source syntax trees to hir-level IDs.
//!
//! It is intended to subsume `FromSource` and `SourceAnalyzer`.
use hir_def::{
child_by_source::ChildBySource,
dyn_map::DynMap,
keys::{self, Key},
resolver::{HasResolver, Resolver},
ConstId, DefWithBodyId, EnumId, FunctionId, ImplId, ModuleId, StaticId, StructId, TraitId,
UnionId, VariantId,
};
use hir_expand::InFile;
use ra_prof::profile;
use ra_syntax::{ast, match_ast, AstNode, SyntaxNode, TextUnit};
use rustc_hash::FxHashMap;
use crate::{db::HirDatabase, ModuleSource, SourceAnalyzer};
/// `SourceBinder` is the entry point for mapping source-level syntax nodes
/// to hir-level IDs (see the module docs). It memoizes the per-container
/// child maps so that repeated lookups inside the same container are cheap.
#[derive(Default)]
pub struct SourceBinder {
    // Lazily-filled cache: container -> map of its children keyed by source
    // pointer. Populated on demand by `to_id`.
    child_by_source_cache: FxHashMap<ChildContainer, DynMap>,
}
impl SourceBinder {
    /// Builds a `SourceAnalyzer` for `src` (optionally at `offset`), by
    /// locating the closest enclosing container and constructing the
    /// appropriate resolver for it.
    pub fn analyze(
        &mut self,
        db: &impl HirDatabase,
        src: InFile<&SyntaxNode>,
        offset: Option<TextUnit>,
    ) -> SourceAnalyzer {
        // FIX: label previously read "SourceBinder::analyzer", which did not
        // match this method's name and made profile output misleading.
        let _p = profile("SourceBinder::analyze");
        let container = match self.find_container(db, src) {
            Some(it) => it,
            // No enclosing container at all: fall back to an empty resolver.
            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
        };
        let resolver = match container {
            // Bodies get full expression-scope handling (uses `offset` to
            // pick the innermost scope).
            ChildContainer::DefWithBodyId(def) => {
                return SourceAnalyzer::new_for_body(db, def, src, offset)
            }
            ChildContainer::TraitId(it) => it.resolver(db),
            ChildContainer::ImplId(it) => it.resolver(db),
            ChildContainer::ModuleId(it) => it.resolver(db),
            ChildContainer::EnumId(it) => it.resolver(db),
            ChildContainer::VariantId(it) => it.resolver(db),
        };
        SourceAnalyzer::new_for_resolver(resolver, src)
    }

    /// Maps a source AST node to its hir wrapper type `D`, going through the
    /// raw id type `ID`. Returns `None` if the node has no hir counterpart.
    pub fn to_def<D, ID>(&mut self, db: &impl HirDatabase, src: InFile<ID::Ast>) -> Option<D>
    where
        D: From<ID>,
        ID: ToId,
    {
        let id: ID = self.to_id(db, src)?;
        Some(id.into())
    }

    /// Maps a source AST node to its raw hir-def id. The container's
    /// child-by-source map is computed once and cached, so repeated lookups
    /// in the same container avoid recomputation.
    fn to_id<D: ToId>(&mut self, db: &impl HirDatabase, src: InFile<D::Ast>) -> Option<D> {
        let container = self.find_container(db, src.as_ref().map(|it| it.syntax()))?;
        let dyn_map =
            &*self.child_by_source_cache.entry(container).or_insert_with(|| match container {
                ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
                ChildContainer::ModuleId(it) => it.child_by_source(db),
                ChildContainer::TraitId(it) => it.child_by_source(db),
                ChildContainer::ImplId(it) => it.child_by_source(db),
                ChildContainer::EnumId(it) => it.child_by_source(db),
                ChildContainer::VariantId(it) => it.child_by_source(db),
            });
        dyn_map[D::KEY].get(&src).copied()
    }

    /// Walks the macro-aware ancestors of `src` and returns the first node
    /// that can act as a container of child items; falls back to the
    /// enclosing module when no closer container exists.
    fn find_container(
        &mut self,
        db: &impl HirDatabase,
        src: InFile<&SyntaxNode>,
    ) -> Option<ChildContainer> {
        // `skip(1)`: a node is never its own container.
        for container in src.cloned().ancestors_with_macros(db).skip(1) {
            let res: ChildContainer = match_ast! {
                match (container.value) {
                    ast::TraitDef(it) => {
                        let def: TraitId = self.to_id(db, container.with_value(it))?;
                        def.into()
                    },
                    ast::ImplBlock(it) => {
                        let def: ImplId = self.to_id(db, container.with_value(it))?;
                        def.into()
                    },
                    ast::FnDef(it) => {
                        let def: FunctionId = self.to_id(db, container.with_value(it))?;
                        DefWithBodyId::from(def).into()
                    },
                    ast::StaticDef(it) => {
                        let def: StaticId = self.to_id(db, container.with_value(it))?;
                        DefWithBodyId::from(def).into()
                    },
                    ast::ConstDef(it) => {
                        let def: ConstId = self.to_id(db, container.with_value(it))?;
                        DefWithBodyId::from(def).into()
                    },
                    ast::EnumDef(it) => {
                        let def: EnumId = self.to_id(db, container.with_value(it))?;
                        def.into()
                    },
                    // Structs and unions are containers of their fields, so
                    // they map to `VariantId`.
                    ast::StructDef(it) => {
                        let def: StructId = self.to_id(db, container.with_value(it))?;
                        VariantId::from(def).into()
                    },
                    ast::UnionDef(it) => {
                        let def: UnionId = self.to_id(db, container.with_value(it))?;
                        VariantId::from(def).into()
                    },
                    // FIXME: handle out-of-line modules here
                    _ => { continue },
                }
            };
            return Some(res);
        }
        let module_source = ModuleSource::from_child_node(db, src);
        let c = crate::Module::from_definition(db, src.with_value(module_source))?;
        Some(c.id.into())
    }
}
// The possible kinds of item container a syntax node can live in. Used both
// as a cache key in `SourceBinder` and to pick the right resolver.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum ChildContainer {
    DefWithBodyId(DefWithBodyId),
    ModuleId(ModuleId),
    TraitId(TraitId),
    ImplId(ImplId),
    EnumId(EnumId),
    // Structs/unions as containers of their fields.
    VariantId(VariantId),
}
// Generate `From<...> for ChildContainer` for each variant's payload type.
impl_froms! {
    ChildContainer:
    DefWithBodyId,
    ModuleId,
    TraitId,
    ImplId,
    EnumId,
    VariantId,
}
/// Links a raw hir-def id type to its source AST node type, together with
/// the `DynMap` key used to look the id up in a container's child map.
pub trait ToId: Sized + Copy + 'static {
    // The AST node type this id is produced from.
    type Ast: AstNode + 'static;
    // Key into the container's `DynMap` for nodes of type `Ast`.
    const KEY: Key<Self::Ast, Self>;
}
// Boilerplate-reducing macro: one `ToId` impl per (id, ast, key) triple.
macro_rules! to_id_impls {
    ($(($id:ident, $ast:path, $key:path)),* ,) => {$(
        impl ToId for $id {
            type Ast = $ast;
            const KEY: Key<Self::Ast, Self> = $key;
        }
    )*}
}
to_id_impls![
    (StructId, ast::StructDef, keys::STRUCT),
    (UnionId, ast::UnionDef, keys::UNION),
    (EnumId, ast::EnumDef, keys::ENUM),
    (TraitId, ast::TraitDef, keys::TRAIT),
    (FunctionId, ast::FnDef, keys::FUNCTION),
    (StaticId, ast::StaticDef, keys::STATIC),
    (ConstId, ast::ConstDef, keys::CONST),
    // Not yet wired up; kept as a marker for the missing mapping.
    // (TypeAlias, TypeAliasId, ast::TypeAliasDef, keys::TYPE_ALIAS),
    (ImplId, ast::ImplBlock, keys::IMPL),
];

View file

@ -332,7 +332,7 @@ pub enum VariantId {
StructId(StructId), StructId(StructId),
UnionId(UnionId), UnionId(UnionId),
} }
impl_froms!(VariantId: EnumVariantId, StructId); impl_froms!(VariantId: EnumVariantId, StructId, UnionId);
trait Intern { trait Intern {
type ID; type ID;

View file

@ -312,7 +312,7 @@ mod tests {
use test_utils::{assert_eq_text, project_dir, read_text}; use test_utils::{assert_eq_text, project_dir, read_text};
#[test] #[test]
fn test_highlighting() { fn test_highlighting() {
let (analysis, file_id) = single_file( let (analysis, file_id) = single_file(
r#" r#"
#[derive(Clone, Debug)] #[derive(Clone, Debug)]