623: WIP: module id is not def id r=matklad a=matklad

This achieves two things:

* makes module_tree & item_map per crate, not per source_root
* begins the refactoring to remove the universal `DefId` in favor of separate IDs for each kind of `Def`. Currently, only modules get a different ID, though (see the sketch below).
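
A minimal sketch of that direction (simplified, not the PR's exact code): each def kind gets its own newtype ID and its own interner, so the code-model wrappers carry a typed ID instead of a `DefId` plus a `DefKind` tag.

```rust
// Sketch only: hypothetical, simplified types illustrating per-kind IDs.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct StructId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct EnumId(u32);

// Code-model wrappers hold the typed ID instead of `def_id: DefId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct Struct { pub id: StructId }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct Enum { pub id: EnumId }

// Passing an EnumId where a StructId is expected is now a compile error;
// with a universal DefId + DefKind tag it would only fail at runtime.
pub fn struct_id(s: Struct) -> StructId { s.id }
```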

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
Commit c42db0bbd7 by bors[bot], 2019-01-24 22:56:13 +00:00
36 changed files with 950 additions and 988 deletions


@ -160,6 +160,7 @@ pub trait FilesDatabase: salsa::Database {
/// Contents of the source root. /// Contents of the source root.
#[salsa::input] #[salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>; fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>;
/// The set of "local" (that is, from the current workspace) roots. /// The set of "local" (that is, from the current workspace) roots.
/// Files in local roots are assumed to change frequently. /// Files in local roots are assumed to change frequently.
#[salsa::input] #[salsa::input]
@ -173,6 +174,17 @@ pub trait FilesDatabase: salsa::Database {
fn crate_graph(&self) -> Arc<CrateGraph>; fn crate_graph(&self) -> Arc<CrateGraph>;
} }
fn source_root_crates(db: &impl FilesDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> {
let root = db.source_root(id);
let graph = db.crate_graph();
let res = root
.files
.values()
.filter_map(|&it| graph.crate_id_for_crate_root(it))
.collect::<Vec<_>>();
Arc::new(res)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{CrateGraph, FileId, SmolStr}; use super::{CrateGraph, FileId, SmolStr};
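
For context on the hunk above: `source_root` and `crate_graph` are `#[salsa::input]` queries, while the new `source_root_crates` carries no attribute and is backed by the free function of the same name added below the trait, so its result is cached and recomputed only when those inputs change. A hypothetical caller:

```rust
// Sketch; `crates_for_root` is a made-up helper, the query names are the
// ones from the diff above.
fn crates_for_root(db: &impl FilesDatabase, root: SourceRootId) -> Vec<CrateId> {
    db.source_root_crates(root).as_ref().clone()
}
```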


@ -13,7 +13,7 @@ pub use crate::{
cancellation::Canceled, cancellation::Canceled,
input::{ input::{
FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, Dependency, FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, Dependency,
FileTextQuery, FileSourceRootQuery, SourceRootQuery, LocalRootsQuery, LibraryRootsQuery, CrateGraphQuery, FileTextQuery, FileSourceRootQuery, SourceRootQuery, SourceRootCratesQuery, LocalRootsQuery, LibraryRootsQuery, CrateGraphQuery,
FileRelativePathQuery FileRelativePathQuery
}, },
loc2id::LocationIntener, loc2id::LocationIntener,


@ -3,25 +3,35 @@
use std::sync::Arc; use std::sync::Arc;
use ra_syntax::{ use ra_syntax::ast::{self, NameOwner, StructFlavor};
SyntaxNode,
ast::{self, NameOwner, StructFlavor, AstNode}
};
use crate::{ use crate::{
DefId, DefLoc, Name, AsName, Struct, Enum, EnumVariant, Name, AsName, Struct, Enum, EnumVariant, Crate,
HirDatabase, DefKind, HirDatabase,
SourceItemId,
type_ref::TypeRef, type_ref::TypeRef,
ids::LocationCtx,
}; };
impl Struct { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub(crate) fn new(def_id: DefId) -> Self { pub enum AdtDef {
Struct { def_id } Struct(Struct),
} Enum(Enum),
}
impl_froms!(AdtDef: Struct, Enum);
impl AdtDef {
pub(crate) fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
match self {
AdtDef::Struct(s) => s.module(db),
AdtDef::Enum(e) => e.module(db),
}
.krate(db)
}
}
impl Struct {
pub(crate) fn variant_data(&self, db: &impl HirDatabase) -> Arc<VariantData> { pub(crate) fn variant_data(&self, db: &impl HirDatabase) -> Arc<VariantData> {
db.struct_data(self.def_id).variant_data.clone() db.struct_data((*self).into()).variant_data.clone()
} }
} }
@ -39,38 +49,12 @@ impl StructData {
StructData { name, variant_data } StructData { name, variant_data }
} }
pub(crate) fn struct_data_query(db: &impl HirDatabase, def_id: DefId) -> Arc<StructData> { pub(crate) fn struct_data_query(db: &impl HirDatabase, struct_: Struct) -> Arc<StructData> {
let def_loc = def_id.loc(db); let (_, struct_def) = struct_.source(db);
assert!(def_loc.kind == DefKind::Struct); Arc::new(StructData::new(&*struct_def))
let syntax = db.file_item(def_loc.source_item_id);
let struct_def =
ast::StructDef::cast(&syntax).expect("struct def should point to StructDef node");
Arc::new(StructData::new(struct_def))
} }
} }
fn get_def_id(
db: &impl HirDatabase,
same_file_loc: &DefLoc,
node: &SyntaxNode,
expected_kind: DefKind,
) -> DefId {
let file_id = same_file_loc.source_item_id.file_id;
let file_items = db.file_items(file_id);
let item_id = file_items.id_of(file_id, node);
let source_item_id = SourceItemId {
item_id: Some(item_id),
..same_file_loc.source_item_id
};
let loc = DefLoc {
kind: expected_kind,
source_item_id,
..*same_file_loc
};
loc.id(db)
}
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnumData { pub struct EnumData {
pub(crate) name: Option<Name>, pub(crate) name: Option<Name>,
@ -83,27 +67,24 @@ impl EnumData {
EnumData { name, variants } EnumData { name, variants }
} }
pub(crate) fn enum_data_query(db: &impl HirDatabase, def_id: DefId) -> Arc<EnumData> { pub(crate) fn enum_data_query(db: &impl HirDatabase, e: Enum) -> Arc<EnumData> {
let def_loc = def_id.loc(db); let (file_id, enum_def) = e.source(db);
assert!(def_loc.kind == DefKind::Enum); let module = e.module(db);
let syntax = db.file_item(def_loc.source_item_id); let ctx = LocationCtx::new(db, module, file_id);
let enum_def = ast::EnumDef::cast(&syntax).expect("enum def should point to EnumDef node");
let variants = if let Some(vl) = enum_def.variant_list() { let variants = if let Some(vl) = enum_def.variant_list() {
vl.variants() vl.variants()
.filter_map(|variant_def| { .filter_map(|variant_def| {
let name = variant_def.name().map(|n| n.as_name()); let name = variant_def.name()?.as_name();
let var = EnumVariant {
name.map(|n| { id: ctx.to_def(variant_def),
let def_id = };
get_def_id(db, &def_loc, variant_def.syntax(), DefKind::EnumVariant); Some((name, var))
(n, EnumVariant::new(def_id))
})
}) })
.collect() .collect()
} else { } else {
Vec::new() Vec::new()
}; };
Arc::new(EnumData::new(enum_def, variants)) Arc::new(EnumData::new(&*enum_def, variants))
} }
} }
@ -128,21 +109,15 @@ impl EnumVariantData {
pub(crate) fn enum_variant_data_query( pub(crate) fn enum_variant_data_query(
db: &impl HirDatabase, db: &impl HirDatabase,
def_id: DefId, var: EnumVariant,
) -> Arc<EnumVariantData> { ) -> Arc<EnumVariantData> {
let def_loc = def_id.loc(db); let (file_id, variant_def) = var.source(db);
assert!(def_loc.kind == DefKind::EnumVariant); let enum_def = variant_def.parent_enum();
let syntax = db.file_item(def_loc.source_item_id); let ctx = LocationCtx::new(db, var.module(db), file_id);
let variant_def = ast::EnumVariant::cast(&syntax) let e = Enum {
.expect("enum variant def should point to EnumVariant node"); id: ctx.to_def(enum_def),
let enum_node = syntax };
.parent() Arc::new(EnumVariantData::new(&*variant_def, e))
.expect("enum variant should have enum variant list ancestor")
.parent()
.expect("enum variant list should have enum ancestor");
let enum_def_id = get_def_id(db, &def_loc, enum_node, DefKind::Enum);
Arc::new(EnumVariantData::new(variant_def, Enum::new(enum_def_id)))
} }
} }
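
The new `AdtDef` enum leans on the `impl_froms!` macro (its definition appears later in this diff, in `lib.rs`). For `AdtDef` the macro expands to ordinary `From` impls, roughly:

```rust
// Rough expansion of `impl_froms!(AdtDef: Struct, Enum);`
impl From<Struct> for AdtDef {
    fn from(it: Struct) -> AdtDef {
        AdtDef::Struct(it)
    }
}
impl From<Enum> for AdtDef {
    fn from(it: Enum) -> AdtDef {
        AdtDef::Enum(it)
    }
}
```

which is what lets call sites in this diff write `(*self).into()` instead of naming the target enum's variant.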


@ -5,22 +5,23 @@ use ra_db::{CrateId, FileId};
use ra_syntax::{ast::self, TreeArc, SyntaxNode}; use ra_syntax::{ast::self, TreeArc, SyntaxNode};
use crate::{ use crate::{
Name, DefId, Path, PerNs, ScopesWithSyntaxMapping, Ty, HirFileId, Name, Path, PerNs, ScopesWithSyntaxMapping, Ty, HirFileId,
type_ref::TypeRef, type_ref::TypeRef,
nameres::{ModuleScope, lower::ImportId}, nameres::{ModuleScope, lower::ImportId},
db::HirDatabase, db::HirDatabase,
expr::BodySyntaxMapping, expr::BodySyntaxMapping,
ty::InferenceResult, ty::{InferenceResult, VariantDef},
adt::VariantData, adt::VariantData,
generics::GenericParams, generics::GenericParams,
code_model_impl::def_id_to_ast, docs::{Documentation, Docs, docs_from_ast},
docs::{Documentation, Docs, docs_from_ast} module_tree::ModuleId,
ids::{FunctionId, StructId, EnumId, EnumVariantId, AstItemDef, ConstId, StaticId, TraitId, TypeId},
}; };
/// hir::Crate describes a single crate. It's the main interface with which /// hir::Crate describes a single crate. It's the main interface with which
/// a crate's dependencies interact. Mostly, it should be just a proxy for the /// a crate's dependencies interact. Mostly, it should be just a proxy for the
/// root module. /// root module.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Crate { pub struct Crate {
pub(crate) crate_id: CrateId, pub(crate) crate_id: CrateId,
} }
@ -45,22 +46,40 @@ impl Crate {
#[derive(Debug)] #[derive(Debug)]
pub enum Def { pub enum Def {
Item,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Module {
pub(crate) krate: CrateId,
pub(crate) module_id: ModuleId,
}
/// The defs which can be visible in the module.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModuleDef {
Module(Module), Module(Module),
Function(Function),
Struct(Struct), Struct(Struct),
Enum(Enum), Enum(Enum),
// Can't be directly declared, but can be imported.
EnumVariant(EnumVariant), EnumVariant(EnumVariant),
Function(Function),
Const(Const), Const(Const),
Static(Static), Static(Static),
Trait(Trait), Trait(Trait),
Type(Type), Type(Type),
Item,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Module {
pub(crate) def_id: DefId,
} }
impl_froms!(
ModuleDef: Module,
Function,
Struct,
Enum,
EnumVariant,
Const,
Static,
Trait,
Type
);
pub enum ModuleSource { pub enum ModuleSource {
SourceFile(TreeArc<ast::SourceFile>), SourceFile(TreeArc<ast::SourceFile>),
@ -149,7 +168,7 @@ impl Module {
self.scope_impl(db) self.scope_impl(db)
} }
pub fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> PerNs<DefId> { pub fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> PerNs<ModuleDef> {
self.resolve_path_impl(db, path) self.resolve_path_impl(db, path)
} }
@ -160,7 +179,7 @@ impl Module {
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StructField { pub struct StructField {
parent: DefId, parent: VariantDef,
name: Name, name: Name,
} }
@ -174,38 +193,38 @@ impl StructField {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Struct { pub struct Struct {
pub(crate) def_id: DefId, pub(crate) id: StructId,
} }
impl Struct { impl Struct {
pub fn def_id(&self) -> DefId { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::StructDef>) {
self.def_id self.id.source(db)
}
pub fn module(&self, db: &impl HirDatabase) -> Module {
self.id.module(db)
} }
pub fn name(&self, db: &impl HirDatabase) -> Option<Name> { pub fn name(&self, db: &impl HirDatabase) -> Option<Name> {
db.struct_data(self.def_id).name.clone() db.struct_data(*self).name.clone()
} }
pub fn fields(&self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(&self, db: &impl HirDatabase) -> Vec<StructField> {
db.struct_data(self.def_id) db.struct_data(*self)
.variant_data .variant_data
.fields() .fields()
.iter() .iter()
.map(|it| StructField { .map(|it| StructField {
parent: self.def_id, parent: (*self).into(),
name: it.name.clone(), name: it.name.clone(),
}) })
.collect() .collect()
} }
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::StructDef>) {
def_id_to_ast(db, self.def_id)
}
pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> { pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> {
db.generic_params(self.def_id) db.generic_params((*self).into())
} }
} }
@ -215,34 +234,30 @@ impl Docs for Struct {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Enum { pub struct Enum {
pub(crate) def_id: DefId, pub(crate) id: EnumId,
} }
impl Enum { impl Enum {
pub(crate) fn new(def_id: DefId) -> Self { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::EnumDef>) {
Enum { def_id } self.id.source(db)
} }
pub fn def_id(&self) -> DefId { pub fn module(&self, db: &impl HirDatabase) -> Module {
self.def_id self.id.module(db)
} }
pub fn name(&self, db: &impl HirDatabase) -> Option<Name> { pub fn name(&self, db: &impl HirDatabase) -> Option<Name> {
db.enum_data(self.def_id).name.clone() db.enum_data(*self).name.clone()
} }
pub fn variants(&self, db: &impl HirDatabase) -> Vec<(Name, EnumVariant)> { pub fn variants(&self, db: &impl HirDatabase) -> Vec<(Name, EnumVariant)> {
db.enum_data(self.def_id).variants.clone() db.enum_data(*self).variants.clone()
}
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::EnumDef>) {
def_id_to_ast(db, self.def_id)
} }
pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> { pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> {
db.generic_params(self.def_id) db.generic_params((*self).into())
} }
} }
@ -252,30 +267,28 @@ impl Docs for Enum {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariant { pub struct EnumVariant {
pub(crate) def_id: DefId, pub(crate) id: EnumVariantId,
} }
impl EnumVariant { impl EnumVariant {
pub(crate) fn new(def_id: DefId) -> Self { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::EnumVariant>) {
EnumVariant { def_id } self.id.source(db)
} }
pub fn module(&self, db: &impl HirDatabase) -> Module {
pub fn def_id(&self) -> DefId { self.id.module(db)
self.def_id
} }
pub fn parent_enum(&self, db: &impl HirDatabase) -> Enum { pub fn parent_enum(&self, db: &impl HirDatabase) -> Enum {
db.enum_variant_data(self.def_id).parent_enum.clone() db.enum_variant_data(*self).parent_enum.clone()
} }
pub fn name(&self, db: &impl HirDatabase) -> Option<Name> { pub fn name(&self, db: &impl HirDatabase) -> Option<Name> {
db.enum_variant_data(self.def_id).name.clone() db.enum_variant_data(*self).name.clone()
} }
pub fn variant_data(&self, db: &impl HirDatabase) -> Arc<VariantData> { pub fn variant_data(&self, db: &impl HirDatabase) -> Arc<VariantData> {
db.enum_variant_data(self.def_id).variant_data.clone() db.enum_variant_data(*self).variant_data.clone()
} }
pub fn fields(&self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(&self, db: &impl HirDatabase) -> Vec<StructField> {
@ -283,15 +296,11 @@ impl EnumVariant {
.fields() .fields()
.iter() .iter()
.map(|it| StructField { .map(|it| StructField {
parent: self.def_id, parent: (*self).into(),
name: it.name.clone(), name: it.name.clone(),
}) })
.collect() .collect()
} }
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::EnumVariant>) {
def_id_to_ast(db, self.def_id)
}
} }
impl Docs for EnumVariant { impl Docs for EnumVariant {
@ -300,9 +309,9 @@ impl Docs for EnumVariant {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function { pub struct Function {
pub(crate) def_id: DefId, pub(crate) id: FunctionId,
} }
pub use crate::code_model_impl::function::ScopeEntryWithSyntax; pub use crate::code_model_impl::function::ScopeEntryWithSyntax;
@ -339,21 +348,21 @@ impl FnSignature {
} }
impl Function { impl Function {
pub fn def_id(&self) -> DefId { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::FnDef>) {
self.def_id self.id.source(db)
} }
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::FnDef>) { pub fn module(&self, db: &impl HirDatabase) -> Module {
def_id_to_ast(db, self.def_id) self.id.module(db)
} }
pub fn body_syntax_mapping(&self, db: &impl HirDatabase) -> Arc<BodySyntaxMapping> { pub fn body_syntax_mapping(&self, db: &impl HirDatabase) -> Arc<BodySyntaxMapping> {
db.body_syntax_mapping(self.def_id) db.body_syntax_mapping(*self)
} }
pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSyntaxMapping { pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSyntaxMapping {
let scopes = db.fn_scopes(self.def_id); let scopes = db.fn_scopes(*self);
let syntax_mapping = db.body_syntax_mapping(self.def_id); let syntax_mapping = db.body_syntax_mapping(*self);
ScopesWithSyntaxMapping { ScopesWithSyntaxMapping {
scopes, scopes,
syntax_mapping, syntax_mapping,
@ -361,15 +370,15 @@ impl Function {
} }
pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> { pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
db.fn_signature(self.def_id) db.fn_signature(*self)
} }
pub fn infer(&self, db: &impl HirDatabase) -> Arc<InferenceResult> { pub fn infer(&self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.def_id) db.infer(*self)
} }
pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> { pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> {
db.generic_params(self.def_id) db.generic_params((*self).into())
} }
} }
@ -379,18 +388,14 @@ impl Docs for Function {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Const { pub struct Const {
pub(crate) def_id: DefId, pub(crate) id: ConstId,
} }
impl Const { impl Const {
pub(crate) fn new(def_id: DefId) -> Const {
Const { def_id }
}
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::ConstDef>) { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::ConstDef>) {
def_id_to_ast(db, self.def_id) self.id.source(db)
} }
} }
@ -400,18 +405,14 @@ impl Docs for Const {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Static { pub struct Static {
pub(crate) def_id: DefId, pub(crate) id: StaticId,
} }
impl Static { impl Static {
pub(crate) fn new(def_id: DefId) -> Static {
Static { def_id }
}
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::StaticDef>) { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::StaticDef>) {
def_id_to_ast(db, self.def_id) self.id.source(db)
} }
} }
@ -421,22 +422,18 @@ impl Docs for Static {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Trait { pub struct Trait {
pub(crate) def_id: DefId, pub(crate) id: TraitId,
} }
impl Trait { impl Trait {
pub(crate) fn new(def_id: DefId) -> Trait {
Trait { def_id }
}
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::TraitDef>) { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::TraitDef>) {
def_id_to_ast(db, self.def_id) self.id.source(db)
} }
pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> { pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> {
db.generic_params(self.def_id) db.generic_params((*self).into())
} }
} }
@ -446,22 +443,18 @@ impl Docs for Trait {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Type { pub struct Type {
pub(crate) def_id: DefId, pub(crate) id: TypeId,
} }
impl Type { impl Type {
pub(crate) fn new(def_id: DefId) -> Type {
Type { def_id }
}
pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::TypeDef>) { pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::TypeDef>) {
def_id_to_ast(db, self.def_id) self.id.source(db)
} }
pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> { pub fn generic_params(&self, db: &impl HirDatabase) -> Arc<GenericParams> {
db.generic_params(self.def_id) db.generic_params((*self).into())
} }
} }
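
Since `Module::resolve_path` now yields `PerNs<ModuleDef>` rather than `PerNs<DefId>`, callers match on the typed enum directly. A sketch (the helper name is hypothetical, the types are the ones introduced above):

```rust
// Resolve a path in a module and keep the type-namespace result
// only if it names an ADT.
fn resolve_adt(db: &impl HirDatabase, module: &Module, path: &Path) -> Option<AdtDef> {
    match module.resolve_path(db, path).take_types()? {
        ModuleDef::Struct(s) => Some(AdtDef::Struct(s)),
        ModuleDef::Enum(e) => Some(AdtDef::Enum(e)),
        _ => None,
    }
}
```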


@ -1,18 +1,3 @@
mod krate; // `crate` is invalid ident :( mod krate; // `crate` is invalid ident :(
mod module; mod module;
pub(crate) mod function; pub(crate) mod function;
use ra_syntax::{AstNode, TreeArc};
use crate::{HirDatabase, DefId, HirFileId};
pub(crate) fn def_id_to_ast<N: AstNode>(
db: &impl HirDatabase,
def_id: DefId,
) -> (HirFileId, TreeArc<N>) {
let (file_id, syntax) = def_id.source(db);
let ast = N::cast(&syntax)
.unwrap_or_else(|| panic!("def points to wrong source {:?} {:?}", def_id, syntax))
.to_owned();
(file_id, ast)
}


@ -2,41 +2,32 @@ mod scope;
use std::sync::Arc; use std::sync::Arc;
use ra_syntax::{TreeArc, ast::{self, NameOwner}}; use ra_syntax::ast::{self, NameOwner};
use crate::{ use crate::{
DefId, HirDatabase, Name, AsName, Function, FnSignature, Module, HirDatabase, Name, AsName, Function, FnSignature,
type_ref::{TypeRef, Mutability}, type_ref::{TypeRef, Mutability},
expr::Body, expr::Body,
impl_block::ImplBlock, impl_block::ImplBlock,
code_model_impl::def_id_to_ast,
}; };
pub use self::scope::{FnScopes, ScopesWithSyntaxMapping, ScopeEntryWithSyntax}; pub use self::scope::{FnScopes, ScopesWithSyntaxMapping, ScopeEntryWithSyntax};
impl Function { impl Function {
pub(crate) fn new(def_id: DefId) -> Function {
Function { def_id }
}
pub(crate) fn body(&self, db: &impl HirDatabase) -> Arc<Body> { pub(crate) fn body(&self, db: &impl HirDatabase) -> Arc<Body> {
db.body_hir(self.def_id) db.body_hir(*self)
}
pub(crate) fn module(&self, db: &impl HirDatabase) -> Module {
self.def_id.module(db)
} }
/// The containing impl block, if this is a method. /// The containing impl block, if this is a method.
pub(crate) fn impl_block(&self, db: &impl HirDatabase) -> Option<ImplBlock> { pub(crate) fn impl_block(&self, db: &impl HirDatabase) -> Option<ImplBlock> {
self.def_id.impl_block(db) let module_impls = db.impls_in_module(self.module(db));
ImplBlock::containing(module_impls, (*self).into())
} }
} }
impl FnSignature { impl FnSignature {
pub(crate) fn fn_signature_query(db: &impl HirDatabase, def_id: DefId) -> Arc<FnSignature> { pub(crate) fn fn_signature_query(db: &impl HirDatabase, func: Function) -> Arc<FnSignature> {
// FIXME: we're using def_id_to_ast here to avoid returning Cancelable... this is a bit hacky let (_, node) = func.source(db);
let node: TreeArc<ast::FnDef> = def_id_to_ast(db, def_id).1;
let name = node let name = node
.name() .name()
.map(|n| n.as_name()) .map(|n| n.as_name())


@ -1,7 +1,7 @@
use ra_db::CrateId; use ra_db::CrateId;
use crate::{ use crate::{
HirFileId, Crate, CrateDependency, AsName, DefLoc, DefKind, Module, SourceItemId, Crate, CrateDependency, AsName, Module,
db::HirDatabase, db::HirDatabase,
}; };
@ -21,27 +21,13 @@ impl Crate {
.collect() .collect()
} }
pub(crate) fn root_module_impl(&self, db: &impl HirDatabase) -> Option<Module> { pub(crate) fn root_module_impl(&self, db: &impl HirDatabase) -> Option<Module> {
let crate_graph = db.crate_graph(); let module_tree = db.module_tree(self.crate_id);
let file_id = crate_graph.crate_root(self.crate_id); let module_id = module_tree.modules().next()?;
let source_root_id = db.file_source_root(file_id);
let file_id = HirFileId::from(file_id);
let module_tree = db.module_tree(source_root_id);
// FIXME: teach module tree about crate roots instead of guessing
let source = SourceItemId {
file_id,
item_id: None,
};
let module_id = module_tree.find_module_by_source(source)?;
let def_loc = DefLoc { let module = Module {
kind: DefKind::Module, krate: self.crate_id,
source_root_id,
module_id, module_id,
source_item_id: module_id.source(&module_tree),
}; };
let def_id = def_loc.id(db);
let module = Module::new(def_id);
Some(module) Some(module)
} }
} }


@ -1,52 +1,33 @@
use ra_db::{SourceRootId, FileId}; use ra_db::FileId;
use ra_syntax::{ast, SyntaxNode, AstNode, TreeArc}; use ra_syntax::{ast, SyntaxNode, TreeArc};
use crate::{ use crate::{
Module, ModuleSource, Problem, Module, ModuleSource, Problem, ModuleDef,
Crate, DefId, DefLoc, DefKind, Name, Path, PathKind, PerNs, Def, Crate, Name, Path, PathKind, PerNs,
module_tree::ModuleId, module_tree::ModuleId,
nameres::{ModuleScope, lower::ImportId}, nameres::{ModuleScope, lower::ImportId},
db::HirDatabase, db::HirDatabase,
}; };
impl Module { impl Module {
pub(crate) fn new(def_id: DefId) -> Self { fn with_module_id(&self, module_id: ModuleId) -> Module {
crate::code_model_api::Module { def_id } Module {
}
pub(crate) fn from_module_id(
db: &impl HirDatabase,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Self {
let module_tree = db.module_tree(source_root_id);
let def_loc = DefLoc {
kind: DefKind::Module,
source_root_id,
module_id, module_id,
source_item_id: module_id.source(&module_tree), krate: self.krate,
}; }
let def_id = def_loc.id(db);
Module::new(def_id)
} }
pub(crate) fn name_impl(&self, db: &impl HirDatabase) -> Option<Name> { pub(crate) fn name_impl(&self, db: &impl HirDatabase) -> Option<Name> {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let module_tree = db.module_tree(loc.source_root_id); let link = self.module_id.parent_link(&module_tree)?;
let link = loc.module_id.parent_link(&module_tree)?;
Some(link.name(&module_tree).clone()) Some(link.name(&module_tree).clone())
} }
pub(crate) fn definition_source_impl(&self, db: &impl HirDatabase) -> (FileId, ModuleSource) { pub(crate) fn definition_source_impl(&self, db: &impl HirDatabase) -> (FileId, ModuleSource) {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let file_id = loc.source_item_id.file_id.as_original_file(); let source = self.module_id.source(&module_tree);
let syntax_node = db.file_item(loc.source_item_id); let module_source = ModuleSource::from_source_item_id(db, source);
let module_source = if let Some(source_file) = ast::SourceFile::cast(&syntax_node) { let file_id = source.file_id.as_original_file();
ModuleSource::SourceFile(source_file.to_owned())
} else {
let module = ast::Module::cast(&syntax_node).unwrap();
ModuleSource::Module(module.to_owned())
};
(file_id, module_source) (file_id, module_source)
} }
@ -54,9 +35,8 @@ impl Module {
&self, &self,
db: &impl HirDatabase, db: &impl HirDatabase,
) -> Option<(FileId, TreeArc<ast::Module>)> { ) -> Option<(FileId, TreeArc<ast::Module>)> {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let module_tree = db.module_tree(loc.source_root_id); let link = self.module_id.parent_link(&module_tree)?;
let link = loc.module_id.parent_link(&module_tree)?;
let file_id = link let file_id = link
.owner(&module_tree) .owner(&module_tree)
.source(&module_tree) .source(&module_tree)
@ -71,85 +51,67 @@ impl Module {
db: &impl HirDatabase, db: &impl HirDatabase,
import: ImportId, import: ImportId,
) -> TreeArc<ast::PathSegment> { ) -> TreeArc<ast::PathSegment> {
let loc = self.def_id.loc(db); let source_map = db.lower_module_source_map(self.clone());
let source_map = db.lower_module_source_map(loc.source_root_id, loc.module_id);
let (_, source) = self.definition_source(db); let (_, source) = self.definition_source(db);
source_map.get(&source, import) source_map.get(&source, import)
} }
pub(crate) fn krate_impl(&self, db: &impl HirDatabase) -> Option<Crate> { pub(crate) fn krate_impl(&self, _db: &impl HirDatabase) -> Option<Crate> {
let root = self.crate_root(db); Some(Crate::new(self.krate))
let loc = root.def_id.loc(db);
let file_id = loc.source_item_id.file_id.as_original_file();
let crate_graph = db.crate_graph();
let crate_id = crate_graph.crate_id_for_crate_root(file_id)?;
Some(Crate::new(crate_id))
} }
pub(crate) fn crate_root_impl(&self, db: &impl HirDatabase) -> Module { pub(crate) fn crate_root_impl(&self, db: &impl HirDatabase) -> Module {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let module_tree = db.module_tree(loc.source_root_id); let module_id = self.module_id.crate_root(&module_tree);
let module_id = loc.module_id.crate_root(&module_tree); self.with_module_id(module_id)
Module::from_module_id(db, loc.source_root_id, module_id)
} }
/// Finds a child module with the specified name. /// Finds a child module with the specified name.
pub(crate) fn child_impl(&self, db: &impl HirDatabase, name: &Name) -> Option<Module> { pub(crate) fn child_impl(&self, db: &impl HirDatabase, name: &Name) -> Option<Module> {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let module_tree = db.module_tree(loc.source_root_id); let child_id = self.module_id.child(&module_tree, name)?;
let child_id = loc.module_id.child(&module_tree, name)?; Some(self.with_module_id(child_id))
Some(Module::from_module_id(db, loc.source_root_id, child_id))
} }
/// Iterates over all child modules. /// Iterates over all child modules.
pub(crate) fn children_impl(&self, db: &impl HirDatabase) -> impl Iterator<Item = Module> { pub(crate) fn children_impl(&self, db: &impl HirDatabase) -> impl Iterator<Item = Module> {
// FIXME this should be implementable without collecting into a vec, but let module_tree = db.module_tree(self.krate);
// it's kind of hard since the iterator needs to keep a reference to the let children = self
// module tree.
let loc = self.def_id.loc(db);
let module_tree = db.module_tree(loc.source_root_id);
let children = loc
.module_id .module_id
.children(&module_tree) .children(&module_tree)
.map(|(_, module_id)| Module::from_module_id(db, loc.source_root_id, module_id)) .map(|(_, module_id)| self.with_module_id(module_id))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
children.into_iter() children.into_iter()
} }
pub(crate) fn parent_impl(&self, db: &impl HirDatabase) -> Option<Module> { pub(crate) fn parent_impl(&self, db: &impl HirDatabase) -> Option<Module> {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let module_tree = db.module_tree(loc.source_root_id); let parent_id = self.module_id.parent(&module_tree)?;
let parent_id = loc.module_id.parent(&module_tree)?; Some(self.with_module_id(parent_id))
Some(Module::from_module_id(db, loc.source_root_id, parent_id))
} }
/// Returns a `ModuleScope`: a set of items, visible in this module. /// Returns a `ModuleScope`: a set of items, visible in this module.
pub(crate) fn scope_impl(&self, db: &impl HirDatabase) -> ModuleScope { pub(crate) fn scope_impl(&self, db: &impl HirDatabase) -> ModuleScope {
let loc = self.def_id.loc(db); let item_map = db.item_map(self.krate);
let item_map = db.item_map(loc.source_root_id); item_map.per_module[&self.module_id].clone()
item_map.per_module[&loc.module_id].clone()
} }
pub(crate) fn resolve_path_impl(&self, db: &impl HirDatabase, path: &Path) -> PerNs<DefId> { pub(crate) fn resolve_path_impl(&self, db: &impl HirDatabase, path: &Path) -> PerNs<ModuleDef> {
let mut curr_per_ns = PerNs::types( let mut curr_per_ns: PerNs<ModuleDef> = PerNs::types(match path.kind {
match path.kind { PathKind::Crate => self.crate_root(db).into(),
PathKind::Crate => self.crate_root(db), PathKind::Self_ | PathKind::Plain => self.clone().into(),
PathKind::Self_ | PathKind::Plain => self.clone(), PathKind::Super => {
PathKind::Super => { if let Some(p) = self.parent(db) {
if let Some(p) = self.parent(db) { p.into()
p } else {
} else {
return PerNs::none();
}
}
PathKind::Abs => {
// TODO: absolute use is not supported
return PerNs::none(); return PerNs::none();
} }
} }
.def_id, PathKind::Abs => {
); // TODO: absolute use is not supported
return PerNs::none();
}
});
for segment in path.segments.iter() { for segment in path.segments.iter() {
let curr = match curr_per_ns.as_ref().take_types() { let curr = match curr_per_ns.as_ref().take_types() {
@ -164,15 +126,16 @@ impl Module {
} }
}; };
// resolve segment in curr // resolve segment in curr
curr_per_ns = match curr.resolve(db) {
Def::Module(m) => { curr_per_ns = match curr {
ModuleDef::Module(m) => {
let scope = m.scope(db); let scope = m.scope(db);
match scope.get(&segment.name) { match scope.get(&segment.name) {
Some(r) => r.def_id, Some(r) => r.def_id.clone(),
None => PerNs::none(), None => PerNs::none(),
} }
} }
Def::Enum(e) => { ModuleDef::Enum(e) => {
// enum variant // enum variant
let matching_variant = e let matching_variant = e
.variants(db) .variants(db)
@ -180,7 +143,7 @@ impl Module {
.find(|(n, _variant)| n == &segment.name); .find(|(n, _variant)| n == &segment.name);
match matching_variant { match matching_variant {
Some((_n, variant)) => PerNs::both(variant.def_id(), e.def_id()), Some((_n, variant)) => PerNs::both(variant.into(), (*e).into()),
None => PerNs::none(), None => PerNs::none(),
} }
} }
@ -199,8 +162,7 @@ impl Module {
&self, &self,
db: &impl HirDatabase, db: &impl HirDatabase,
) -> Vec<(TreeArc<SyntaxNode>, Problem)> { ) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
let loc = self.def_id.loc(db); let module_tree = db.module_tree(self.krate);
let module_tree = db.module_tree(loc.source_root_id); self.module_id.problems(&module_tree, db)
loc.module_id.problems(&module_tree, db)
} }
} }


@ -1,20 +1,21 @@
use std::sync::Arc; use std::sync::Arc;
use ra_syntax::{SyntaxNode, TreeArc, SourceFile}; use ra_syntax::{SyntaxNode, TreeArc, SourceFile};
use ra_db::{SourceRootId, SyntaxDatabase, salsa}; use ra_db::{SyntaxDatabase, CrateId, salsa};
use crate::{ use crate::{
HirInterner, DefId, MacroCallId, Name, HirFileId, MacroCallId, Name, HirFileId,
SourceFileItems, SourceItemId, Crate, SourceFileItems, SourceItemId, Crate, Module, HirInterner,
query_definitions, query_definitions,
FnSignature, FnScopes, Function, FnSignature, FnScopes,
Struct, Enum, EnumVariant,
macros::MacroExpansion, macros::MacroExpansion,
module_tree::{ModuleId, ModuleTree}, module_tree::ModuleTree,
nameres::{ItemMap, lower::{LoweredModule, ImportSourceMap}}, nameres::{ItemMap, lower::{LoweredModule, ImportSourceMap}},
ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks}, ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, VariantDef},
adt::{StructData, EnumData, EnumVariantData}, adt::{StructData, EnumData, EnumVariantData},
impl_block::ModuleImplBlocks, impl_block::ModuleImplBlocks,
generics::GenericParams, generics::{GenericParams, GenericDef},
}; };
#[salsa::query_group] #[salsa::query_group]
@ -26,25 +27,25 @@ pub trait HirDatabase: SyntaxDatabase + AsRef<HirInterner> {
fn expand_macro_invocation(&self, invoc: MacroCallId) -> Option<Arc<MacroExpansion>>; fn expand_macro_invocation(&self, invoc: MacroCallId) -> Option<Arc<MacroExpansion>>;
#[salsa::invoke(query_definitions::fn_scopes)] #[salsa::invoke(query_definitions::fn_scopes)]
fn fn_scopes(&self, def_id: DefId) -> Arc<FnScopes>; fn fn_scopes(&self, func: Function) -> Arc<FnScopes>;
#[salsa::invoke(crate::adt::StructData::struct_data_query)] #[salsa::invoke(crate::adt::StructData::struct_data_query)]
fn struct_data(&self, def_id: DefId) -> Arc<StructData>; fn struct_data(&self, s: Struct) -> Arc<StructData>;
#[salsa::invoke(crate::adt::EnumData::enum_data_query)] #[salsa::invoke(crate::adt::EnumData::enum_data_query)]
fn enum_data(&self, def_id: DefId) -> Arc<EnumData>; fn enum_data(&self, e: Enum) -> Arc<EnumData>;
#[salsa::invoke(crate::adt::EnumVariantData::enum_variant_data_query)] #[salsa::invoke(crate::adt::EnumVariantData::enum_variant_data_query)]
fn enum_variant_data(&self, def_id: DefId) -> Arc<EnumVariantData>; fn enum_variant_data(&self, var: EnumVariant) -> Arc<EnumVariantData>;
#[salsa::invoke(crate::ty::infer)] #[salsa::invoke(crate::ty::infer)]
fn infer(&self, def_id: DefId) -> Arc<InferenceResult>; fn infer(&self, func: Function) -> Arc<InferenceResult>;
#[salsa::invoke(crate::ty::type_for_def)] #[salsa::invoke(crate::ty::type_for_def)]
fn type_for_def(&self, def_id: DefId) -> Ty; fn type_for_def(&self, def: TypableDef) -> Ty;
#[salsa::invoke(crate::ty::type_for_field)] #[salsa::invoke(crate::ty::type_for_field)]
fn type_for_field(&self, def_id: DefId, field: Name) -> Option<Ty>; fn type_for_field(&self, def: VariantDef, field: Name) -> Option<Ty>;
#[salsa::invoke(query_definitions::file_items)] #[salsa::invoke(query_definitions::file_items)]
fn file_items(&self, file_id: HirFileId) -> Arc<SourceFileItems>; fn file_items(&self, file_id: HirFileId) -> Arc<SourceFileItems>;
@ -56,51 +57,35 @@ pub trait HirDatabase: SyntaxDatabase + AsRef<HirInterner> {
fn submodules(&self, source: SourceItemId) -> Arc<Vec<crate::module_tree::Submodule>>; fn submodules(&self, source: SourceItemId) -> Arc<Vec<crate::module_tree::Submodule>>;
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)] #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)]
fn lower_module( fn lower_module(&self, module: Module) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
&self,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_module_query)] #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_module_query)]
fn lower_module_module( fn lower_module_module(&self, module: Module) -> Arc<LoweredModule>;
&self,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Arc<LoweredModule>;
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_source_map_query)] #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_source_map_query)]
fn lower_module_source_map( fn lower_module_source_map(&self, module: Module) -> Arc<ImportSourceMap>;
&self,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Arc<ImportSourceMap>;
#[salsa::invoke(query_definitions::item_map)] #[salsa::invoke(query_definitions::item_map)]
fn item_map(&self, source_root_id: SourceRootId) -> Arc<ItemMap>; fn item_map(&self, crate_id: CrateId) -> Arc<ItemMap>;
#[salsa::invoke(crate::module_tree::ModuleTree::module_tree_query)] #[salsa::invoke(crate::module_tree::ModuleTree::module_tree_query)]
fn module_tree(&self, source_root_id: SourceRootId) -> Arc<ModuleTree>; fn module_tree(&self, crate_id: CrateId) -> Arc<ModuleTree>;
#[salsa::invoke(crate::impl_block::impls_in_module)] #[salsa::invoke(crate::impl_block::impls_in_module)]
fn impls_in_module( fn impls_in_module(&self, module: Module) -> Arc<ModuleImplBlocks>;
&self,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Arc<ModuleImplBlocks>;
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>; fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;
#[salsa::invoke(crate::expr::body_hir)] #[salsa::invoke(crate::expr::body_hir)]
fn body_hir(&self, def_id: DefId) -> Arc<crate::expr::Body>; fn body_hir(&self, func: Function) -> Arc<crate::expr::Body>;
#[salsa::invoke(crate::expr::body_syntax_mapping)] #[salsa::invoke(crate::expr::body_syntax_mapping)]
fn body_syntax_mapping(&self, def_id: DefId) -> Arc<crate::expr::BodySyntaxMapping>; fn body_syntax_mapping(&self, func: Function) -> Arc<crate::expr::BodySyntaxMapping>;
#[salsa::invoke(crate::generics::GenericParams::generic_params_query)] #[salsa::invoke(crate::generics::GenericParams::generic_params_query)]
fn generic_params(&self, def_id: DefId) -> Arc<GenericParams>; fn generic_params(&self, def: GenericDef) -> Arc<GenericParams>;
#[salsa::invoke(crate::FnSignature::fn_signature_query)] #[salsa::invoke(crate::FnSignature::fn_signature_query)]
fn fn_signature(&self, def_id: DefId) -> Arc<FnSignature>; fn fn_signature(&self, func: Function) -> Arc<FnSignature>;
} }
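
With the query keys switched from `DefId` to the typed defs, callers pass the wrapper straight through; a hypothetical helper built on the signatures above:

```rust
// Sketch: both queries are keyed by `Function` now, so there is no
// DefId/DefKind round-trip on the way in.
fn function_info(db: &impl HirDatabase, func: Function) -> (Arc<FnSignature>, Arc<InferenceResult>) {
    (db.fn_signature(func), db.infer(func))
}
```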


@ -9,7 +9,11 @@ use ra_syntax::{
ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralFlavor} ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralFlavor}
}; };
use crate::{Path, type_ref::{Mutability, TypeRef}, Name, HirDatabase, DefId, Def, name::AsName}; use crate::{
Path, Name, HirDatabase, Function,
name::AsName,
type_ref::{Mutability, TypeRef},
};
use crate::ty::primitive::{UintTy, UncertainIntTy, UncertainFloatTy}; use crate::ty::primitive::{UintTy, UncertainIntTy, UncertainFloatTy};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -435,8 +439,8 @@ impl Pat {
// Queries // Queries
pub(crate) fn body_hir(db: &impl HirDatabase, def_id: DefId) -> Arc<Body> { pub(crate) fn body_hir(db: &impl HirDatabase, func: Function) -> Arc<Body> {
Arc::clone(&body_syntax_mapping(db, def_id).body) Arc::clone(&body_syntax_mapping(db, func).body)
} }
struct ExprCollector { struct ExprCollector {
@ -955,14 +959,8 @@ pub(crate) fn collect_fn_body_syntax(node: &ast::FnDef) -> BodySyntaxMapping {
collector.into_body_syntax_mapping(params, body) collector.into_body_syntax_mapping(params, body)
} }
pub(crate) fn body_syntax_mapping(db: &impl HirDatabase, def_id: DefId) -> Arc<BodySyntaxMapping> { pub(crate) fn body_syntax_mapping(db: &impl HirDatabase, func: Function) -> Arc<BodySyntaxMapping> {
let def = def_id.resolve(db); let (_, fn_def) = func.source(db);
let body_syntax_mapping = collect_fn_body_syntax(&fn_def);
let body_syntax_mapping = match def {
Def::Function(f) => collect_fn_body_syntax(&f.source(db).1),
// TODO: consts, etc.
_ => panic!("Trying to get body for item type without body"),
};
Arc::new(body_syntax_mapping) Arc::new(body_syntax_mapping)
} }


@ -5,9 +5,9 @@
use std::sync::Arc; use std::sync::Arc;
use ra_syntax::ast::{TypeParamList, AstNode, NameOwner}; use ra_syntax::ast::{self, NameOwner, TypeParamsOwner};
use crate::{db::HirDatabase, DefId, Name, AsName}; use crate::{db::HirDatabase, Name, AsName, Function, Struct, Enum, Trait, Type};
/// Data about a generic parameter (to a function, struct, impl, ...). /// Data about a generic parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
@ -22,26 +22,53 @@ pub struct GenericParams {
pub(crate) params: Vec<GenericParam>, pub(crate) params: Vec<GenericParam>,
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum GenericDef {
Function(Function),
Struct(Struct),
Enum(Enum),
Trait(Trait),
Type(Type),
}
impl_froms!(GenericDef: Function, Struct, Enum, Trait, Type);
impl GenericParams { impl GenericParams {
pub(crate) fn generic_params_query(db: &impl HirDatabase, def_id: DefId) -> Arc<GenericParams> { pub(crate) fn generic_params_query(
let (_file_id, node) = def_id.source(db); db: &impl HirDatabase,
def: GenericDef,
) -> Arc<GenericParams> {
let mut generics = GenericParams::default(); let mut generics = GenericParams::default();
if let Some(type_param_list) = node.children().find_map(TypeParamList::cast) { match def {
for (idx, type_param) in type_param_list.type_params().enumerate() { GenericDef::Function(it) => generics.fill(&*it.source(db).1),
let name = type_param GenericDef::Struct(it) => generics.fill(&*it.source(db).1),
.name() GenericDef::Enum(it) => generics.fill(&*it.source(db).1),
.map(AsName::as_name) GenericDef::Trait(it) => generics.fill(&*it.source(db).1),
.unwrap_or_else(Name::missing); GenericDef::Type(it) => generics.fill(&*it.source(db).1),
let param = GenericParam {
idx: idx as u32,
name,
};
generics.params.push(param);
}
} }
Arc::new(generics) Arc::new(generics)
} }
fn fill(&mut self, node: &impl TypeParamsOwner) {
if let Some(params) = node.type_param_list() {
self.fill_params(params)
}
}
fn fill_params(&mut self, params: &ast::TypeParamList) {
for (idx, type_param) in params.type_params().enumerate() {
let name = type_param
.name()
.map(AsName::as_name)
.unwrap_or_else(Name::missing);
let param = GenericParam {
idx: idx as u32,
name,
};
self.params.push(param);
}
}
pub(crate) fn find_by_name(&self, name: &Name) -> Option<&GenericParam> { pub(crate) fn find_by_name(&self, name: &Name) -> Option<&GenericParam> {
self.params.iter().find(|p| &p.name == name) self.params.iter().find(|p| &p.name == name)
} }
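
Because `generic_params` is keyed by `GenericDef` and `impl_froms!` supplies the `From` impls, call sites convert with `.into()`. A sketch assuming crate-internal access to `GenericParams::params` (the helper name is made up):

```rust
// Any of the five def kinds converts into GenericDef via From/Into.
fn has_type_params(db: &impl HirDatabase, def: impl Into<GenericDef>) -> bool {
    !db.generic_params(def.into()).params.is_empty()
}
```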


@ -1,22 +1,41 @@
use ra_db::{SourceRootId, LocationIntener, FileId}; use std::{
marker::PhantomData,
hash::{Hash, Hasher},
};
use ra_db::{LocationIntener, FileId};
use ra_syntax::{TreeArc, SyntaxNode, SourceFile, AstNode, ast}; use ra_syntax::{TreeArc, SyntaxNode, SourceFile, AstNode, ast};
use ra_arena::{Arena, RawId, impl_arena_id}; use ra_arena::{Arena, RawId, ArenaId, impl_arena_id};
use crate::{ use crate::{
HirDatabase, Def, Function, Struct, Enum, EnumVariant, ImplBlock, Crate, HirDatabase,
Module, Trait, Type, Static, Const, Module,
module_tree::ModuleId,
}; };
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct HirInterner { pub struct HirInterner {
defs: LocationIntener<DefLoc, DefId>,
macros: LocationIntener<MacroCallLoc, MacroCallId>, macros: LocationIntener<MacroCallLoc, MacroCallId>,
fns: LocationIntener<ItemLoc<ast::FnDef>, FunctionId>,
structs: LocationIntener<ItemLoc<ast::StructDef>, StructId>,
enums: LocationIntener<ItemLoc<ast::EnumDef>, EnumId>,
enum_variants: LocationIntener<ItemLoc<ast::EnumVariant>, EnumVariantId>,
consts: LocationIntener<ItemLoc<ast::ConstDef>, ConstId>,
statics: LocationIntener<ItemLoc<ast::StaticDef>, StaticId>,
traits: LocationIntener<ItemLoc<ast::TraitDef>, TraitId>,
types: LocationIntener<ItemLoc<ast::TypeDef>, TypeId>,
} }
impl HirInterner { impl HirInterner {
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.defs.len() + self.macros.len() self.macros.len()
+ self.fns.len()
+ self.structs.len()
+ self.enums.len()
+ self.enum_variants.len()
+ self.consts.len()
+ self.statics.len()
+ self.traits.len()
+ self.types.len()
} }
} }
@ -110,10 +129,9 @@ impl From<MacroCallId> for HirFileId {
pub struct MacroCallId(RawId); pub struct MacroCallId(RawId);
impl_arena_id!(MacroCallId); impl_arena_id!(MacroCallId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc { pub struct MacroCallLoc {
pub(crate) source_root_id: SourceRootId, pub(crate) module: Module,
pub(crate) module_id: ModuleId,
pub(crate) source_item_id: SourceItemId, pub(crate) source_item_id: SourceItemId,
} }
@ -130,117 +148,161 @@ impl MacroCallLoc {
} }
} }
/// Def's are a core concept of hir. A `Def` is an Item (function, module, etc) #[derive(Debug)]
/// in a specific module. pub struct ItemLoc<N: AstNode> {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub(crate) module: Module,
pub struct DefId(RawId); raw: SourceItemId,
impl_arena_id!(DefId); _ty: PhantomData<N>,
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct DefLoc {
pub(crate) kind: DefKind,
pub(crate) source_root_id: SourceRootId,
pub(crate) module_id: ModuleId,
pub(crate) source_item_id: SourceItemId,
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] impl<N: AstNode> PartialEq for ItemLoc<N> {
pub(crate) enum DefKind { fn eq(&self, other: &Self) -> bool {
Module, self.module == other.module && self.raw == other.raw
Function,
Struct,
Enum,
EnumVariant,
Const,
Static,
Trait,
Type,
Item,
/// The constructor of a struct. E.g. if we have `struct Foo(usize)`, the
/// name `Foo` needs to resolve to different types depending on whether we
/// are in the types or values namespace: As a type, `Foo` of course refers
/// to the struct `Foo`; as a value, `Foo` is a callable type with signature
/// `(usize) -> Foo`. The cleanest approach to handle this seems to be to
/// have different defs in the two namespaces.
///
/// rustc does the same; note that it even creates a struct constructor if
/// the struct isn't a tuple struct (see `CtorKind::Fictive` in rustc).
StructCtor,
}
impl DefId {
pub(crate) fn loc(self, db: &impl AsRef<HirInterner>) -> DefLoc {
db.as_ref().defs.id2loc(self)
} }
}
impl<N: AstNode> Eq for ItemLoc<N> {}
impl<N: AstNode> Hash for ItemLoc<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.module.hash(hasher);
self.raw.hash(hasher);
}
}
pub fn resolve(self, db: &impl HirDatabase) -> Def { impl<N: AstNode> Clone for ItemLoc<N> {
let loc = self.loc(db); fn clone(&self) -> ItemLoc<N> {
match loc.kind { ItemLoc {
DefKind::Module => { module: self.module,
let module = Module::from_module_id(db, loc.source_root_id, loc.module_id); raw: self.raw,
Def::Module(module) _ty: PhantomData,
}
DefKind::Function => {
let function = Function::new(self);
Def::Function(function)
}
DefKind::Struct => {
let struct_def = Struct::new(self);
Def::Struct(struct_def)
}
DefKind::Enum => Def::Enum(Enum::new(self)),
DefKind::EnumVariant => Def::EnumVariant(EnumVariant::new(self)),
DefKind::Const => {
let def = Const::new(self);
Def::Const(def)
}
DefKind::Static => {
let def = Static::new(self);
Def::Static(def)
}
DefKind::Trait => {
let def = Trait::new(self);
Def::Trait(def)
}
DefKind::Type => {
let def = Type::new(self);
Def::Type(def)
}
DefKind::StructCtor => Def::Item,
DefKind::Item => Def::Item,
} }
} }
}
pub(crate) fn source(self, db: &impl HirDatabase) -> (HirFileId, TreeArc<SyntaxNode>) { #[derive(Clone, Copy)]
let loc = self.loc(db); pub(crate) struct LocationCtx<DB> {
let syntax = db.file_item(loc.source_item_id); db: DB,
(loc.source_item_id.file_id, syntax) module: Module,
file_id: HirFileId,
}
impl<'a, DB: HirDatabase> LocationCtx<&'a DB> {
pub(crate) fn new(db: &'a DB, module: Module, file_id: HirFileId) -> LocationCtx<&'a DB> {
LocationCtx {
db,
module,
file_id,
}
} }
pub(crate) fn to_def<N, DEF>(self, ast: &N) -> DEF
/// For a module, returns that module; for any other def, returns the containing module. where
pub fn module(self, db: &impl HirDatabase) -> Module { N: AstNode,
let loc = self.loc(db); DEF: AstItemDef<N>,
Module::from_module_id(db, loc.source_root_id, loc.module_id) {
} DEF::from_ast(self, ast)
/// Returns the containing crate.
pub fn krate(&self, db: &impl HirDatabase) -> Option<Crate> {
self.module(db).krate(db)
}
/// Returns the containing impl block, if this is an impl item.
pub fn impl_block(self, db: &impl HirDatabase) -> Option<ImplBlock> {
let loc = self.loc(db);
let module_impls = db.impls_in_module(loc.source_root_id, loc.module_id);
ImplBlock::containing(module_impls, self)
} }
} }
impl DefLoc { pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
pub(crate) fn id(&self, db: &impl AsRef<HirInterner>) -> DefId { fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<N>, Self>;
db.as_ref().defs.loc2id(&self) fn from_ast(ctx: LocationCtx<&impl HirDatabase>, ast: &N) -> Self {
let items = ctx.db.file_items(ctx.file_id);
let raw = SourceItemId {
file_id: ctx.file_id,
item_id: Some(items.id_of(ctx.file_id, ast.syntax())),
};
let loc = ItemLoc {
module: ctx.module,
raw,
_ty: PhantomData,
};
Self::interner(ctx.db.as_ref()).loc2id(&loc)
}
fn source(self, db: &impl HirDatabase) -> (HirFileId, TreeArc<N>) {
let int = Self::interner(db.as_ref());
let loc = int.id2loc(self);
let syntax = db.file_item(loc.raw);
let ast = N::cast(&syntax)
.unwrap_or_else(|| panic!("invalid ItemLoc: {:?}", loc.raw))
.to_owned();
(loc.raw.file_id, ast)
}
fn module(self, db: &impl HirDatabase) -> Module {
let int = Self::interner(db.as_ref());
let loc = int.id2loc(self);
loc.module
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(RawId);
impl_arena_id!(FunctionId);
impl AstItemDef<ast::FnDef> for FunctionId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::FnDef>, Self> {
&interner.fns
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(RawId);
impl_arena_id!(StructId);
impl AstItemDef<ast::StructDef> for StructId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::StructDef>, Self> {
&interner.structs
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumId(RawId);
impl_arena_id!(EnumId);
impl AstItemDef<ast::EnumDef> for EnumId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::EnumDef>, Self> {
&interner.enums
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariantId(RawId);
impl_arena_id!(EnumVariantId);
impl AstItemDef<ast::EnumVariant> for EnumVariantId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::EnumVariant>, Self> {
&interner.enum_variants
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(RawId);
impl_arena_id!(ConstId);
impl AstItemDef<ast::ConstDef> for ConstId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::ConstDef>, Self> {
&interner.consts
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(RawId);
impl_arena_id!(StaticId);
impl AstItemDef<ast::StaticDef> for StaticId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::StaticDef>, Self> {
&interner.statics
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitId(RawId);
impl_arena_id!(TraitId);
impl AstItemDef<ast::TraitDef> for TraitId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::TraitDef>, Self> {
&interner.traits
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeId(RawId);
impl_arena_id!(TypeId);
impl AstItemDef<ast::TypeDef> for TypeId {
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::TypeDef>, Self> {
&interner.types
} }
} }
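
The `AstItemDef` trait is the generic replacement for the old `DefLoc`/`DefId` round-trip: `LocationCtx::to_def` interns an AST node's `SourceItemId` together with its containing module into a typed ID, and `source`/`module` map back. Adding a def kind is now one newtype plus one `interner` impl; a sketch for a hypothetical `UnionId` (neither `ast::UnionDef` nor the `unions` interner field exists in this diff):

```rust
// Sketch only: what one more def kind would look like under this scheme.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnionId(RawId);
impl_arena_id!(UnionId);
impl AstItemDef<ast::UnionDef> for UnionId {
    fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<ast::UnionDef>, Self> {
        &interner.unions // hypothetical field next to `fns`, `structs`, ...
    }
}
```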


@ -3,14 +3,13 @@ use rustc_hash::FxHashMap;
use ra_arena::{Arena, RawId, impl_arena_id}; use ra_arena::{Arena, RawId, impl_arena_id};
use ra_syntax::ast::{self, AstNode}; use ra_syntax::ast::{self, AstNode};
use ra_db::{SourceRootId};
use crate::{ use crate::{
DefId, DefLoc, DefKind, SourceItemId, SourceFileItems, Const, Type,
Function, HirInterner, Function, HirFileId,
db::HirDatabase, db::HirDatabase,
type_ref::TypeRef, type_ref::TypeRef,
module_tree::ModuleId, ids::LocationCtx,
}; };
use crate::code_model_api::{Module, ModuleSource}; use crate::code_model_api::{Module, ModuleSource};
@ -24,9 +23,9 @@ pub struct ImplBlock {
impl ImplBlock { impl ImplBlock {
pub(crate) fn containing( pub(crate) fn containing(
module_impl_blocks: Arc<ModuleImplBlocks>, module_impl_blocks: Arc<ModuleImplBlocks>,
def_id: DefId, item: ImplItem,
) -> Option<ImplBlock> { ) -> Option<ImplBlock> {
let impl_id = *module_impl_blocks.impls_by_def.get(&def_id)?; let impl_id = *module_impl_blocks.impls_by_def.get(&item)?;
Some(ImplBlock { Some(ImplBlock {
module_impl_blocks, module_impl_blocks,
impl_id, impl_id,
@ -66,39 +65,25 @@ pub struct ImplData {
impl ImplData { impl ImplData {
pub(crate) fn from_ast( pub(crate) fn from_ast(
db: &impl AsRef<HirInterner>, db: &impl HirDatabase,
file_items: &SourceFileItems, file_id: HirFileId,
module: &Module, module: Module,
node: &ast::ImplBlock, node: &ast::ImplBlock,
) -> Self { ) -> Self {
let target_trait = node.target_trait().map(TypeRef::from_ast); let target_trait = node.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(node.target_type()); let target_type = TypeRef::from_ast_opt(node.target_type());
let module_loc = module.def_id.loc(db); let ctx = LocationCtx::new(db, module, file_id);
let items = if let Some(item_list) = node.item_list() { let items = if let Some(item_list) = node.item_list() {
item_list item_list
.impl_items() .impl_items()
.map(|item_node| { .map(|item_node| match item_node.kind() {
let kind = match item_node.kind() { ast::ImplItemKind::FnDef(it) => {
ast::ImplItemKind::FnDef(..) => DefKind::Function, ImplItem::Method(Function { id: ctx.to_def(it) })
ast::ImplItemKind::ConstDef(..) => DefKind::Item,
ast::ImplItemKind::TypeDef(..) => DefKind::Item,
};
let item_id = file_items.id_of_unchecked(item_node.syntax());
let source_item_id = SourceItemId {
file_id: module_loc.source_item_id.file_id,
item_id: Some(item_id),
};
let def_loc = DefLoc {
kind,
source_item_id,
..module_loc
};
let def_id = def_loc.id(db);
match item_node.kind() {
ast::ImplItemKind::FnDef(..) => ImplItem::Method(Function::new(def_id)),
ast::ImplItemKind::ConstDef(..) => ImplItem::Const(def_id),
ast::ImplItemKind::TypeDef(..) => ImplItem::Type(def_id),
} }
ast::ImplItemKind::ConstDef(it) => {
ImplItem::Const(Const { id: ctx.to_def(it) })
}
ast::ImplItemKind::TypeDef(it) => ImplItem::Type(Type { id: ctx.to_def(it) }),
}) })
.collect() .collect()
} else { } else {
@ -124,22 +109,19 @@ impl ImplData {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
//TODO: rename to ImplDef?
pub enum ImplItem { pub enum ImplItem {
Method(Function), Method(Function),
// these don't have their own types yet Const(Const),
Const(DefId), Type(Type),
Type(DefId),
// Existential // Existential
} }
impl_froms!(ImplItem: Const, Type);
impl ImplItem { impl From<Function> for ImplItem {
pub fn def_id(&self) -> DefId { fn from(func: Function) -> ImplItem {
match self { ImplItem::Method(func)
ImplItem::Method(f) => f.def_id(),
ImplItem::Const(def_id) => *def_id,
ImplItem::Type(def_id) => *def_id,
}
} }
} }
@ -155,7 +137,7 @@ impl_arena_id!(ImplId);
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct ModuleImplBlocks { pub struct ModuleImplBlocks {
pub(crate) impls: Arena<ImplId, ImplData>, pub(crate) impls: Arena<ImplId, ImplData>,
impls_by_def: FxHashMap<DefId, ImplId>, impls_by_def: FxHashMap<ImplItem, ImplId>,
} }
impl ModuleImplBlocks { impl ModuleImplBlocks {
@ -168,6 +150,7 @@ impl ModuleImplBlocks {
fn collect(&mut self, db: &impl HirDatabase, module: Module) { fn collect(&mut self, db: &impl HirDatabase, module: Module) {
let (file_id, module_source) = module.definition_source(db); let (file_id, module_source) = module.definition_source(db);
let file_id: HirFileId = file_id.into();
let node = match &module_source { let node = match &module_source {
ModuleSource::SourceFile(node) => node.syntax(), ModuleSource::SourceFile(node) => node.syntax(),
ModuleSource::Module(node) => node ModuleSource::Module(node) => node
@ -176,25 +159,18 @@ impl ModuleImplBlocks {
.syntax(), .syntax(),
}; };
let source_file_items = db.file_items(file_id.into());
for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) {
let impl_block = ImplData::from_ast(db, &source_file_items, &module, impl_block_ast); let impl_block = ImplData::from_ast(db, file_id, module, impl_block_ast);
let id = self.impls.alloc(impl_block); let id = self.impls.alloc(impl_block);
for impl_item in &self.impls[id].items { for &impl_item in &self.impls[id].items {
self.impls_by_def.insert(impl_item.def_id(), id); self.impls_by_def.insert(impl_item, id);
} }
} }
} }
} }
pub(crate) fn impls_in_module( pub(crate) fn impls_in_module(db: &impl HirDatabase, module: Module) -> Arc<ModuleImplBlocks> {
db: &impl HirDatabase,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Arc<ModuleImplBlocks> {
let mut result = ModuleImplBlocks::new(); let mut result = ModuleImplBlocks::new();
let module = Module::from_module_id(db, source_root_id, module_id);
result.collect(db, module); result.collect(db, module);
Arc::new(result) Arc::new(result)
} }
@ -5,6 +5,18 @@
//! to a particular crate instance. That is, it has cfg flags and features //! to a particular crate instance. That is, it has cfg flags and features
//! applied. So, the relation between syntax and HIR is many-to-one. //! applied. So, the relation between syntax and HIR is many-to-one.
macro_rules! impl_froms {
($e:ident: $($v:ident), *) => {
$(
impl From<$v> for $e {
fn from(it: $v) -> $e {
$e::$v(it)
}
}
)*
}
}
pub mod db; pub mod db;
#[cfg(test)] #[cfg(test)]
mod mock; mod mock;
@ -34,25 +46,26 @@ mod marks;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
name::{AsName, KnownName}, name::{AsName, KnownName},
ids::{DefKind, SourceItemId, SourceFileItems}, ids::{SourceItemId, SourceFileItems},
}; };
pub use self::{ pub use self::{
path::{Path, PathKind}, path::{Path, PathKind},
name::Name, name::Name,
ids::{HirFileId, DefId, DefLoc, MacroCallId, MacroCallLoc, HirInterner}, ids::{HirFileId, MacroCallId, MacroCallLoc, HirInterner},
macros::{MacroDef, MacroInput, MacroExpansion}, macros::{MacroDef, MacroInput, MacroExpansion},
nameres::{ItemMap, PerNs, Namespace, Resolution}, nameres::{ItemMap, PerNs, Namespace, Resolution},
ty::Ty, ty::Ty,
impl_block::{ImplBlock, ImplItem}, impl_block::{ImplBlock, ImplItem},
code_model_impl::function::{FnScopes, ScopesWithSyntaxMapping}, code_model_impl::function::{FnScopes, ScopesWithSyntaxMapping},
docs::{Docs, Documentation} docs::{Docs, Documentation},
adt::AdtDef,
}; };
pub use self::code_model_api::{ pub use self::code_model_api::{
Crate, CrateDependency, Crate, CrateDependency,
Def, Def,
Module, ModuleSource, Problem, Module, ModuleDef, ModuleSource, Problem,
Struct, Enum, EnumVariant, Struct, Enum, EnumVariant,
Function, FnSignature, ScopeEntryWithSyntax, Function, FnSignature, ScopeEntryWithSyntax,
StructField, StructField,
@ -35,10 +35,6 @@ impl MockDatabase {
let file_id = db.add_file(WORKSPACE, &mut source_root, "/main.rs", text); let file_id = db.add_file(WORKSPACE, &mut source_root, "/main.rs", text);
db.query_mut(ra_db::SourceRootQuery) db.query_mut(ra_db::SourceRootQuery)
.set(WORKSPACE, Arc::new(source_root.clone())); .set(WORKSPACE, Arc::new(source_root.clone()));
let mut crate_graph = CrateGraph::default();
crate_graph.add_crate_root(file_id);
db.set_crate_graph(crate_graph);
(db, source_root, file_id) (db, source_root, file_id)
} }
@ -97,6 +93,8 @@ impl MockDatabase {
text: &str, text: &str,
) -> FileId { ) -> FileId {
assert!(path.starts_with('/')); assert!(path.starts_with('/'));
let is_crate_root = path == "/lib.rs" || path == "/main.rs";
let path = RelativePathBuf::from_path(&path[1..]).unwrap(); let path = RelativePathBuf::from_path(&path[1..]).unwrap();
let file_id = FileId(self.file_counter); let file_id = FileId(self.file_counter);
self.file_counter += 1; self.file_counter += 1;
@ -107,6 +105,12 @@ impl MockDatabase {
self.query_mut(ra_db::FileSourceRootQuery) self.query_mut(ra_db::FileSourceRootQuery)
.set(file_id, source_root_id); .set(file_id, source_root_id);
source_root.files.insert(path, file_id); source_root.files.insert(path, file_id);
if is_crate_root {
let mut crate_graph = CrateGraph::default();
crate_graph.add_crate_root(file_id);
self.set_crate_graph(crate_graph);
}
file_id file_id
} }
@ -202,6 +206,7 @@ salsa::database_storage! {
fn file_relative_path() for ra_db::FileRelativePathQuery; fn file_relative_path() for ra_db::FileRelativePathQuery;
fn file_source_root() for ra_db::FileSourceRootQuery; fn file_source_root() for ra_db::FileSourceRootQuery;
fn source_root() for ra_db::SourceRootQuery; fn source_root() for ra_db::SourceRootQuery;
fn source_root_crates() for ra_db::SourceRootCratesQuery;
fn local_roots() for ra_db::LocalRootsQuery; fn local_roots() for ra_db::LocalRootsQuery;
fn library_roots() for ra_db::LibraryRootsQuery; fn library_roots() for ra_db::LibraryRootsQuery;
fn crate_graph() for ra_db::CrateGraphQuery; fn crate_graph() for ra_db::CrateGraphQuery;
@ -3,7 +3,7 @@ use std::sync::Arc;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use ra_db::{FileId, SourceRootId, SourceRoot}; use ra_db::{FileId, SourceRoot, CrateId};
use ra_syntax::{ use ra_syntax::{
SyntaxNode, TreeArc, SyntaxNode, TreeArc,
algo::generate, algo::generate,
@ -126,13 +126,10 @@ struct LinkData {
} }
impl ModuleTree { impl ModuleTree {
pub(crate) fn module_tree_query( pub(crate) fn module_tree_query(db: &impl HirDatabase, crate_id: CrateId) -> Arc<ModuleTree> {
db: &impl HirDatabase,
source_root: SourceRootId,
) -> Arc<ModuleTree> {
db.check_canceled(); db.check_canceled();
let mut res = ModuleTree::default(); let mut res = ModuleTree::default();
res.init(db, source_root); res.init_crate(db, crate_id);
Arc::new(res) Arc::new(res)
} }
@ -145,24 +142,21 @@ impl ModuleTree {
Some(res) Some(res)
} }
fn init(&mut self, db: &impl HirDatabase, source_root: SourceRootId) { fn init_crate(&mut self, db: &impl HirDatabase, crate_id: CrateId) {
let crate_graph = db.crate_graph();
let file_id = crate_graph.crate_root(crate_id);
let source_root_id = db.file_source_root(file_id);
let mut roots = FxHashMap::default(); let mut roots = FxHashMap::default();
let mut visited = FxHashSet::default(); let mut visited = FxHashSet::default();
let source_root = db.source_root(source_root); let source_root = db.source_root(source_root_id);
for &file_id in source_root.files.values() { let source = SourceItemId {
let source = SourceItemId { file_id: file_id.into(),
file_id: file_id.into(), item_id: None,
item_id: None, };
}; let module_id = self.init_subtree(db, &source_root, &mut visited, &mut roots, None, source);
if visited.contains(&source) { roots.insert(file_id, module_id);
continue; // TODO: use explicit crate_roots here
}
assert!(!roots.contains_key(&file_id));
let module_id =
self.init_subtree(db, &source_root, &mut visited, &mut roots, None, source);
roots.insert(file_id, module_id);
}
} }
fn init_subtree( fn init_subtree(
@ -16,19 +16,19 @@
//! structure itself is modified. //! structure itself is modified.
pub(crate) mod lower; pub(crate) mod lower;
use crate::nameres::lower::*;
use std::sync::Arc; use std::sync::Arc;
use ra_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use ra_db::SourceRootId;
use crate::{ use crate::{
DefId, DefLoc, DefKind, Module, ModuleDef,
Path, PathKind, Path, PathKind,
HirDatabase, Crate, HirDatabase, Crate,
Name, Name,
module_tree::{ModuleId, ModuleTree}, module_tree::{ModuleId, ModuleTree},
//FIXME: deglobify
nameres::lower::*,
}; };
/// `ItemMap` is the result of name resolution. It contains, for each /// `ItemMap` is the result of name resolution. It contains, for each
@ -58,7 +58,7 @@ impl ModuleScope {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Resolution { pub struct Resolution {
/// None for unresolved /// None for unresolved
pub def_id: PerNs<DefId>, pub def_id: PerNs<ModuleDef>,
/// ident by which this is imported into local scope. /// ident by which this is imported into local scope.
pub import: Option<ImportId>, pub import: Option<ImportId>,
} }
@ -152,7 +152,7 @@ impl<T> PerNs<T> {
pub(crate) struct Resolver<'a, DB> { pub(crate) struct Resolver<'a, DB> {
db: &'a DB, db: &'a DB,
input: &'a FxHashMap<ModuleId, Arc<LoweredModule>>, input: &'a FxHashMap<ModuleId, Arc<LoweredModule>>,
source_root: SourceRootId, krate: CrateId,
module_tree: Arc<ModuleTree>, module_tree: Arc<ModuleTree>,
processed_imports: FxHashSet<(ModuleId, ImportId)>, processed_imports: FxHashSet<(ModuleId, ImportId)>,
result: ItemMap, result: ItemMap,
@ -165,13 +165,13 @@ where
pub(crate) fn new( pub(crate) fn new(
db: &'a DB, db: &'a DB,
input: &'a FxHashMap<ModuleId, Arc<LoweredModule>>, input: &'a FxHashMap<ModuleId, Arc<LoweredModule>>,
source_root: SourceRootId, krate: CrateId,
module_tree: Arc<ModuleTree>,
) -> Resolver<'a, DB> { ) -> Resolver<'a, DB> {
let module_tree = db.module_tree(krate);
Resolver { Resolver {
db, db,
input, input,
source_root, krate,
module_tree, module_tree,
processed_imports: FxHashSet::default(), processed_imports: FxHashSet::default(),
result: ItemMap::default(), result: ItemMap::default(),
@ -210,7 +210,7 @@ where
let krate = Crate::new(crate_id); let krate = Crate::new(crate_id);
for dep in krate.dependencies(self.db) { for dep in krate.dependencies(self.db) {
if let Some(module) = dep.krate.root_module(self.db) { if let Some(module) = dep.krate.root_module(self.db) {
let def_id = module.def_id; let def_id = module.into();
self.add_module_item( self.add_module_item(
&mut module_items, &mut module_items,
dep.name.clone(), dep.name.clone(),
@ -244,20 +244,22 @@ where
// Populate modules // Populate modules
for (name, module_id) in module_id.children(&self.module_tree) { for (name, module_id) in module_id.children(&self.module_tree) {
let def_loc = DefLoc { let module = Module {
kind: DefKind::Module,
source_root_id: self.source_root,
module_id, module_id,
source_item_id: module_id.source(&self.module_tree), krate: self.krate,
}; };
let def_id = def_loc.id(self.db); self.add_module_item(&mut module_items, name, PerNs::types(module.into()));
self.add_module_item(&mut module_items, name, PerNs::types(def_id));
} }
self.result.per_module.insert(module_id, module_items); self.result.per_module.insert(module_id, module_items);
} }
fn add_module_item(&self, module_items: &mut ModuleScope, name: Name, def_id: PerNs<DefId>) { fn add_module_item(
&self,
module_items: &mut ModuleScope,
name: Name,
def_id: PerNs<ModuleDef>,
) {
let resolution = Resolution { let resolution = Resolution {
def_id, def_id,
import: None, import: None,
@ -329,17 +331,11 @@ where
); );
return false; return false;
}; };
curr = match type_def_id.loc(self.db) { curr = match type_def_id {
DefLoc { ModuleDef::Module(module) => {
kind: DefKind::Module, if module.krate == self.krate {
module_id: target_module_id, module.module_id
source_root_id,
..
} => {
if source_root_id == self.source_root {
target_module_id
} else { } else {
let module = crate::code_model_api::Module::new(type_def_id);
let path = Path { let path = Path {
segments: import.path.segments[i + 1..].iter().cloned().collect(), segments: import.path.segments[i + 1..].iter().cloned().collect(),
kind: PathKind::Crate, kind: PathKind::Crate,
@ -359,7 +355,7 @@ where
"resolved import {:?} ({:?}) cross-source root to {:?}", "resolved import {:?} ({:?}) cross-source root to {:?}",
last_segment.name, last_segment.name,
import, import,
def_id.map(|did| did.loc(self.db)) def_id,
); );
return true; return true;
} else { } else {
@ -372,7 +368,7 @@ where
log::debug!( log::debug!(
"path segment {:?} resolved to non-module {:?}, but is not last", "path segment {:?} resolved to non-module {:?}, but is not last",
segment.name, segment.name,
type_def_id.loc(self.db) type_def_id,
); );
return true; // this resolved to a non-module, so the path won't ever resolve return true; // this resolved to a non-module, so the path won't ever resolve
} }
@ -382,7 +378,7 @@ where
"resolved import {:?} ({:?}) within source root to {:?}", "resolved import {:?} ({:?}) within source root to {:?}",
segment.name, segment.name,
import, import,
def_id.map(|did| did.loc(self.db)) def_id,
); );
self.update(module_id, |items| { self.update(module_id, |items| {
let res = Resolution { let res = Resolution {
@ -1,17 +1,17 @@
use std::sync::Arc; use std::sync::Arc;
use ra_syntax::{ use ra_syntax::{
SyntaxKind, AstNode, SourceFile, TreeArc, AstPtr, AstNode, SourceFile, TreeArc, AstPtr,
ast::{self, ModuleItemOwner, NameOwner}, ast::{self, ModuleItemOwner, NameOwner},
}; };
use ra_db::SourceRootId;
use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap}; use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
SourceItemId, Path, ModuleSource, HirDatabase, Name, SourceFileItems, SourceItemId, Path, ModuleSource, HirDatabase, Name,
HirFileId, MacroCallLoc, AsName, PerNs, DefId, DefKind, DefLoc, HirFileId, MacroCallLoc, AsName, PerNs, Function,
module_tree::ModuleId ModuleDef, Module, Struct, Enum, Const, Static, Trait, Type,
ids::LocationCtx,
}; };
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -32,7 +32,7 @@ pub(super) struct ImportData {
/// can avoid redoing name resolution. /// can avoid redoing name resolution.
#[derive(Debug, Default, PartialEq, Eq)] #[derive(Debug, Default, PartialEq, Eq)]
pub struct LoweredModule { pub struct LoweredModule {
pub(crate) declarations: FxHashMap<Name, PerNs<DefId>>, pub(crate) declarations: FxHashMap<Name, PerNs<ModuleDef>>,
pub(super) imports: Arena<ImportId, ImportData>, pub(super) imports: Arena<ImportId, ImportData>,
} }
@ -59,37 +59,31 @@ impl ImportSourceMap {
impl LoweredModule { impl LoweredModule {
pub(crate) fn lower_module_module_query( pub(crate) fn lower_module_module_query(
db: &impl HirDatabase, db: &impl HirDatabase,
source_root_id: SourceRootId, module: Module,
module_id: ModuleId,
) -> Arc<LoweredModule> { ) -> Arc<LoweredModule> {
db.lower_module(source_root_id, module_id).0 db.lower_module(module).0
} }
pub(crate) fn lower_module_source_map_query( pub(crate) fn lower_module_source_map_query(
db: &impl HirDatabase, db: &impl HirDatabase,
source_root_id: SourceRootId, module: Module,
module_id: ModuleId,
) -> Arc<ImportSourceMap> { ) -> Arc<ImportSourceMap> {
db.lower_module(source_root_id, module_id).1 db.lower_module(module).1
} }
pub(crate) fn lower_module_query( pub(crate) fn lower_module_query(
db: &impl HirDatabase, db: &impl HirDatabase,
source_root_id: SourceRootId, module: Module,
module_id: ModuleId,
) -> (Arc<LoweredModule>, Arc<ImportSourceMap>) { ) -> (Arc<LoweredModule>, Arc<ImportSourceMap>) {
let module_tree = db.module_tree(source_root_id); let (file_id, source) = module.definition_source(db);
let source = module_id.source(&module_tree); let file_id: HirFileId = file_id.into();
let file_id = source.file_id;
let source = ModuleSource::from_source_item_id(db, source);
let mut source_map = ImportSourceMap::default(); let mut source_map = ImportSourceMap::default();
let mut res = LoweredModule::default(); let mut res = LoweredModule::default();
match source { match source {
ModuleSource::SourceFile(it) => res.fill( ModuleSource::SourceFile(it) => res.fill(
&mut source_map, &mut source_map,
db, db,
source_root_id, module,
module_id,
file_id, file_id,
&mut it.items_with_macros(), &mut it.items_with_macros(),
), ),
@ -98,8 +92,7 @@ impl LoweredModule {
res.fill( res.fill(
&mut source_map, &mut source_map,
db, db,
source_root_id, module,
module_id,
file_id, file_id,
&mut item_list.items_with_macros(), &mut item_list.items_with_macros(),
) )
@ -113,8 +106,7 @@ impl LoweredModule {
&mut self, &mut self,
source_map: &mut ImportSourceMap, source_map: &mut ImportSourceMap,
db: &impl HirDatabase, db: &impl HirDatabase,
source_root_id: SourceRootId, module: Module,
module_id: ModuleId,
file_id: HirFileId, file_id: HirFileId,
items: &mut Iterator<Item = ast::ItemOrMacro>, items: &mut Iterator<Item = ast::ItemOrMacro>,
) { ) {
@ -123,21 +115,12 @@ impl LoweredModule {
for item in items { for item in items {
match item { match item {
ast::ItemOrMacro::Item(it) => { ast::ItemOrMacro::Item(it) => {
self.add_def_id( self.add_def_id(source_map, db, module, file_id, it);
source_map,
db,
source_root_id,
module_id,
file_id,
&file_items,
it,
);
} }
ast::ItemOrMacro::Macro(macro_call) => { ast::ItemOrMacro::Macro(macro_call) => {
let item_id = file_items.id_of_unchecked(macro_call.syntax()); let item_id = file_items.id_of_unchecked(macro_call.syntax());
let loc = MacroCallLoc { let loc = MacroCallLoc {
source_root_id, module,
module_id,
source_item_id: SourceItemId { source_item_id: SourceItemId {
file_id, file_id,
item_id: Some(item_id), item_id: Some(item_id),
@ -145,18 +128,9 @@ impl LoweredModule {
}; };
let id = loc.id(db); let id = loc.id(db);
let file_id = HirFileId::from(id); let file_id = HirFileId::from(id);
let file_items = db.file_items(file_id);
//FIXME: expand recursively //FIXME: expand recursively
for item in db.hir_source_file(file_id).items() { for item in db.hir_source_file(file_id).items() {
self.add_def_id( self.add_def_id(source_map, db, module, file_id, item);
source_map,
db,
source_root_id,
module_id,
file_id,
&file_items,
item,
);
} }
} }
} }
@ -167,41 +141,74 @@ impl LoweredModule {
&mut self, &mut self,
source_map: &mut ImportSourceMap, source_map: &mut ImportSourceMap,
db: &impl HirDatabase, db: &impl HirDatabase,
source_root_id: SourceRootId, module: Module,
module_id: ModuleId,
file_id: HirFileId, file_id: HirFileId,
file_items: &SourceFileItems,
item: &ast::ModuleItem, item: &ast::ModuleItem,
) { ) {
let name = match item.kind() { let ctx = LocationCtx::new(db, module, file_id);
ast::ModuleItemKind::StructDef(it) => it.name(), match item.kind() {
ast::ModuleItemKind::EnumDef(it) => it.name(), ast::ModuleItemKind::StructDef(it) => {
ast::ModuleItemKind::FnDef(it) => it.name(), if let Some(name) = it.name() {
ast::ModuleItemKind::TraitDef(it) => it.name(), let s = Struct { id: ctx.to_def(it) };
ast::ModuleItemKind::TypeDef(it) => it.name(), let s: ModuleDef = s.into();
self.declarations.insert(name.as_name(), PerNs::both(s, s));
}
}
ast::ModuleItemKind::EnumDef(it) => {
if let Some(name) = it.name() {
let e = Enum { id: ctx.to_def(it) };
let e: ModuleDef = e.into();
self.declarations.insert(name.as_name(), PerNs::types(e));
}
}
ast::ModuleItemKind::FnDef(it) => {
if let Some(name) = it.name() {
let func = Function { id: ctx.to_def(it) };
self.declarations
.insert(name.as_name(), PerNs::values(func.into()));
}
}
ast::ModuleItemKind::TraitDef(it) => {
if let Some(name) = it.name() {
let t = Trait { id: ctx.to_def(it) };
self.declarations
.insert(name.as_name(), PerNs::types(t.into()));
}
}
ast::ModuleItemKind::TypeDef(it) => {
if let Some(name) = it.name() {
let t = Type { id: ctx.to_def(it) };
self.declarations
.insert(name.as_name(), PerNs::types(t.into()));
}
}
ast::ModuleItemKind::ImplBlock(_) => { ast::ModuleItemKind::ImplBlock(_) => {
// impls don't define items // impls don't define items
return;
} }
ast::ModuleItemKind::UseItem(it) => { ast::ModuleItemKind::UseItem(it) => {
self.add_use_item(source_map, it); self.add_use_item(source_map, it);
return;
} }
ast::ModuleItemKind::ExternCrateItem(_) => { ast::ModuleItemKind::ExternCrateItem(_) => {
// TODO // TODO
return;
} }
ast::ModuleItemKind::ConstDef(it) => it.name(), ast::ModuleItemKind::ConstDef(it) => {
ast::ModuleItemKind::StaticDef(it) => it.name(), if let Some(name) = it.name() {
let c = Const { id: ctx.to_def(it) };
self.declarations
.insert(name.as_name(), PerNs::values(c.into()));
}
}
ast::ModuleItemKind::StaticDef(it) => {
if let Some(name) = it.name() {
let s = Static { id: ctx.to_def(it) };
self.declarations
.insert(name.as_name(), PerNs::values(s.into()));
}
}
ast::ModuleItemKind::Module(_) => { ast::ModuleItemKind::Module(_) => {
// modules are handled separately directly by nameres // modules are handled separately directly by nameres
return;
} }
}; };
if let Some(name) = name {
let def_id = assign_def_id(db, source_root_id, module_id, file_id, file_items, item);
self.declarations.insert(name.as_name(), def_id);
}
} }
fn add_use_item(&mut self, source_map: &mut ImportSourceMap, item: &ast::UseItem) { fn add_use_item(&mut self, source_map: &mut ImportSourceMap, item: &ast::UseItem) {
@ -216,46 +223,3 @@ impl LoweredModule {
}) })
} }
} }
fn assign_def_id(
db: &impl HirDatabase,
source_root_id: SourceRootId,
module_id: ModuleId,
file_id: HirFileId,
file_items: &SourceFileItems,
item: &ast::ModuleItem,
) -> PerNs<DefId> {
// depending on the item kind, the location can define something in
// the values namespace, the types namespace, or both
let kind = DefKind::for_syntax_kind(item.syntax().kind());
let def_id = kind.map(|k| {
let item_id = file_items.id_of_unchecked(item.syntax());
let def_loc = DefLoc {
kind: k,
source_root_id,
module_id,
source_item_id: SourceItemId {
file_id,
item_id: Some(item_id),
},
};
def_loc.id(db)
});
def_id
}
impl DefKind {
fn for_syntax_kind(kind: SyntaxKind) -> PerNs<DefKind> {
match kind {
SyntaxKind::FN_DEF => PerNs::values(DefKind::Function),
SyntaxKind::MODULE => PerNs::types(DefKind::Module),
SyntaxKind::STRUCT_DEF => PerNs::both(DefKind::Struct, DefKind::StructCtor),
SyntaxKind::ENUM_DEF => PerNs::types(DefKind::Enum),
SyntaxKind::TRAIT_DEF => PerNs::types(DefKind::Trait),
SyntaxKind::TYPE_DEF => PerNs::types(DefKind::Type),
SyntaxKind::CONST_DEF => PerNs::values(DefKind::Const),
SyntaxKind::STATIC_DEF => PerNs::values(DefKind::Static),
_ => PerNs::none(),
}
}
}
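(Both the removed `assign_def_id`/`DefKind::for_syntax_kind` path and the new per-kind lowering above route declarations through `PerNs`. Below is a rough, self-contained model of that helper, using simplified stand-ins rather than the actual ra_hir definitions, to show why structs register in both namespaces while enums, traits and type aliases only register as types and functions, consts and statics only as values.)
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct PerNs<T> {
    types: Option<T>,
    values: Option<T>,
}

impl<T> PerNs<T> {
    fn none() -> PerNs<T> { PerNs { types: None, values: None } }
    fn types(t: T) -> PerNs<T> { PerNs { types: Some(t), values: None } }
    fn values(v: T) -> PerNs<T> { PerNs { types: None, values: Some(v) } }
    fn both(types: T, values: T) -> PerNs<T> { PerNs { types: Some(types), values: Some(values) } }
    fn take_types(self) -> Option<T> { self.types }
    fn take_values(self) -> Option<T> { self.values }
}

fn main() {
    // A struct `S` declares both a type and a value (its constructor).
    let s = PerNs::both("S as a type", "S as a value");
    assert_eq!(s.take_types(), Some("S as a type"));
    assert_eq!(s.take_values(), Some("S as a value"));
    // An enum only declares a type, so value-namespace lookups miss it.
    let e = PerNs::types("E as a type");
    assert_eq!(e.take_values(), None);
    // An unresolved name occupies neither namespace.
    let _ = PerNs::<&str>::none();
}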
@ -1,6 +1,6 @@
use std::sync::Arc; use std::sync::Arc;
use ra_db::{FilesDatabase, CrateGraph, SourceRootId, salsa::Database}; use ra_db::{CrateGraph, SourceRootId, salsa::Database};
use relative_path::RelativePath; use relative_path::RelativePath;
use test_utils::{assert_eq_text, covers}; use test_utils::{assert_eq_text, covers};
@ -13,10 +13,10 @@ use crate::{
fn item_map(fixture: &str) -> (Arc<ItemMap>, ModuleId) { fn item_map(fixture: &str) -> (Arc<ItemMap>, ModuleId) {
let (db, pos) = MockDatabase::with_position(fixture); let (db, pos) = MockDatabase::with_position(fixture);
let source_root = db.file_source_root(pos.file_id);
let module = crate::source_binder::module_from_position(&db, pos).unwrap(); let module = crate::source_binder::module_from_position(&db, pos).unwrap();
let module_id = module.def_id.loc(&db).module_id; let krate = module.krate(&db).unwrap();
(db.item_map(source_root), module_id) let module_id = module.module_id;
(db.item_map(krate.crate_id), module_id)
} }
fn check_module_item_map(map: &ItemMap, module_id: ModuleId, expected: &str) { fn check_module_item_map(map: &ItemMap, module_id: ModuleId, expected: &str) {
@ -238,14 +238,13 @@ fn item_map_across_crates() {
db.set_crate_graph(crate_graph); db.set_crate_graph(crate_graph);
let source_root = db.file_source_root(main_id);
let module = crate::source_binder::module_from_file_id(&db, main_id).unwrap(); let module = crate::source_binder::module_from_file_id(&db, main_id).unwrap();
let module_id = module.def_id.loc(&db).module_id; let krate = module.krate(&db).unwrap();
let item_map = db.item_map(source_root); let item_map = db.item_map(krate.crate_id);
check_module_item_map( check_module_item_map(
&item_map, &item_map,
module_id, module.module_id,
" "
Baz: t v Baz: t v
test_crate: t test_crate: t
@ -292,12 +291,12 @@ fn import_across_source_roots() {
db.set_crate_graph(crate_graph); db.set_crate_graph(crate_graph);
let module = crate::source_binder::module_from_file_id(&db, main_id).unwrap(); let module = crate::source_binder::module_from_file_id(&db, main_id).unwrap();
let module_id = module.def_id.loc(&db).module_id; let krate = module.krate(&db).unwrap();
let item_map = db.item_map(source_root); let item_map = db.item_map(krate.crate_id);
check_module_item_map( check_module_item_map(
&item_map, &item_map,
module_id, module.module_id,
" "
C: t v C: t v
test_crate: t test_crate: t
@ -333,14 +332,13 @@ fn reexport_across_crates() {
db.set_crate_graph(crate_graph); db.set_crate_graph(crate_graph);
let source_root = db.file_source_root(main_id);
let module = crate::source_binder::module_from_file_id(&db, main_id).unwrap(); let module = crate::source_binder::module_from_file_id(&db, main_id).unwrap();
let module_id = module.def_id.loc(&db).module_id; let krate = module.krate(&db).unwrap();
let item_map = db.item_map(source_root); let item_map = db.item_map(krate.crate_id);
check_module_item_map( check_module_item_map(
&item_map, &item_map,
module_id, module.module_id,
" "
Baz: t v Baz: t v
test_crate: t test_crate: t
@ -350,10 +348,11 @@ fn reexport_across_crates() {
fn check_item_map_is_not_recomputed(initial: &str, file_change: &str) { fn check_item_map_is_not_recomputed(initial: &str, file_change: &str) {
let (mut db, pos) = MockDatabase::with_position(initial); let (mut db, pos) = MockDatabase::with_position(initial);
let source_root = db.file_source_root(pos.file_id); let module = crate::source_binder::module_from_file_id(&db, pos.file_id).unwrap();
let krate = module.krate(&db).unwrap();
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
db.item_map(source_root); db.item_map(krate.crate_id);
}); });
assert!(format!("{:?}", events).contains("item_map")) assert!(format!("{:?}", events).contains("item_map"))
} }
@ -362,7 +361,7 @@ fn check_item_map_is_not_recomputed(initial: &str, file_change: &str) {
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
db.item_map(source_root); db.item_map(krate.crate_id);
}); });
assert!( assert!(
!format!("{:?}", events).contains("item_map"), !format!("{:?}", events).contains("item_map"),

use ra_syntax::{ use ra_syntax::{
AstNode, SyntaxNode, TreeArc, AstNode, SyntaxNode, TreeArc,
}; };
use ra_db::SourceRootId; use ra_db::{CrateId};
use crate::{ use crate::{
SourceFileItems, SourceItemId, DefId, HirFileId, SourceFileItems, SourceItemId, HirFileId,
FnScopes, Function, FnScopes, Module,
db::HirDatabase, db::HirDatabase,
nameres::{ItemMap, Resolver}, nameres::{ItemMap, Resolver},
}; };
pub(super) fn fn_scopes(db: &impl HirDatabase, def_id: DefId) -> Arc<FnScopes> { pub(super) fn fn_scopes(db: &impl HirDatabase, func: Function) -> Arc<FnScopes> {
let body = db.body_hir(def_id); let body = db.body_hir(func);
let res = FnScopes::new(body); let res = FnScopes::new(body);
Arc::new(res) Arc::new(res)
} }
@ -41,15 +41,23 @@ pub(super) fn file_item(
} }
} }
pub(super) fn item_map(db: &impl HirDatabase, source_root: SourceRootId) -> Arc<ItemMap> { pub(super) fn item_map(db: &impl HirDatabase, crate_id: CrateId) -> Arc<ItemMap> {
let start = Instant::now(); let start = Instant::now();
let module_tree = db.module_tree(source_root); let module_tree = db.module_tree(crate_id);
let input = module_tree let input = module_tree
.modules() .modules()
.map(|id| (id, db.lower_module_module(source_root, id))) .map(|module_id| {
(
module_id,
db.lower_module_module(Module {
krate: crate_id,
module_id,
}),
)
})
.collect::<FxHashMap<_, _>>(); .collect::<FxHashMap<_, _>>();
let resolver = Resolver::new(db, &input, source_root, module_tree); let resolver = Resolver::new(db, &input, crate_id);
let res = resolver.resolve(); let res = resolver.resolve();
let elapsed = start.elapsed(); let elapsed = start.elapsed();
log::info!("item_map: {:?}", elapsed); log::info!("item_map: {:?}", elapsed);
@ -13,8 +13,9 @@ use ra_syntax::{
}; };
use crate::{ use crate::{
HirDatabase, Function, SourceItemId, HirDatabase, Function, SourceItemId, ModuleDef,
DefKind, DefLoc, AsName, Module, AsName, Module,
ids::LocationCtx,
}; };
/// Locates the module by `FileId`. Picks topmost module in the file. /// Locates the module by `FileId`. Picks topmost module in the file.
@ -84,9 +85,13 @@ pub fn module_from_child_node(
fn module_from_source(db: &impl HirDatabase, source: SourceItemId) -> Option<Module> { fn module_from_source(db: &impl HirDatabase, source: SourceItemId) -> Option<Module> {
let source_root_id = db.file_source_root(source.file_id.as_original_file()); let source_root_id = db.file_source_root(source.file_id.as_original_file());
let module_tree = db.module_tree(source_root_id); db.source_root_crates(source_root_id)
let module_id = module_tree.find_module_by_source(source)?; .iter()
Some(Module::from_module_id(db, source_root_id, module_id)) .find_map(|&krate| {
let module_tree = db.module_tree(krate);
let module_id = module_tree.find_module_by_source(source)?;
Some(Module { krate, module_id })
})
} }
pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Function> { pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Function> {
@ -101,30 +106,21 @@ pub fn function_from_source(
fn_def: &ast::FnDef, fn_def: &ast::FnDef,
) -> Option<Function> { ) -> Option<Function> {
let module = module_from_child_node(db, file_id, fn_def.syntax())?; let module = module_from_child_node(db, file_id, fn_def.syntax())?;
let res = function_from_module(db, &module, fn_def); let res = function_from_module(db, module, fn_def);
Some(res) Some(res)
} }
pub fn function_from_module( pub fn function_from_module(
db: &impl HirDatabase, db: &impl HirDatabase,
module: &Module, module: Module,
fn_def: &ast::FnDef, fn_def: &ast::FnDef,
) -> Function { ) -> Function {
let loc = module.def_id.loc(db); let (file_id, _) = module.definition_source(db);
let file_id = loc.source_item_id.file_id; let file_id = file_id.into();
let file_items = db.file_items(file_id); let ctx = LocationCtx::new(db, module, file_id);
let item_id = file_items.id_of(file_id, fn_def.syntax()); Function {
let source_item_id = SourceItemId { id: ctx.to_def(fn_def),
file_id, }
item_id: Some(item_id),
};
let def_loc = DefLoc {
kind: DefKind::Function,
source_root_id: loc.source_root_id,
module_id: loc.module_id,
source_item_id,
};
Function::new(def_loc.id(db))
} }
pub fn function_from_child_node( pub fn function_from_child_node(
@ -141,15 +137,18 @@ pub fn macro_symbols(db: &impl HirDatabase, file_id: FileId) -> Vec<(SmolStr, Te
Some(it) => it, Some(it) => it,
None => return Vec::new(), None => return Vec::new(),
}; };
let loc = module.def_id.loc(db); let items = db.lower_module_module(module);
let items = db.lower_module_module(loc.source_root_id, loc.module_id);
let mut res = Vec::new(); let mut res = Vec::new();
for macro_call_id in items for macro_call_id in items
.declarations .declarations
.iter() .iter()
.filter_map(|(_, it)| it.take_types()) .filter_map(|(_, it)| it.clone().take_types())
.filter_map(|it| it.loc(db).source_item_id.file_id.as_macro_call_id()) .filter_map(|it| match it {
ModuleDef::Trait(it) => Some(it),
_ => None,
})
.filter_map(|it| it.source(db).0.as_macro_call_id())
{ {
if let Some(exp) = db.expand_macro_invocation(macro_call_id) { if let Some(exp) = db.expand_macro_invocation(macro_call_id) {
let loc = macro_call_id.loc(db); let loc = macro_call_id.loc(db);
@ -24,15 +24,14 @@ use std::ops::Index;
use std::sync::Arc; use std::sync::Arc;
use std::{fmt, mem}; use std::{fmt, mem};
use log;
use ena::unify::{InPlaceUnificationTable, UnifyKey, UnifyValue, NoError}; use ena::unify::{InPlaceUnificationTable, UnifyKey, UnifyValue, NoError};
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use join_to_string::join; use join_to_string::join;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
Def, DefId, Module, Function, Struct, StructField, Enum, EnumVariant, Path, Name, ImplBlock, Module, Function, Struct, StructField, Enum, EnumVariant, Path, Name, ImplBlock,
FnSignature, FnScopes, FnSignature, FnScopes, ModuleDef, AdtDef,
db::HirDatabase, db::HirDatabase,
type_ref::{TypeRef, Mutability}, type_ref::{TypeRef, Mutability},
name::KnownName, name::KnownName,
@ -184,7 +183,7 @@ pub enum Ty {
/// Structures, enumerations and unions. /// Structures, enumerations and unions.
Adt { Adt {
/// The DefId of the struct/enum. /// The DefId of the struct/enum.
def_id: DefId, def_id: AdtDef,
/// The name, for displaying. /// The name, for displaying.
name: Name, name: Name,
/// Substitutions for the generic parameters of the type. /// Substitutions for the generic parameters of the type.
@ -381,12 +380,16 @@ impl Ty {
} }
// Resolve in module (in type namespace) // Resolve in module (in type namespace)
let resolved = match module.resolve_path(db, path).take_types() { let typable: TypableDef = match module
Some(r) => r, .resolve_path(db, path)
.take_types()
.and_then(|it| it.into())
{
None => return Ty::Unknown, None => return Ty::Unknown,
Some(it) => it,
}; };
let ty = db.type_for_def(resolved); let ty = db.type_for_def(typable);
let substs = Ty::substs_from_path(db, module, impl_block, generics, path, resolved); let substs = Ty::substs_from_path(db, module, impl_block, generics, path, typable);
ty.apply_substs(substs) ty.apply_substs(substs)
} }
@ -399,20 +402,18 @@ impl Ty {
impl_block: Option<&ImplBlock>, impl_block: Option<&ImplBlock>,
outer_generics: &GenericParams, outer_generics: &GenericParams,
path: &Path, path: &Path,
resolved: DefId, resolved: TypableDef,
) -> Substs { ) -> Substs {
let mut substs = Vec::new(); let mut substs = Vec::new();
let def = resolved.resolve(db);
let last = path let last = path
.segments .segments
.last() .last()
.expect("path should have at least one segment"); .expect("path should have at least one segment");
let (def_generics, segment) = match def { let (def_generics, segment) = match resolved {
Def::Struct(s) => (s.generic_params(db), last), TypableDef::Function(func) => (func.generic_params(db), last),
Def::Enum(e) => (e.generic_params(db), last), TypableDef::Struct(s) => (s.generic_params(db), last),
Def::Function(f) => (f.generic_params(db), last), TypableDef::Enum(e) => (e.generic_params(db), last),
Def::Trait(t) => (t.generic_params(db), last), TypableDef::EnumVariant(var) => {
Def::EnumVariant(ev) => {
// the generic args for an enum variant may be either specified // the generic args for an enum variant may be either specified
// on the segment referring to the enum, or on the segment // on the segment referring to the enum, or on the segment
// referring to the variant. So `Option::<T>::None` and // referring to the variant. So `Option::<T>::None` and
@ -426,9 +427,8 @@ impl Ty {
// Option::None::<T> // Option::None::<T>
last last
}; };
(ev.parent_enum(db).generic_params(db), segment) (var.parent_enum(db).generic_params(db), segment)
} }
_ => return Substs::empty(),
}; };
// substs_from_path // substs_from_path
if let Some(generic_args) = &segment.args_and_bindings { if let Some(generic_args) = &segment.args_and_bindings {
@ -639,7 +639,7 @@ fn make_substs(generics: &GenericParams) -> Substs {
fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty { fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty {
let generics = s.generic_params(db); let generics = s.generic_params(db);
Ty::Adt { Ty::Adt {
def_id: s.def_id(), def_id: s.into(),
name: s.name(db).unwrap_or_else(Name::missing), name: s.name(db).unwrap_or_else(Name::missing),
substs: make_substs(&generics), substs: make_substs(&generics),
} }
@ -648,7 +648,7 @@ fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty {
pub(crate) fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Ty { pub(crate) fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Ty {
let generics = s.generic_params(db); let generics = s.generic_params(db);
Ty::Adt { Ty::Adt {
def_id: s.def_id(), def_id: s.into(),
name: s.name(db).unwrap_or_else(Name::missing), name: s.name(db).unwrap_or_else(Name::missing),
substs: make_substs(&generics), substs: make_substs(&generics),
} }
@ -660,66 +660,74 @@ pub(crate) fn type_for_enum_variant(db: &impl HirDatabase, ev: EnumVariant) -> T
type_for_enum(db, enum_parent) type_for_enum(db, enum_parent)
} }
pub(super) fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Ty { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
let def = def_id.resolve(db); pub enum TypableDef {
match def { Function(Function),
Def::Module(..) => { Struct(Struct),
log::debug!("trying to get type for module {:?}", def_id); Enum(Enum),
Ty::Unknown EnumVariant(EnumVariant),
} }
Def::Function(f) => type_for_fn(db, f), impl_froms!(TypableDef: Function, Struct, Enum, EnumVariant);
Def::Struct(s) => type_for_struct(db, s),
Def::Enum(e) => type_for_enum(db, e), impl From<ModuleDef> for Option<TypableDef> {
Def::EnumVariant(ev) => type_for_enum_variant(db, ev), fn from(def: ModuleDef) -> Option<TypableDef> {
_ => { let res = match def {
log::debug!( ModuleDef::Function(f) => f.into(),
"trying to get type for item of unknown type {:?} {:?}", ModuleDef::Struct(s) => s.into(),
def_id, ModuleDef::Enum(e) => e.into(),
def ModuleDef::EnumVariant(v) => v.into(),
); ModuleDef::Const(_)
Ty::Unknown | ModuleDef::Static(_)
} | ModuleDef::Module(_)
| ModuleDef::Trait(_)
| ModuleDef::Type(_) => return None,
};
Some(res)
} }
} }
pub(super) fn type_for_field(db: &impl HirDatabase, def_id: DefId, field: Name) -> Option<Ty> { pub(super) fn type_for_def(db: &impl HirDatabase, def: TypableDef) -> Ty {
let def = def_id.resolve(db); match def {
let (variant_data, generics) = match def { TypableDef::Function(f) => type_for_fn(db, f),
Def::Struct(s) => (s.variant_data(db), s.generic_params(db)), TypableDef::Struct(s) => type_for_struct(db, s),
Def::EnumVariant(ev) => (ev.variant_data(db), ev.parent_enum(db).generic_params(db)), TypableDef::Enum(e) => type_for_enum(db, e),
// TODO: unions TypableDef::EnumVariant(v) => type_for_enum_variant(db, v),
Def::Enum(_) => { }
// this can happen in (invalid) code, but enums don't have fields themselves }
return None;
} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
_ => panic!( pub enum VariantDef {
"trying to get type for field {:?} in non-struct/variant {:?}", Struct(Struct),
field, def_id EnumVariant(EnumVariant),
}
impl_froms!(VariantDef: Struct, EnumVariant);
pub(super) fn type_for_field(db: &impl HirDatabase, def: VariantDef, field: Name) -> Option<Ty> {
let (variant_data, generics, module) = match def {
VariantDef::Struct(s) => (s.variant_data(db), s.generic_params(db), s.module(db)),
VariantDef::EnumVariant(var) => (
var.variant_data(db),
var.parent_enum(db).generic_params(db),
var.module(db),
), ),
}; };
let module = def_id.module(db); // We can't have an impl block here, right?
let impl_block = def_id.impl_block(db); // let impl_block = def_id.impl_block(db);
let type_ref = variant_data.get_field_type_ref(&field)?; let type_ref = variant_data.get_field_type_ref(&field)?;
Some(Ty::from_hir( Some(Ty::from_hir(db, &module, None, &generics, &type_ref))
db,
&module,
impl_block.as_ref(),
&generics,
&type_ref,
))
} }
/// The result of type inference: A mapping from expressions and patterns to types. /// The result of type inference: A mapping from expressions and patterns to types.
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct InferenceResult { pub struct InferenceResult {
/// For each method call expr, record the function it resolved to. /// For each method call expr, record the function it resolved to.
method_resolutions: FxHashMap<ExprId, DefId>, method_resolutions: FxHashMap<ExprId, Function>,
type_of_expr: ArenaMap<ExprId, Ty>, type_of_expr: ArenaMap<ExprId, Ty>,
type_of_pat: ArenaMap<PatId, Ty>, type_of_pat: ArenaMap<PatId, Ty>,
} }
impl InferenceResult { impl InferenceResult {
pub fn method_resolution(&self, expr: ExprId) -> Option<DefId> { pub fn method_resolution(&self, expr: ExprId) -> Option<Function> {
self.method_resolutions.get(&expr).map(|it| *it) self.method_resolutions.get(&expr).map(|it| *it)
} }
} }
@ -749,7 +757,7 @@ struct InferenceContext<'a, D: HirDatabase> {
module: Module, module: Module,
impl_block: Option<ImplBlock>, impl_block: Option<ImplBlock>,
var_unification_table: InPlaceUnificationTable<TypeVarId>, var_unification_table: InPlaceUnificationTable<TypeVarId>,
method_resolutions: FxHashMap<ExprId, DefId>, method_resolutions: FxHashMap<ExprId, Function>,
type_of_expr: ArenaMap<ExprId, Ty>, type_of_expr: ArenaMap<ExprId, Ty>,
type_of_pat: ArenaMap<PatId, Ty>, type_of_pat: ArenaMap<PatId, Ty>,
/// The return type of the function being inferred. /// The return type of the function being inferred.
@ -875,8 +883,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.type_of_expr.insert(expr, ty); self.type_of_expr.insert(expr, ty);
} }
fn write_method_resolution(&mut self, expr: ExprId, def_id: DefId) { fn write_method_resolution(&mut self, expr: ExprId, func: Function) {
self.method_resolutions.insert(expr, def_id); self.method_resolutions.insert(expr, func);
} }
fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
@ -1063,20 +1071,30 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}; };
// resolve in module // resolve in module
let resolved = self.module.resolve_path(self.db, &path).take_values()?; let typable: Option<TypableDef> = self
let ty = self.db.type_for_def(resolved); .module
.resolve_path(self.db, &path)
.take_values()?
.into();
let typable = typable?;
let ty = self.db.type_for_def(typable);
let ty = self.insert_type_vars(ty); let ty = self.insert_type_vars(ty);
Some(ty) Some(ty)
} }
fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<DefId>) { fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantDef>) {
let path = match path { let path = match path {
Some(path) => path, Some(path) => path,
None => return (Ty::Unknown, None), None => return (Ty::Unknown, None),
}; };
let def_id = match self.module.resolve_path(self.db, &path).take_types() { let typable: Option<TypableDef> = self
Some(def_id) => def_id, .module
_ => return (Ty::Unknown, None), .resolve_path(self.db, &path)
.take_types()
.and_then(|it| it.into());
let def = match typable {
None => return (Ty::Unknown, None),
Some(it) => it,
}; };
// TODO remove the duplication between here and `Ty::from_path`? // TODO remove the duplication between here and `Ty::from_path`?
// TODO provide generics of function // TODO provide generics of function
@ -1087,38 +1105,34 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.impl_block.as_ref(), self.impl_block.as_ref(),
&generics, &generics,
path, path,
def_id, def,
); );
match def_id.resolve(self.db) { match def {
Def::Struct(s) => { TypableDef::Struct(s) => {
let ty = type_for_struct(self.db, s); let ty = type_for_struct(self.db, s);
let ty = self.insert_type_vars(ty.apply_substs(substs)); let ty = self.insert_type_vars(ty.apply_substs(substs));
(ty, Some(def_id)) (ty, Some(s.into()))
} }
Def::EnumVariant(ev) => { TypableDef::EnumVariant(var) => {
let ty = type_for_enum_variant(self.db, ev); let ty = type_for_enum_variant(self.db, var);
let ty = self.insert_type_vars(ty.apply_substs(substs)); let ty = self.insert_type_vars(ty.apply_substs(substs));
(ty, Some(def_id)) (ty, Some(var.into()))
} }
_ => (Ty::Unknown, None), TypableDef::Enum(_) | TypableDef::Function(_) => (Ty::Unknown, None),
} }
} }
fn resolve_fields(&mut self, path: Option<&Path>) -> Option<(Ty, Vec<StructField>)> { fn resolve_fields(&mut self, path: Option<&Path>) -> Option<(Ty, Vec<StructField>)> {
let (ty, def_id) = self.resolve_variant(path); let (ty, def) = self.resolve_variant(path);
let def_id = def_id?; match def? {
let def = def_id.resolve(self.db); VariantDef::Struct(s) => {
match def {
Def::Struct(s) => {
let fields = s.fields(self.db); let fields = s.fields(self.db);
Some((ty, fields)) Some((ty, fields))
} }
Def::EnumVariant(ev) => { VariantDef::EnumVariant(var) => {
let fields = ev.fields(self.db); let fields = var.fields(self.db);
Some((ty, fields)) Some((ty, fields))
} }
_ => None,
} }
} }
@ -1216,6 +1230,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
.module .module
.resolve_path(self.db, &path) .resolve_path(self.db, &path)
.take_values() .take_values()
.and_then(|module_def| module_def.into())
.map_or(Ty::Unknown, |resolved| self.db.type_for_def(resolved)), .map_or(Ty::Unknown, |resolved| self.db.type_for_def(resolved)),
Pat::Bind { Pat::Bind {
mode, mode,
@ -1336,9 +1351,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let receiver_ty = self.infer_expr(*receiver, &Expectation::none()); let receiver_ty = self.infer_expr(*receiver, &Expectation::none());
let resolved = receiver_ty.clone().lookup_method(self.db, method_name); let resolved = receiver_ty.clone().lookup_method(self.db, method_name);
let method_ty = match resolved { let method_ty = match resolved {
Some(def_id) => { Some(func) => {
self.write_method_resolution(expr, def_id); self.write_method_resolution(expr, func);
self.db.type_for_def(def_id) self.db.type_for_def(func.into())
} }
None => Ty::Unknown, None => Ty::Unknown,
}; };
@ -1407,7 +1422,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
for field in fields { for field in fields {
let field_ty = if let Some(def_id) = def_id { let field_ty = if let Some(def_id) = def_id {
self.db self.db
.type_for_field(def_id, field.name.clone()) .type_for_field(def_id.into(), field.name.clone())
.unwrap_or(Ty::Unknown) .unwrap_or(Ty::Unknown)
.subst(&substs) .subst(&substs)
} else { } else {
@ -1431,10 +1446,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
i.and_then(|i| fields.get(i).cloned()) i.and_then(|i| fields.get(i).cloned())
} }
Ty::Adt { Ty::Adt {
def_id, ref substs, .. def_id: AdtDef::Struct(s),
ref substs,
..
} => self } => self
.db .db
.type_for_field(def_id, name.clone()) .type_for_field(s.into(), name.clone())
.map(|ty| ty.subst(substs)), .map(|ty| ty.subst(substs)),
_ => None, _ => None,
}) })
@ -1607,16 +1624,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
pub fn infer(db: &impl HirDatabase, def_id: DefId) -> Arc<InferenceResult> { pub fn infer(db: &impl HirDatabase, func: Function) -> Arc<InferenceResult> {
db.check_canceled(); db.check_canceled();
let function = Function::new(def_id); // TODO: consts also need inference let body = func.body(db);
let body = function.body(db); let scopes = db.fn_scopes(func);
let scopes = db.fn_scopes(def_id); let module = func.module(db);
let module = function.module(db); let impl_block = func.impl_block(db);
let impl_block = function.impl_block(db);
let mut ctx = InferenceContext::new(db, body, scopes, module, impl_block); let mut ctx = InferenceContext::new(db, body, scopes, module, impl_block);
let signature = function.signature(db); let signature = func.signature(db);
ctx.collect_fn_signature(&signature); ctx.collect_fn_signature(&signature);
ctx.infer_body(); ctx.infer_body();
@ -6,19 +6,17 @@ use std::sync::Arc;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use ra_db::SourceRootId;
use crate::{ use crate::{
HirDatabase, DefId, module_tree::ModuleId, Module, Crate, Name, Function, HirDatabase, module_tree::ModuleId, Module, Crate, Name, Function,
impl_block::{ImplId, ImplBlock, ImplItem}, impl_block::{ImplId, ImplBlock, ImplItem},
generics::GenericParams generics::GenericParams,
ty::{AdtDef, Ty}
}; };
use super::Ty;
/// This is used as a key for indexing impls. /// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TyFingerprint { pub enum TyFingerprint {
Adt(DefId), Adt(AdtDef),
// we'll also want to index impls for primitive types etc. // we'll also want to index impls for primitive types etc.
} }
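(A rough, self-contained illustration of the indexing scheme described above, with placeholder ids standing in for the real ra_hir types: impl blocks are bucketed by a fingerprint of their self type, so later method lookups only scan impls whose self type could possibly match.)
use std::collections::HashMap;

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum AdtDef {
    Struct(u32),
    Enum(u32),
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum TyFingerprint {
    Adt(AdtDef),
}

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct ImplId(u32);

fn main() {
    let mut impls: HashMap<TyFingerprint, Vec<ImplId>> = HashMap::new();
    let foo = AdtDef::Struct(0);
    // Two impl blocks whose self type is `Foo` land in the same bucket.
    impls.entry(TyFingerprint::Adt(foo)).or_insert_with(Vec::new).push(ImplId(0));
    impls.entry(TyFingerprint::Adt(foo)).or_insert_with(Vec::new).push(ImplId(1));
    assert_eq!(impls[&TyFingerprint::Adt(foo)].len(), 2);
    // Nothing was indexed for this enum, so a lookup scans zero impls.
    assert!(impls.get(&TyFingerprint::Adt(AdtDef::Enum(1))).is_none());
}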
@ -37,7 +35,7 @@ impl TyFingerprint {
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct CrateImplBlocks { pub struct CrateImplBlocks {
/// To make sense of the ModuleIds, we need the source root. /// To make sense of the ModuleIds, we need the source root.
source_root_id: SourceRootId, krate: Crate,
impls: FxHashMap<TyFingerprint, Vec<(ModuleId, ImplId)>>, impls: FxHashMap<TyFingerprint, Vec<(ModuleId, ImplId)>>,
} }
@ -53,14 +51,17 @@ impl CrateImplBlocks {
.into_iter() .into_iter()
.flat_map(|i| i.iter()) .flat_map(|i| i.iter())
.map(move |(module_id, impl_id)| { .map(move |(module_id, impl_id)| {
let module_impl_blocks = db.impls_in_module(self.source_root_id, *module_id); let module = Module {
krate: self.krate.crate_id,
module_id: *module_id,
};
let module_impl_blocks = db.impls_in_module(module);
ImplBlock::from_id(module_impl_blocks, *impl_id) ImplBlock::from_id(module_impl_blocks, *impl_id)
}) })
} }
fn collect_recursive(&mut self, db: &impl HirDatabase, module: Module) { fn collect_recursive(&mut self, db: &impl HirDatabase, module: &Module) {
let module_id = module.def_id.loc(db).module_id; let module_impl_blocks = db.impls_in_module(module.clone());
let module_impl_blocks = db.impls_in_module(self.source_root_id, module_id);
for (impl_id, impl_data) in module_impl_blocks.impls.iter() { for (impl_id, impl_data) in module_impl_blocks.impls.iter() {
let impl_block = ImplBlock::from_id(Arc::clone(&module_impl_blocks), impl_id); let impl_block = ImplBlock::from_id(Arc::clone(&module_impl_blocks), impl_id);
@ -81,13 +82,13 @@ impl CrateImplBlocks {
self.impls self.impls
.entry(target_ty_fp) .entry(target_ty_fp)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push((module_id, impl_id)); .push((module.module_id, impl_id));
} }
} }
} }
for child in module.children(db) { for child in module.children(db) {
self.collect_recursive(db, child); self.collect_recursive(db, &child);
} }
} }
@ -95,15 +96,12 @@ impl CrateImplBlocks {
db: &impl HirDatabase, db: &impl HirDatabase,
krate: Crate, krate: Crate,
) -> Arc<CrateImplBlocks> { ) -> Arc<CrateImplBlocks> {
let crate_graph = db.crate_graph();
let file_id = crate_graph.crate_root(krate.crate_id);
let source_root_id = db.file_source_root(file_id);
let mut crate_impl_blocks = CrateImplBlocks { let mut crate_impl_blocks = CrateImplBlocks {
source_root_id, krate: krate.clone(),
impls: FxHashMap::default(), impls: FxHashMap::default(),
}; };
if let Some(module) = krate.root_module(db) { if let Some(module) = krate.root_module(db) {
crate_impl_blocks.collect_recursive(db, module); crate_impl_blocks.collect_recursive(db, &module);
} }
Arc::new(crate_impl_blocks) Arc::new(crate_impl_blocks)
} }
@ -120,11 +118,11 @@ impl Ty {
// TODO: cache this as a query? // TODO: cache this as a query?
// - if so, what signature? (TyFingerprint, Name)? // - if so, what signature? (TyFingerprint, Name)?
// - or maybe cache all names and def_ids of methods per fingerprint? // - or maybe cache all names and def_ids of methods per fingerprint?
pub fn lookup_method(self, db: &impl HirDatabase, name: &Name) -> Option<DefId> { pub fn lookup_method(self, db: &impl HirDatabase, name: &Name) -> Option<Function> {
self.iterate_methods(db, |f| { self.iterate_methods(db, |f| {
let sig = f.signature(db); let sig = f.signature(db);
if sig.name() == name && sig.has_self_param() { if sig.name() == name && sig.has_self_param() {
Some(f.def_id()) Some(f)
} else { } else {
None None
} }
@ -1,19 +1,19 @@
--- ---
created: "2019-01-22T14:45:00.058678600+00:00" created: "2019-01-24T14:51:32.808861856+00:00"
creator: insta@0.4.0 creator: insta@0.5.2
expression: "&result" expression: "&result"
source: "crates\\ra_hir\\src\\ty\\tests.rs" source: crates/ra_hir/src/ty/tests.rs
--- ---
[72; 154) '{ ...a.c; }': () [72; 154) '{ ...a.c; }': ()
[82; 83) 'c': [unknown] [82; 83) 'c': [unknown]
[86; 87) 'C': [unknown] [86; 87) 'C': C
[86; 90) 'C(1)': [unknown] [86; 90) 'C(1)': [unknown]
[88; 89) '1': i32 [88; 89) '1': i32
[96; 97) 'B': [unknown] [96; 97) 'B': B
[107; 108) 'a': A [107; 108) 'a': A
[114; 133) 'A { b:...C(1) }': A [114; 133) 'A { b:...C(1) }': A
[121; 122) 'B': B [121; 122) 'B': B
[127; 128) 'C': [unknown] [127; 128) 'C': C
[127; 131) 'C(1)': C [127; 131) 'C(1)': C
[129; 130) '1': i32 [129; 130) '1': i32
[139; 140) 'a': A [139; 140) 'a': A
@ -1,4 +1,4 @@
use hir::{Ty, Def}; use hir::{Ty, AdtDef};
use crate::completion::{CompletionContext, Completions, CompletionItem, CompletionItemKind}; use crate::completion::{CompletionContext, Completions, CompletionItem, CompletionItemKind};
use crate::completion::completion_item::CompletionKind; use crate::completion::completion_item::CompletionKind;
@ -28,8 +28,8 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty)
Ty::Adt { Ty::Adt {
def_id, ref substs, .. def_id, ref substs, ..
} => { } => {
match def_id.resolve(ctx.db) { match def_id {
Def::Struct(s) => { AdtDef::Struct(s) => {
for field in s.fields(ctx.db) { for field in s.fields(ctx.db) {
CompletionItem::new( CompletionItem::new(
CompletionKind::Reference, CompletionKind::Reference,
@ -41,8 +41,9 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty)
.add_to(acc); .add_to(acc);
} }
} }
// TODO unions // TODO unions
_ => {} AdtDef::Enum(_) => (),
} }
} }
Ty::Tuple(fields) => { Ty::Tuple(fields) => {
@ -13,8 +13,8 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
Some(it) => it, Some(it) => it,
None => return, None => return,
}; };
match def_id.resolve(ctx.db) { match def_id {
hir::Def::Module(module) => { hir::ModuleDef::Module(module) => {
let module_scope = module.scope(ctx.db); let module_scope = module.scope(ctx.db);
for (name, res) in module_scope.entries() { for (name, res) in module_scope.entries() {
CompletionItem::new( CompletionItem::new(
@ -26,7 +26,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
.add_to(acc); .add_to(acc);
} }
} }
hir::Def::Enum(e) => { hir::ModuleDef::Enum(e) => {
e.variants(ctx.db) e.variants(ctx.db)
.into_iter() .into_iter()
.for_each(|(variant_name, variant)| { .for_each(|(variant_name, variant)| {
@ -127,7 +127,7 @@ impl<'a> CompletionContext<'a> {
.ancestors() .ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::FnDef::cast); .find_map(ast::FnDef::cast);
match (&self.module, self.function_syntax) { match (self.module, self.function_syntax) {
(Some(module), Some(fn_def)) => { (Some(module), Some(fn_def)) => {
let function = source_binder::function_from_module(self.db, module, fn_def); let function = source_binder::function_from_module(self.db, module, fn_def);
self.function = Some(function); self.function = Some(function);
@ -1,6 +1,4 @@
use hir::{Docs, Documentation, PerNs}; use hir::{Docs, Documentation};
use crate::completion::completion_context::CompletionContext;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
TextRange, TextRange,
@ -8,6 +6,8 @@ use ra_syntax::{
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
use test_utils::tested_by; use test_utils::tested_by;
use crate::completion::completion_context::CompletionContext;
/// `CompletionItem` describes a single completion variant in the editor pop-up. /// `CompletionItem` describes a single completion variant in the editor pop-up.
/// It is basically a POD with various properties. To construct a /// It is basically a POD with various properties. To construct a
/// `CompletionItem`, use the `new` method and the `Builder` struct. /// `CompletionItem`, use the `new` method and the `Builder` struct.
@ -209,41 +209,24 @@ impl Builder {
ctx: &CompletionContext, ctx: &CompletionContext,
resolution: &hir::Resolution, resolution: &hir::Resolution,
) -> Builder { ) -> Builder {
let resolved = resolution.def_id.map(|d| d.resolve(ctx.db)); let def = resolution
let (kind, docs) = match resolved { .def_id
PerNs { .take_types()
types: Some(hir::Def::Module(..)), .or(resolution.def_id.take_values());
.. let def = match def {
} => (CompletionItemKind::Module, None), None => return self,
PerNs { Some(it) => it,
types: Some(hir::Def::Struct(s)), };
.. let (kind, docs) = match def {
} => (CompletionItemKind::Struct, s.docs(ctx.db)), hir::ModuleDef::Module(_) => (CompletionItemKind::Module, None),
PerNs { hir::ModuleDef::Function(func) => return self.from_function(ctx, func),
types: Some(hir::Def::Enum(e)), hir::ModuleDef::Struct(it) => (CompletionItemKind::Struct, it.docs(ctx.db)),
.. hir::ModuleDef::Enum(it) => (CompletionItemKind::Enum, it.docs(ctx.db)),
} => (CompletionItemKind::Enum, e.docs(ctx.db)), hir::ModuleDef::EnumVariant(it) => (CompletionItemKind::EnumVariant, it.docs(ctx.db)),
PerNs { hir::ModuleDef::Const(it) => (CompletionItemKind::Const, it.docs(ctx.db)),
types: Some(hir::Def::Trait(t)), hir::ModuleDef::Static(it) => (CompletionItemKind::Static, it.docs(ctx.db)),
.. hir::ModuleDef::Trait(it) => (CompletionItemKind::Trait, it.docs(ctx.db)),
} => (CompletionItemKind::Trait, t.docs(ctx.db)), hir::ModuleDef::Type(it) => (CompletionItemKind::TypeAlias, it.docs(ctx.db)),
PerNs {
types: Some(hir::Def::Type(t)),
..
} => (CompletionItemKind::TypeAlias, t.docs(ctx.db)),
PerNs {
values: Some(hir::Def::Const(c)),
..
} => (CompletionItemKind::Const, c.docs(ctx.db)),
PerNs {
values: Some(hir::Def::Static(s)),
..
} => (CompletionItemKind::Static, s.docs(ctx.db)),
PerNs {
values: Some(hir::Def::Function(function)),
..
} => return self.from_function(ctx, function),
_ => return self,
}; };
self.kind = Some(kind); self.kind = Some(kind);
self.documentation = docs; self.documentation = docs;
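The rewritten `from_resolution` above first collapses the two-namespace resolution into a single `ModuleDef` (the type namespace winning over the value namespace) and only then maps that onto a `CompletionItemKind`/docs pair, replacing the long `PerNs` pattern match. A self-contained sketch of the collapse step with simplified stand-in types (the real `PerNs`, `take_types` and `take_values` live in ra_hir):

// Simplified PerNs: a resolution may name something in the type namespace,
// the value namespace, or both.
#[derive(Clone, Copy)]
struct PerNs<T> {
    types: Option<T>,
    values: Option<T>,
}

impl<T> PerNs<T> {
    fn take_types(self) -> Option<T> {
        self.types
    }
    fn take_values(self) -> Option<T> {
        self.values
    }
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum ModuleDef {
    Struct,
    Function,
}

fn collapse(def_id: PerNs<ModuleDef>) -> Option<ModuleDef> {
    // Mirrors `take_types().or(take_values())`: types win when both exist.
    def_id.take_types().or(def_id.take_values())
}

fn main() {
    let both = PerNs { types: Some(ModuleDef::Struct), values: Some(ModuleDef::Function) };
    assert_eq!(collapse(both), Some(ModuleDef::Struct));

    let value_only = PerNs { types: None, values: Some(ModuleDef::Function) };
    assert_eq!(collapse(value_only), Some(ModuleDef::Function));
}

Because both sides are cheap `Copy` ids, the eager `.or(...)` in the diff is harmless; with an expensive lookup on the value side, `.or_else(|| ...)` would be the lazier choice.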

View file

@ -72,6 +72,7 @@ salsa::database_storage! {
fn file_relative_path() for ra_db::FileRelativePathQuery; fn file_relative_path() for ra_db::FileRelativePathQuery;
fn file_source_root() for ra_db::FileSourceRootQuery; fn file_source_root() for ra_db::FileSourceRootQuery;
fn source_root() for ra_db::SourceRootQuery; fn source_root() for ra_db::SourceRootQuery;
fn source_root_crates() for ra_db::SourceRootCratesQuery;
fn local_roots() for ra_db::LocalRootsQuery; fn local_roots() for ra_db::LocalRootsQuery;
fn library_roots() for ra_db::LibraryRootsQuery; fn library_roots() for ra_db::LibraryRootsQuery;
fn crate_graph() for ra_db::CrateGraphQuery; fn crate_graph() for ra_db::CrateGraphQuery;

View file

@ -63,13 +63,11 @@ pub(crate) fn reference_definition(
let infer_result = function.infer(db); let infer_result = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db); let syntax_mapping = function.body_syntax_mapping(db);
let expr = ast::Expr::cast(method_call.syntax()).unwrap(); let expr = ast::Expr::cast(method_call.syntax()).unwrap();
if let Some(def_id) = syntax_mapping if let Some(func) = syntax_mapping
.node_expr(expr) .node_expr(expr)
.and_then(|it| infer_result.method_resolution(it)) .and_then(|it| infer_result.method_resolution(it))
{ {
if let Some(target) = NavigationTarget::from_def(db, def_id.resolve(db)) { return Exact(NavigationTarget::from_function(db, func));
return Exact(target);
}
}; };
} }
} }
@ -84,7 +82,7 @@ pub(crate) fn reference_definition(
{ {
let resolved = module.resolve_path(db, &path); let resolved = module.resolve_path(db, &path);
if let Some(def_id) = resolved.take_types().or(resolved.take_values()) { if let Some(def_id) = resolved.take_types().or(resolved.take_values()) {
if let Some(target) = NavigationTarget::from_def(db, def_id.resolve(db)) { if let Some(target) = NavigationTarget::from_def(db, def_id) {
return Exact(target); return Exact(target);
} }
} }
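Two things get simpler in this file: a resolved method call is now a `hir::Function` handle, so its navigation target comes straight from the new `from_function` with no fallible `DefId` resolution step in between, and path resolution hands `from_def` a `ModuleDef` directly. A rough sketch of the method-call branch with placeholder types (not the real ra_ide_api signatures):

#[derive(Clone)]
struct Function {
    name: String,
}

#[derive(Debug, PartialEq)]
struct NavigationTarget {
    name: String,
}

// Stand-in for NavigationTarget::from_function: building a target from a
// resolved function handle cannot fail.
fn from_function(func: &Function) -> NavigationTarget {
    NavigationTarget { name: func.name.clone() }
}

// Shape of the new method-call branch in reference_definition: if type
// inference resolved the call, return the exact target immediately.
fn method_call_target(resolved: Option<Function>) -> Option<NavigationTarget> {
    resolved.map(|func| from_function(&func))
}

fn main() {
    let resolved = Some(Function { name: "frobnicate".into() });
    assert_eq!(
        method_call_target(resolved),
        Some(NavigationTarget { name: "frobnicate".into() })
    );
}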

View file

@ -3,7 +3,7 @@ use ra_syntax::{
SyntaxNode, AstNode, SmolStr, TextRange, ast, SyntaxNode, AstNode, SmolStr, TextRange, ast,
SyntaxKind::{self, NAME}, SyntaxKind::{self, NAME},
}; };
use hir::{Def, ModuleSource}; use hir::{ModuleSource};
use crate::{FileSymbol, db::RootDatabase}; use crate::{FileSymbol, db::RootDatabase};
@ -96,45 +96,69 @@ impl NavigationTarget {
NavigationTarget::from_module(db, module) NavigationTarget::from_module(db, module)
} }
pub(crate) fn from_function(db: &RootDatabase, func: hir::Function) -> NavigationTarget {
let (file_id, fn_def) = func.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*fn_def)
}
// TODO once Def::Item is gone, this should be able to always return a NavigationTarget // TODO once Def::Item is gone, this should be able to always return a NavigationTarget
pub(crate) fn from_def(db: &RootDatabase, def: Def) -> Option<NavigationTarget> { pub(crate) fn from_def(
let res = match def { db: &RootDatabase,
Def::Struct(s) => { module_def: hir::ModuleDef,
) -> Option<NavigationTarget> {
match module_def {
hir::ModuleDef::Module(module) => Some(NavigationTarget::from_module(db, module)),
hir::ModuleDef::Function(func) => Some(NavigationTarget::from_function(db, func)),
hir::ModuleDef::Struct(s) => {
let (file_id, node) = s.source(db); let (file_id, node) = s.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node) Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
} }
Def::Enum(e) => { hir::ModuleDef::Const(s) => {
let (file_id, node) = s.source(db);
Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
}
hir::ModuleDef::Static(s) => {
let (file_id, node) = s.source(db);
Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
}
hir::ModuleDef::Enum(e) => {
let (file_id, node) = e.source(db); let (file_id, node) = e.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node) Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
} }
Def::EnumVariant(ev) => { hir::ModuleDef::EnumVariant(var) => {
let (file_id, node) = ev.source(db); let (file_id, node) = var.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node) Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
} }
Def::Function(f) => { hir::ModuleDef::Trait(e) => {
let (file_id, node) = f.source(db); let (file_id, node) = e.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node) Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
} }
Def::Trait(f) => { hir::ModuleDef::Type(e) => {
let (file_id, node) = f.source(db); let (file_id, node) = e.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node) Some(NavigationTarget::from_named(
file_id.original_file(db),
&*node,
))
} }
Def::Type(f) => { }
let (file_id, node) = f.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node)
}
Def::Static(f) => {
let (file_id, node) = f.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node)
}
Def::Const(f) => {
let (file_id, node) = f.source(db);
NavigationTarget::from_named(file_id.original_file(db), &*node)
}
Def::Module(m) => NavigationTarget::from_module(db, m),
Def::Item => return None,
};
Some(res)
} }
#[cfg(test)] #[cfg(test)]
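Apart from the `Module` and `Function` arms, every arm of the new `from_def` follows the same three-step recipe: ask the def for its `(file_id, node)` source, map that file id back to the original on-disk file, and build the target from the named node. A compact stand-in sketch of that shared recipe (the real `source(db)`, `original_file(db)` and `from_named` are rust-analyzer internals, so the types below are invented for illustration):

#[derive(Debug, Clone, Copy, PartialEq)]
struct FileId(u32);

struct NamedNode {
    name: String,
}

struct NavigationTarget {
    file_id: FileId,
    name: String,
}

// Stand-in for "a def that knows where its source lives": hir::Struct,
// hir::Enum, hir::Trait, ... all provide an equivalent source accessor.
trait HasSource {
    fn source(&self) -> (FileId, NamedNode);
}

fn from_named(file_id: FileId, node: &NamedNode) -> NavigationTarget {
    // (the original_file(db) mapping for macro-expanded files is elided here)
    NavigationTarget { file_id, name: node.name.clone() }
}

// The shared shape of the Struct/Const/Static/Enum/EnumVariant/Trait/Type arms.
fn from_source<T: HasSource>(def: &T) -> NavigationTarget {
    let (file_id, node) = def.source();
    from_named(file_id, &node)
}

struct DummyStruct;

impl HasSource for DummyStruct {
    fn source(&self) -> (FileId, NamedNode) {
        (FileId(1), NamedNode { name: "Foo".into() })
    }
}

fn main() {
    let target = from_source(&DummyStruct);
    assert_eq!(target.file_id, FileId(1));
    assert_eq!(target.name, "Foo");
}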

View file

@ -57,7 +57,6 @@ fn rename_mod(
) -> Option<SourceChange> { ) -> Option<SourceChange> {
let mut source_file_edits = Vec::new(); let mut source_file_edits = Vec::new();
let mut file_system_edits = Vec::new(); let mut file_system_edits = Vec::new();
if let Some(module) = module_from_declaration(db, position.file_id, &ast_module) { if let Some(module) = module_from_declaration(db, position.file_id, &ast_module) {
let (file_id, module_source) = module.definition_source(db); let (file_id, module_source) = module.definition_source(db);
match module_source { match module_source {
@ -223,11 +222,15 @@ mod tests {
fn test_rename_mod() { fn test_rename_mod() {
let (analysis, position) = analysis_and_position( let (analysis, position) = analysis_and_position(
" "
//- /bar.rs //- /lib.rs
mod fo<|>o; mod bar;
//- /bar/foo.rs
// emtpy //- /bar.rs
", mod foo<|>;
//- /bar/foo.rs
// emtpy
",
); );
let new_name = "foo2"; let new_name = "foo2";
let source_change = analysis.rename(position, new_name).unwrap(); let source_change = analysis.rename(position, new_name).unwrap();
@ -238,11 +241,11 @@ mod tests {
fn test_rename_mod_in_dir() { fn test_rename_mod_in_dir() {
let (analysis, position) = analysis_and_position( let (analysis, position) = analysis_and_position(
" "
//- /lib.rs //- /lib.rs
mod fo<|>o; mod fo<|>o;
//- /foo/mod.rs //- /foo/mod.rs
// emtpy // emtpy
", ",
); );
let new_name = "foo2"; let new_name = "foo2";
let source_change = analysis.rename(position, new_name).unwrap(); let source_change = analysis.rename(position, new_name).unwrap();

View file

@ -1,8 +1,8 @@
--- ---
created: "2019-01-22T14:45:00.975229300+00:00" created: "2019-01-24T08:39:53.759318522+00:00"
creator: insta@0.4.0 creator: insta@0.5.2
expression: "&source_change" expression: "&source_change"
source: "crates\\ra_ide_api\\src\\rename.rs" source: crates/ra_ide_api/src/rename.rs
--- ---
Some( Some(
SourceChange { SourceChange {
@ -10,7 +10,7 @@ Some(
source_file_edits: [ source_file_edits: [
SourceFileEdit { SourceFileEdit {
file_id: FileId( file_id: FileId(
1 2
), ),
edit: TextEdit { edit: TextEdit {
atoms: [ atoms: [
@ -25,7 +25,7 @@ Some(
file_system_edits: [ file_system_edits: [
MoveFile { MoveFile {
src: FileId( src: FileId(
2 3
), ),
dst_source_root: SourceRootId( dst_source_root: SourceRootId(
0 0

View file

@ -430,6 +430,13 @@ impl StructDef {
} }
impl EnumVariant { impl EnumVariant {
pub fn parent_enum(&self) -> &EnumDef {
self.syntax()
.parent()
.and_then(|it| it.parent())
.and_then(EnumDef::cast)
.expect("EnumVariants are always nested in Enums")
}
pub fn flavor(&self) -> StructFlavor { pub fn flavor(&self) -> StructFlavor {
StructFlavor::from_node(self) StructFlavor::from_node(self)
} }
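On the two chained `.parent()` calls in the new `parent_enum`: in the ra_syntax tree an `EnumVariant`'s direct parent is the `EnumVariantList` (the braced body), and only that list's parent is the `EnumDef` itself, which is why a single `.parent()` would not be enough and why the `.expect(...)` is reasonable for any variant that came from parsed source. The nesting, annotated on ordinary Rust source:

#[allow(dead_code)]
enum Color {      // EnumDef
    // everything between the braces is the EnumVariantList
    Red,          // EnumVariant: .parent() is the EnumVariantList,
    Green,        // .parent().parent() is the EnumDef `Color`
}

fn main() {}

So for either variant, something like `variant.parent_enum().name()` (accessor assumed from the `NameOwner` trait on enums) climbs two levels and recovers `Color`.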

View file

@ -3229,6 +3229,7 @@ impl ast::VisibilityOwner for TraitDef {}
impl ast::NameOwner for TraitDef {} impl ast::NameOwner for TraitDef {}
impl ast::AttrsOwner for TraitDef {} impl ast::AttrsOwner for TraitDef {}
impl ast::DocCommentsOwner for TraitDef {} impl ast::DocCommentsOwner for TraitDef {}
impl ast::TypeParamsOwner for TraitDef {}
impl TraitDef {} impl TraitDef {}
// TrueKw // TrueKw

View file

@ -280,7 +280,7 @@ Grammar(
], options: [["variant_list", "EnumVariantList"]] ), ], options: [["variant_list", "EnumVariantList"]] ),
"EnumVariantList": ( collections: [["variants", "EnumVariant"]] ), "EnumVariantList": ( collections: [["variants", "EnumVariant"]] ),
"EnumVariant": ( traits: ["NameOwner", "DocCommentsOwner"], options: ["Expr"] ), "EnumVariant": ( traits: ["NameOwner", "DocCommentsOwner"], options: ["Expr"] ),
"TraitDef": ( traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner"] ), "TraitDef": ( traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner", "TypeParamsOwner"] ),
"Module": ( "Module": (
traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner" ], traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner" ],
options: [ "ItemList" ] options: [ "ItemList" ]
@ -489,7 +489,7 @@ Grammar(
), ),
"RefPat": ( options: [ "Pat" ]), "RefPat": ( options: [ "Pat" ]),
"BindPat": ( "BindPat": (
options: [ "Pat" ], options: [ "Pat" ],
traits: ["NameOwner"] traits: ["NameOwner"]
), ),