Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-03 07:04:49 +00:00)

Commit b74449e995: Merge remote-tracking branch 'upstream/master' into issue961_profiling
54 changed files with 1943 additions and 1180 deletions
@@ -1,8 +1,7 @@
use std::sync::Arc;
use relative_path::RelativePathBuf;
use ra_db::{CrateId, SourceRootId, Edition};
use ra_syntax::{ast::self, TreeArc, SyntaxNode};
use ra_syntax::{ast::self, TreeArc};

use crate::{
Name, ScopesWithSourceMap, Ty, HirFileId,

@@ -14,9 +13,11 @@ use crate::{
adt::{EnumVariantId, StructFieldId, VariantDef},
generics::GenericParams,
docs::{Documentation, Docs, docs_from_ast},
ids::{FunctionId, StructId, EnumId, AstItemDef, ConstId, StaticId, TraitId, TypeId},
ids::{FunctionId, StructId, EnumId, AstItemDef, ConstId, StaticId, TraitId, TypeAliasId},
impl_block::ImplBlock,
resolve::Resolver,
diagnostics::DiagnosticSink,
traits::{TraitItem, TraitData},
};

/// hir::Crate describes a single crate. It's the main interface with which

@@ -95,11 +96,6 @@ pub enum ModuleSource {
Module(TreeArc<ast::Module>),
}

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum Problem {
UnresolvedModule { candidate: RelativePathBuf },
}

impl Module {
/// Name of this module.
pub fn name(&self, db: &impl HirDatabase) -> Option<Name> {

@@ -171,8 +167,24 @@ impl Module {
db.crate_def_map(self.krate)[self.module_id].scope.clone()
}

pub fn problems(&self, db: &impl HirDatabase) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
self.problems_impl(db)
pub fn diagnostics(&self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
db.crate_def_map(self.krate).add_diagnostics(db, self.module_id, sink);
for decl in self.declarations(db) {
match decl {
crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
crate::ModuleDef::Module(f) => f.diagnostics(db, sink),
_ => (),
}
}

for impl_block in self.impl_blocks(db) {
for item in impl_block.items(db) {
match item {
crate::ImplItem::Method(f) => f.diagnostics(db, sink),
_ => (),
}
}
}
}

pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {

@@ -519,6 +531,10 @@ impl Function {
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}

pub fn diagnostics(&self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
self.infer(db).add_diagnostics(db, *self, sink);
}
}

impl Docs for Function {

@@ -634,6 +650,18 @@ impl Trait {
pub fn generic_params(&self, db: &impl DefDatabase) -> Arc<GenericParams> {
db.generic_params((*self).into())
}

pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
self.trait_data(db).name().clone()
}

pub fn items(self, db: &impl DefDatabase) -> Vec<TraitItem> {
self.trait_data(db).items().to_vec()
}

pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> {
db.trait_data(self)
}
}

impl Docs for Trait {

@@ -644,7 +672,7 @@ impl Docs for Trait {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAlias {
pub(crate) id: TypeId,
pub(crate) id: TypeAliasId,
}

impl TypeAlias {
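The hunks above replace the pull-style `Module::problems` (which returned a `Vec` of syntax nodes and `Problem`s) with a push-style `Module::diagnostics` that feeds a `DiagnosticSink` and recurses into the module's declarations and impl blocks. A minimal, self-contained sketch of that push-based traversal, using a hypothetical `Module` type and a plain closure as the sink rather than the real hir API:

    // Sketch: push-based diagnostics over a module tree (hypothetical types).
    struct Module {
        name: String,
        errors: Vec<String>,
        children: Vec<Module>,
    }

    impl Module {
        // Instead of returning a Vec, push into a caller-supplied sink,
        // then recurse into child modules, mirroring the pattern above.
        fn diagnostics(&self, sink: &mut dyn FnMut(&str, &str)) {
            for e in &self.errors {
                sink(&self.name, e);
            }
            for child in &self.children {
                child.diagnostics(sink);
            }
        }
    }

    fn main() {
        let root = Module {
            name: "crate_root".into(),
            errors: vec!["unresolved module".into()],
            children: vec![Module { name: "foo".into(), errors: vec![], children: vec![] }],
        };
        let mut out = Vec::new();
        root.diagnostics(&mut |module, msg| out.push(format!("{}: {}", module, msg)));
        assert_eq!(out, vec!["crate_root: unresolved module".to_string()]);
    }

The advantage of the sink shape is that callers decide how to react per diagnostic type without the producer allocating intermediate collections.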
@@ -1,18 +1,18 @@
use ra_db::FileId;
use ra_syntax::{ast, SyntaxNode, TreeArc, AstNode};
use ra_syntax::{ast, TreeArc};

use crate::{
Module, ModuleSource, Problem, Name,
Module, ModuleSource, Name, AstId,
nameres::{CrateModuleId, ImportId},
HirDatabase, DefDatabase,
HirFileId, SourceItemId,
HirFileId,
};

impl ModuleSource {
pub(crate) fn new(
db: &impl DefDatabase,
file_id: Option<FileId>,
decl_id: Option<SourceItemId>,
decl_id: Option<AstId<ast::Module>>,
) -> ModuleSource {
match (file_id, decl_id) {
(Some(file_id), _) => {

@@ -20,8 +20,7 @@ impl ModuleSource {
ModuleSource::SourceFile(source_file)
}
(None, Some(item_id)) => {
let module = db.file_item(item_id);
let module = ast::Module::cast(&*module).unwrap();
let module = item_id.to_node(db);
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module.to_owned())
}

@@ -55,7 +54,7 @@ impl Module {
let decl_id = def_map[self.module_id].declaration;
let file_id = def_map[self.module_id].definition;
let module_source = ModuleSource::new(db, file_id, decl_id);
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id);
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id());
(file_id, module_source)
}

@@ -65,9 +64,8 @@ impl Module {
) -> Option<(HirFileId, TreeArc<ast::Module>)> {
let def_map = db.crate_def_map(self.krate);
let decl = def_map[self.module_id].declaration?;
let syntax_node = db.file_item(decl);
let ast = ast::Module::cast(&syntax_node).unwrap().to_owned();
Some((decl.file_id, ast))
let ast = decl.to_node(db);
Some((decl.file_id(), ast))
}

pub(crate) fn import_source_impl(

@@ -76,7 +74,7 @@ impl Module {
import: ImportId,
) -> TreeArc<ast::PathSegment> {
let (file_id, source) = self.definition_source(db);
let (_, source_map) = db.raw_items_with_source_map(file_id.original_file(db));
let (_, source_map) = db.raw_items_with_source_map(file_id);
source_map.get(&source, import)
}

@@ -108,19 +106,4 @@ impl Module {
let parent_id = def_map[self.module_id].parent?;
Some(self.with_module_id(parent_id))
}

pub(crate) fn problems_impl(
&self,
db: &impl HirDatabase,
) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
let def_map = db.crate_def_map(self.krate);
let (my_file_id, _) = self.definition_source(db);
// FIXME: not entirely corret filterint by module
def_map
.problems()
.iter()
.filter(|(source_item_id, _problem)| my_file_id == source_item_id.file_id)
.map(|(source_item_id, problem)| (db.file_item(*source_item_id), problem.clone()))
.collect()
}
}
@@ -1,10 +1,10 @@
use std::sync::Arc;

use ra_syntax::{SyntaxNode, TreeArc, SourceFile};
use ra_db::{SourceDatabase, salsa, FileId};
use ra_db::{SourceDatabase, salsa};

use crate::{
HirFileId, SourceFileItems, SourceItemId, Crate, Module, HirInterner,
HirFileId, MacroDefId, AstIdMap, ErasedFileAstId, Crate, Module, HirInterner,
Function, FnSignature, ExprScopes, TypeAlias,
Struct, Enum, StructField,
Const, ConstSignature, Static,

@@ -14,11 +14,15 @@ use crate::{
impl_block::{ModuleImplBlocks, ImplSourceMap},
generics::{GenericParams, GenericDef},
type_ref::TypeRef,
traits::TraitData, Trait, ty::TraitRef
};

#[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: SourceDatabase + AsRef<HirInterner> {
#[salsa::invoke(HirFileId::hir_parse)]
#[salsa::invoke(crate::ids::macro_def_query)]
fn macro_def(&self, macro_id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;

#[salsa::invoke(HirFileId::hir_parse_query)]
fn hir_parse(&self, file_id: HirFileId) -> TreeArc<SourceFile>;

#[salsa::invoke(crate::adt::StructData::struct_data_query)]

@@ -27,17 +31,23 @@ pub trait DefDatabase: SourceDatabase + AsRef<HirInterner> {
#[salsa::invoke(crate::adt::EnumData::enum_data_query)]
fn enum_data(&self, e: Enum) -> Arc<EnumData>;

#[salsa::invoke(crate::ids::SourceFileItems::file_items_query)]
fn file_items(&self, file_id: HirFileId) -> Arc<SourceFileItems>;
#[salsa::invoke(crate::traits::TraitData::trait_data_query)]
fn trait_data(&self, t: Trait) -> Arc<TraitData>;

#[salsa::invoke(crate::ids::SourceFileItems::file_item_query)]
fn file_item(&self, source_item_id: SourceItemId) -> TreeArc<SyntaxNode>;
#[salsa::invoke(crate::source_id::AstIdMap::ast_id_map_query)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

#[salsa::invoke(crate::source_id::AstIdMap::file_item_query)]
fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>;

#[salsa::invoke(RawItems::raw_items_query)]
fn raw_items(&self, file_id: FileId) -> Arc<RawItems>;
fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;

#[salsa::invoke(RawItems::raw_items_with_source_map_query)]
fn raw_items_with_source_map(&self, file_id: FileId) -> (Arc<RawItems>, Arc<ImportSourceMap>);
fn raw_items_with_source_map(
&self,
file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>);

#[salsa::invoke(CrateDefMap::crate_def_map_query)]
fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>;

@@ -98,6 +108,9 @@ pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;

#[salsa::invoke(crate::ty::method_resolution::implements)]
fn implements(&self, trait_ref: TraitRef) -> bool;
}

#[test]
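The database hunks above swap the old `SourceFileItems`/`SourceItemId` queries for `AstIdMap`/`ErasedFileAstId` and make the raw-items queries keyed by `HirFileId`, so macro-expanded files flow through the same path as regular files. The salsa query group is the real mechanism; purely as an illustration of what a memoized query buys here, a hand-rolled sketch with hypothetical types (this is not salsa and not the rust-analyzer API):

    // Sketch: a hand-rolled memoized "query", computed once per input key.
    use std::collections::HashMap;

    #[derive(Default)]
    struct Db {
        sources: HashMap<u32, String>,          // file_id -> text (the input)
        ast_id_maps: HashMap<u32, Vec<String>>, // memoized query results
    }

    impl Db {
        // The "query": derive a per-file value once, then reuse the cached copy.
        fn ast_id_map(&mut self, file_id: u32) -> &Vec<String> {
            if !self.ast_id_maps.contains_key(&file_id) {
                let items = self
                    .sources
                    .get(&file_id)
                    .map(|text| text.lines().map(str::to_owned).collect())
                    .unwrap_or_default();
                self.ast_id_maps.insert(file_id, items);
            }
            &self.ast_id_maps[&file_id]
        }
    }

    fn main() {
        let mut db = Db::default();
        db.sources.insert(1, "fn main() {}\nstruct S;".to_string());
        assert_eq!(db.ast_id_map(1).len(), 2); // computed on first request
        assert_eq!(db.ast_id_map(1).len(), 2); // served from the cache afterwards
    }

Salsa additionally tracks dependencies and invalidates cached values when inputs change, which the sketch deliberately leaves out.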
crates/ra_hir/src/diagnostics.rs (new file, 115 lines)

@@ -0,0 +1,115 @@
use std::{fmt, any::Any};

use ra_syntax::{SyntaxNodePtr, TreeArc, AstPtr, TextRange, ast, SyntaxNode};
use relative_path::RelativePathBuf;

use crate::{HirFileId, HirDatabase};

/// Diagnostic defines hir API for errors and warnings.
///
/// It is used as a `dyn` object, which you can downcast to a concrete
/// diagnostic. DiagnosticSink are structured, meaning that they include rich
/// information which can be used by IDE to create fixes. DiagnosticSink are
/// expressed in terms of macro-expanded syntax tree nodes (so, it's a bad idea
/// to diagnostic in a salsa value).
///
/// Internally, various subsystems of hir produce diagnostics specific to a
/// subsystem (typically, an `enum`), which are safe to store in salsa but do not
/// include source locations. Such internal diagnostic are transformed into an
/// instance of `Diagnostic` on demand.
pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
fn message(&self) -> String;
fn file(&self) -> HirFileId;
fn syntax_node_ptr(&self) -> SyntaxNodePtr;
fn highlight_range(&self) -> TextRange {
self.syntax_node_ptr().range()
}
fn as_any(&self) -> &(dyn Any + Send + 'static);
}

impl dyn Diagnostic {
pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(self.file());
self.syntax_node_ptr().to_node(&source_file).to_owned()
}
pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
self.as_any().downcast_ref()
}
}

pub struct DiagnosticSink<'a> {
callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
}

impl<'a> DiagnosticSink<'a> {
pub fn new(cb: impl FnMut(&dyn Diagnostic) + 'a) -> DiagnosticSink<'a> {
DiagnosticSink { callbacks: Vec::new(), default_callback: Box::new(cb) }
}

pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> DiagnosticSink<'a> {
let cb = move |diag: &dyn Diagnostic| match diag.downcast_ref::<D>() {
Some(d) => {
cb(d);
Ok(())
}
None => Err(()),
};
self.callbacks.push(Box::new(cb));
self
}

pub(crate) fn push(&mut self, d: impl Diagnostic) {
let d: &dyn Diagnostic = &d;
for cb in self.callbacks.iter_mut() {
match cb(d) {
Ok(()) => return,
Err(()) => (),
}
}
(self.default_callback)(d)
}
}

#[derive(Debug)]
pub struct NoSuchField {
pub file: HirFileId,
pub field: AstPtr<ast::NamedField>,
}

impl Diagnostic for NoSuchField {
fn message(&self) -> String {
"no such field".to_string()
}
fn file(&self) -> HirFileId {
self.file
}
fn syntax_node_ptr(&self) -> SyntaxNodePtr {
self.field.into()
}
fn as_any(&self) -> &(Any + Send + 'static) {
self
}
}

#[derive(Debug)]
pub struct UnresolvedModule {
pub file: HirFileId,
pub decl: AstPtr<ast::Module>,
pub candidate: RelativePathBuf,
}

impl Diagnostic for UnresolvedModule {
fn message(&self) -> String {
"unresolved module".to_string()
}
fn file(&self) -> HirFileId {
self.file
}
fn syntax_node_ptr(&self) -> SyntaxNodePtr {
self.decl.into()
}
fn as_any(&self) -> &(Any + Send + 'static) {
self
}
}
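The new `diagnostics.rs` builds its sink on `Any`-based downcasting: typed callbacks registered with `on::<D>()` claim matching diagnostics, and anything unclaimed falls through to the default callback. A compact, self-contained sketch of that dispatch idea using only the standard library (hypothetical diagnostic and sink types, not the hir API):

    // Sketch of the downcast-based sink: typed handlers first, default last.
    use std::any::Any;
    use std::fmt;

    trait Diag: Any + fmt::Debug {
        fn message(&self) -> String;
        fn as_any(&self) -> &dyn Any;
    }

    #[derive(Debug)]
    struct UnresolvedModule { candidate: String }

    impl Diag for UnresolvedModule {
        fn message(&self) -> String { format!("unresolved module, did you mean {}?", self.candidate) }
        fn as_any(&self) -> &dyn Any { self }
    }

    struct Sink<'a> {
        handlers: Vec<Box<dyn FnMut(&dyn Diag) -> bool + 'a>>,
        default: Box<dyn FnMut(&dyn Diag) + 'a>,
    }

    impl<'a> Sink<'a> {
        fn new(default: impl FnMut(&dyn Diag) + 'a) -> Self {
            Sink { handlers: Vec::new(), default: Box::new(default) }
        }
        // Register a handler for one concrete diagnostic type.
        fn on<D: Diag>(mut self, mut f: impl FnMut(&D) + 'a) -> Self {
            self.handlers.push(Box::new(move |d| match d.as_any().downcast_ref::<D>() {
                Some(d) => { f(d); true }
                None => false,
            }));
            self
        }
        fn push(&mut self, d: &dyn Diag) {
            // The first handler that claims the diagnostic wins.
            if !self.handlers.iter_mut().any(|h| h(d)) {
                (self.default)(d);
            }
        }
    }

    fn main() {
        let mut handled = Vec::new();
        {
            let mut sink = Sink::new(|d| println!("unhandled: {}", d.message()))
                .on::<UnresolvedModule>(|d| handled.push(d.candidate.clone()));
            sink.push(&UnresolvedModule { candidate: "foo.rs".into() });
        }
        assert_eq!(handled, vec!["foo.rs".to_string()]);
    }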
@@ -5,7 +5,7 @@ use rustc_hash::FxHashMap;
use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
use ra_syntax::{
SyntaxNodePtr, AstNode,
SyntaxNodePtr, AstPtr, AstNode,
ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralFlavor, TypeAscriptionOwner}
};

@@ -54,6 +54,7 @@ pub struct BodySourceMap {
expr_map_back: ArenaMap<ExprId, SyntaxNodePtr>,
pat_map: FxHashMap<SyntaxNodePtr, PatId>,
pat_map_back: ArenaMap<PatId, SyntaxNodePtr>,
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::NamedField>>,
}

impl Body {

@@ -138,6 +139,10 @@ impl BodySourceMap {
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}

pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
self.field_map[&(expr, field)].clone()
}
}

#[derive(Debug, Clone, Eq, PartialEq)]

@@ -629,8 +634,10 @@ impl ExprCollector {
}
ast::ExprKind::StructLit(e) => {
let path = e.path().and_then(Path::from_ast);
let mut field_ptrs = Vec::new();
let fields = if let Some(nfl) = e.named_field_list() {
nfl.fields()
.inspect(|field| field_ptrs.push(AstPtr::new(*field)))
.map(|field| StructLitField {
name: field
.name_ref()

@@ -657,7 +664,11 @@ impl ExprCollector {
Vec::new()
};
let spread = e.spread().map(|s| self.collect_expr(s));
self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr)
let res = self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr);
for (i, ptr) in field_ptrs.into_iter().enumerate() {
self.source_map.field_map.insert((res, i), ptr);
}
res
}
ast::ExprKind::FieldExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
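The expr.rs hunks thread an `AstPtr<ast::NamedField>` side table through struct-literal collection: `inspect` records one pointer per field while the fields are being lowered, and the pointers are then keyed by `(ExprId, field index)`. A tiny sketch of that "record while mapping, key by (owner, index)" pattern with plain types (hypothetical, not the rust-analyzer types):

    // Sketch: collect items while recording a per-index side table.
    use std::collections::HashMap;

    fn main() {
        let expr_id = 42u32; // stands in for the allocated ExprId
        let fields = vec!["x", "y", "z"];

        let mut ptrs = Vec::new();
        // `inspect` records one entry per field as the iterator is consumed,
        // mirroring how field_ptrs is filled while building StructLitField.
        let lowered: Vec<String> = fields
            .iter()
            .inspect(|f| ptrs.push(format!("ptr({})", f)))
            .map(|f| f.to_uppercase())
            .collect();

        let mut field_map: HashMap<(u32, usize), String> = HashMap::new();
        for (i, ptr) in ptrs.into_iter().enumerate() {
            field_map.insert((expr_id, i), ptr);
        }

        assert_eq!(lowered, vec!["X", "Y", "Z"]);
        assert_eq!(field_map[&(expr_id, 1)], "ptr(y)");
    }

The side table is what later lets a "no such field" diagnostic point at the exact field syntax node.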
@@ -1,16 +1,15 @@
use std::{
marker::PhantomData,
hash::{Hash, Hasher},
sync::Arc,
};

use ra_db::{LocationInterner, FileId};
use ra_syntax::{TreeArc, SyntaxNode, SourceFile, AstNode, SyntaxNodePtr, ast};
use ra_arena::{Arena, RawId, ArenaId, impl_arena_id};
use ra_syntax::{TreeArc, SourceFile, AstNode, ast};
use ra_arena::{RawId, ArenaId, impl_arena_id};
use mbe::MacroRules;

use crate::{
Module,
DefDatabase,
Module, DefDatabase, AstId, FileAstId,
};

#[derive(Debug, Default)]

@@ -22,7 +21,7 @@ pub struct HirInterner {
consts: LocationInterner<ItemLoc<ast::ConstDef>, ConstId>,
statics: LocationInterner<ItemLoc<ast::StaticDef>, StaticId>,
traits: LocationInterner<ItemLoc<ast::TraitDef>, TraitId>,
types: LocationInterner<ItemLoc<ast::TypeAliasDef>, TypeId>,
types: LocationInterner<ItemLoc<ast::TypeAliasDef>, TypeAliasId>,
}

impl HirInterner {

@@ -68,7 +67,7 @@ impl HirFileId {
HirFileIdRepr::File(file_id) => file_id,
HirFileIdRepr::Macro(macro_call_id) => {
let loc = macro_call_id.loc(db);
loc.source_item_id.file_id.original_file(db)
loc.ast_id.file_id().original_file(db)
}
}
}

@@ -83,7 +82,10 @@ impl HirFileId {
}
}

pub(crate) fn hir_parse(db: &impl DefDatabase, file_id: HirFileId) -> TreeArc<SourceFile> {
pub(crate) fn hir_parse_query(
db: &impl DefDatabase,
file_id: HirFileId,
) -> TreeArc<SourceFile> {
match file_id.0 {
HirFileIdRepr::File(file_id) => db.parse(file_id),
HirFileIdRepr::Macro(macro_call_id) => {

@@ -96,14 +98,10 @@ impl HirFileId {
fn parse_macro(db: &impl DefDatabase, macro_call_id: MacroCallId) -> Option<TreeArc<SourceFile>> {
let loc = macro_call_id.loc(db);
let syntax = db.file_item(loc.source_item_id);
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
let macro_call = loc.ast_id.to_node(db);
let (macro_arg, _) = macro_call.token_tree().and_then(mbe::ast_to_token_tree)?;

let def_map = db.crate_def_map(loc.module.krate);
let (krate, macro_id) = def_map.resolve_macro(macro_call_id)?;
let def_map = db.crate_def_map(krate);
let macro_rules = &def_map[macro_id];
let macro_rules = db.macro_def(loc.def)?;
let tt = macro_rules.expand(&macro_arg).ok()?;
Some(mbe::token_tree_to_ast_item_list(&tt))
}

@@ -126,6 +124,17 @@ impl From<MacroCallId> for HirFileId {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>);

pub(crate) fn macro_def_query(db: &impl DefDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
let macro_call = id.0.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(arg)?;
let rules = MacroRules::parse(&tt).ok()?;
Some(Arc::new(rules))
}

/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

@@ -134,8 +143,8 @@ impl_arena_id!(MacroCallId);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub(crate) module: Module,
pub(crate) source_item_id: SourceItemId,
pub(crate) def: MacroDefId,
pub(crate) ast_id: AstId<ast::MacroCall>,
}

impl MacroCallId {

@@ -145,7 +154,6 @@ impl MacroCallId {
}

impl MacroCallLoc {
#[allow(unused)]
pub(crate) fn id(&self, db: &impl AsRef<HirInterner>) -> MacroCallId {
db.as_ref().macros.loc2id(&self)
}

@@ -154,26 +162,25 @@ impl MacroCallLoc {
#[derive(Debug)]
pub struct ItemLoc<N: AstNode> {
pub(crate) module: Module,
raw: SourceItemId,
_ty: PhantomData<N>,
ast_id: AstId<N>,
}

impl<N: AstNode> PartialEq for ItemLoc<N> {
fn eq(&self, other: &Self) -> bool {
self.module == other.module && self.raw == other.raw
self.module == other.module && self.ast_id == other.ast_id
}
}
impl<N: AstNode> Eq for ItemLoc<N> {}
impl<N: AstNode> Hash for ItemLoc<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.module.hash(hasher);
self.raw.hash(hasher);
self.ast_id.hash(hasher);
}
}

impl<N: AstNode> Clone for ItemLoc<N> {
fn clone(&self) -> ItemLoc<N> {
ItemLoc { module: self.module, raw: self.raw, _ty: PhantomData }
ItemLoc { module: self.module, ast_id: self.ast_id }
}
}

@@ -200,26 +207,19 @@ impl<'a, DB: DefDatabase> LocationCtx<&'a DB> {
pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<N>, Self>;
fn from_ast(ctx: LocationCtx<&impl DefDatabase>, ast: &N) -> Self {
let items = ctx.db.file_items(ctx.file_id);
let item_id = items.id_of(ctx.file_id, ast.syntax());
Self::from_source_item_id_unchecked(ctx, item_id)
let items = ctx.db.ast_id_map(ctx.file_id);
let item_id = items.ast_id(ast);
Self::from_ast_id(ctx, item_id)
}
fn from_source_item_id_unchecked(
ctx: LocationCtx<&impl DefDatabase>,
item_id: SourceFileItemId,
) -> Self {
let raw = SourceItemId { file_id: ctx.file_id, item_id };
let loc = ItemLoc { module: ctx.module, raw, _ty: PhantomData };
fn from_ast_id(ctx: LocationCtx<&impl DefDatabase>, ast_id: FileAstId<N>) -> Self {
let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) };
Self::interner(ctx.db.as_ref()).loc2id(&loc)
}
fn source(self, db: &impl DefDatabase) -> (HirFileId, TreeArc<N>) {
let int = Self::interner(db.as_ref());
let loc = int.id2loc(self);
let syntax = db.file_item(loc.raw);
let ast =
N::cast(&syntax).unwrap_or_else(|| panic!("invalid ItemLoc: {:?}", loc.raw)).to_owned();
(loc.raw.file_id, ast)
let ast = loc.ast_id.to_node(db);
(loc.ast_id.file_id(), ast)
}
fn module(self, db: &impl DefDatabase) -> Module {
let int = Self::interner(db.as_ref());

@@ -229,7 +229,7 @@ pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(RawId);
pub(crate) struct FunctionId(RawId);
impl_arena_id!(FunctionId);
impl AstItemDef<ast::FnDef> for FunctionId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::FnDef>, Self> {

@@ -238,7 +238,7 @@ impl AstItemDef<ast::FnDef> for FunctionId {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(RawId);
pub(crate) struct StructId(RawId);
impl_arena_id!(StructId);
impl AstItemDef<ast::StructDef> for StructId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::StructDef>, Self> {

@@ -247,7 +247,7 @@ impl AstItemDef<ast::StructDef> for StructId {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumId(RawId);
pub(crate) struct EnumId(RawId);
impl_arena_id!(EnumId);
impl AstItemDef<ast::EnumDef> for EnumId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::EnumDef>, Self> {

@@ -256,7 +256,7 @@ impl AstItemDef<ast::EnumDef> for EnumId {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(RawId);
pub(crate) struct ConstId(RawId);
impl_arena_id!(ConstId);
impl AstItemDef<ast::ConstDef> for ConstId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::ConstDef>, Self> {

@@ -265,7 +265,7 @@ impl AstItemDef<ast::ConstDef> for ConstId {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(RawId);
pub(crate) struct StaticId(RawId);
impl_arena_id!(StaticId);
impl AstItemDef<ast::StaticDef> for StaticId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::StaticDef>, Self> {

@@ -274,7 +274,7 @@ impl AstItemDef<ast::StaticDef> for StaticId {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitId(RawId);
pub(crate) struct TraitId(RawId);
impl_arena_id!(TraitId);
impl AstItemDef<ast::TraitDef> for TraitId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::TraitDef>, Self> {

@@ -283,117 +283,10 @@ impl AstItemDef<ast::TraitDef> for TraitId {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeId(RawId);
impl_arena_id!(TypeId);
impl AstItemDef<ast::TypeAliasDef> for TypeId {
pub(crate) struct TypeAliasId(RawId);
impl_arena_id!(TypeAliasId);
impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::TypeAliasDef>, Self> {
&interner.types
}
}

/// Identifier of item within a specific file. This is stable over reparses, so
/// it's OK to use it as a salsa key/value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SourceFileItemId(RawId);
impl_arena_id!(SourceFileItemId);

impl SourceFileItemId {
pub(crate) fn with_file_id(self, file_id: HirFileId) -> SourceItemId {
SourceItemId { file_id, item_id: self }
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SourceItemId {
pub(crate) file_id: HirFileId,
pub(crate) item_id: SourceFileItemId,
}

/// Maps items' `SyntaxNode`s to `SourceFileItemId`s and back.
#[derive(Debug, PartialEq, Eq)]
pub struct SourceFileItems {
file_id: HirFileId,
arena: Arena<SourceFileItemId, SyntaxNodePtr>,
}

impl SourceFileItems {
pub(crate) fn file_items_query(
db: &impl DefDatabase,
file_id: HirFileId,
) -> Arc<SourceFileItems> {
let source_file = db.hir_parse(file_id);
Arc::new(SourceFileItems::from_source_file(&source_file, file_id))
}

pub(crate) fn file_item_query(
db: &impl DefDatabase,
source_item_id: SourceItemId,
) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(source_item_id.file_id);
db.file_items(source_item_id.file_id)[source_item_id.item_id]
.to_node(&source_file)
.to_owned()
}

pub(crate) fn from_source_file(
source_file: &SourceFile,
file_id: HirFileId,
) -> SourceFileItems {
let mut res = SourceFileItems { file_id, arena: Arena::default() };
// By walking the tree in bread-first order we make sure that parents
// get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(source_file.syntax(), |it| {
if let Some(module_item) = ast::ModuleItem::cast(it) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
}
});
res
}

fn alloc(&mut self, item: &SyntaxNode) -> SourceFileItemId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
pub(crate) fn id_of(&self, file_id: HirFileId, item: &SyntaxNode) -> SourceFileItemId {
assert_eq!(
self.file_id, file_id,
"SourceFileItems: wrong file, expected {:?}, got {:?}",
self.file_id, file_id
);
self.id_of_unchecked(item)
}
pub(crate) fn id_of_unchecked(&self, item: &SyntaxNode) -> SourceFileItemId {
let ptr = SyntaxNodePtr::new(item);
if let Some((id, _)) = self.arena.iter().find(|(_id, i)| **i == ptr) {
return id;
}
panic!(
"Can't find {:?} in SourceFileItems:\n{:?}",
item,
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
);
}
}

impl std::ops::Index<SourceFileItemId> for SourceFileItems {
type Output = SyntaxNodePtr;
fn index(&self, idx: SourceFileItemId) -> &SyntaxNodePtr {
&self.arena[idx]
}
}

/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) {
let mut curr_layer = vec![node];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {
next_layer.extend(node.children());
f(node);
});
std::mem::swap(&mut curr_layer, &mut next_layer);
}
}
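The removed `SourceFileItems` above relies on a breadth-first walk so that parent items get lower ids than their children; adding something inside a trait or function then does not shift the ids of top-level items, which keeps cached results valid. The same ordering idea carries over to the new `AstIdMap`. A self-contained sketch of that layer-by-layer id assignment on a toy tree (not syntax nodes):

    // Sketch: breadth-first id assignment so parents get lower ids than children.
    struct Node {
        name: &'static str,
        children: Vec<Node>,
    }

    fn assign_ids(root: &Node) -> Vec<&'static str> {
        let mut ids = Vec::new(); // index in this Vec is the assigned id
        let mut curr_layer = vec![root];
        let mut next_layer = Vec::new();
        while !curr_layer.is_empty() {
            for node in curr_layer.drain(..) {
                ids.push(node.name);
                next_layer.extend(node.children.iter());
            }
            std::mem::swap(&mut curr_layer, &mut next_layer);
        }
        ids
    }

    fn main() {
        let tree = Node {
            name: "file",
            children: vec![
                Node { name: "trait Foo", children: vec![Node { name: "fn method", children: vec![] }] },
                Node { name: "fn main", children: vec![] },
            ],
        };
        // Top-level items come first; adding a new child inside `trait Foo`
        // would not shift the ids of `trait Foo` or `fn main`.
        assert_eq!(assign_ids(&tree), ["file", "trait Foo", "fn main", "fn method"]);
    }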
@@ -23,10 +23,12 @@ pub mod mock;
mod path;
pub mod source_binder;

mod source_id;
mod ids;
mod name;
mod nameres;
mod adt;
mod traits;
mod type_alias;
mod type_ref;
mod ty;

@@ -35,6 +37,7 @@ mod expr;
mod generics;
mod docs;
mod resolve;
pub mod diagnostics;

mod code_model_api;
mod code_model_impl;

@@ -45,13 +48,14 @@ mod marks;
use crate::{
db::{HirDatabase, DefDatabase},
name::{AsName, KnownName},
ids::{SourceItemId, SourceFileItems},
source_id::{FileAstId, AstId},
};

pub use self::{
path::{Path, PathKind},
name::Name,
ids::{HirFileId, MacroCallId, MacroCallLoc, HirInterner},
source_id::{AstIdMap, ErasedFileAstId},
ids::{HirFileId, MacroDefId, MacroCallId, MacroCallLoc, HirInterner},
nameres::{PerNs, Namespace},
ty::{Ty, ApplicationTy, TypeCtor, Substs, display::HirDisplay},
impl_block::{ImplBlock, ImplItem},

@@ -63,7 +67,7 @@ pub use self::{
pub use self::code_model_api::{
Crate, CrateDependency,
Module, ModuleDef, ModuleSource, Problem,
Module, ModuleDef, ModuleSource,
Struct, Enum, EnumVariant,
Function, FnSignature,
StructField, FieldSource,
@@ -9,7 +9,7 @@ use relative_path::RelativePathBuf;
use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset};
use rustc_hash::FxHashMap;

use crate::{db, HirInterner};
use crate::{db, HirInterner, diagnostics::DiagnosticSink};

pub const WORKSPACE: SourceRootId = SourceRootId(0);

@@ -70,6 +70,22 @@ impl MockDatabase {
self.set_crate_graph(Arc::new(crate_graph))
}

pub fn diagnostics(&self) -> String {
let mut buf = String::from("\n");
let mut files: Vec<FileId> = self.files.values().map(|&it| it).collect();
files.sort();
for file in files {
let module = crate::source_binder::module_from_file_id(self, file).unwrap();
module.diagnostics(
self,
&mut DiagnosticSink::new(|d| {
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
}),
)
}
buf
}

fn from_fixture(fixture: &str) -> (MockDatabase, Option<FilePosition>) {
let mut db = MockDatabase::default();
@@ -59,12 +59,16 @@ use rustc_hash::FxHashMap;
use ra_arena::{Arena, RawId, impl_arena_id};
use ra_db::{FileId, Edition};
use test_utils::tested_by;
use ra_syntax::ast;
use ra_prof::profile;

use crate::{
ModuleDef, Name, Crate, Module, Problem,
DefDatabase, Path, PathKind, HirFileId,
ids::{SourceItemId, SourceFileItemId, MacroCallId},
ModuleDef, Name, Crate, Module,
DefDatabase, Path, PathKind, HirFileId, Trait,
ids::MacroDefId,
diagnostics::DiagnosticSink,
nameres::diagnostics::DefDiagnostic,
AstId,
};

pub(crate) use self::raw::{RawItems, ImportId, ImportSourceMap};

@@ -83,10 +87,8 @@ pub struct CrateDefMap {
extern_prelude: FxHashMap<Name, ModuleDef>,
root: CrateModuleId,
modules: Arena<CrateModuleId, ModuleData>,
macros: Arena<CrateMacroId, mbe::MacroRules>,
public_macros: FxHashMap<Name, CrateMacroId>,
macro_resolutions: FxHashMap<MacroCallId, (Crate, CrateMacroId)>,
problems: CrateDefMapProblems,
public_macros: FxHashMap<Name, MacroDefId>,
diagnostics: Vec<DefDiagnostic>,
}

impl std::ops::Index<CrateModuleId> for CrateDefMap {

@@ -96,18 +98,6 @@ impl std::ops::Index<CrateModuleId> for CrateDefMap {
}
}

impl std::ops::Index<CrateMacroId> for CrateDefMap {
type Output = mbe::MacroRules;
fn index(&self, id: CrateMacroId) -> &mbe::MacroRules {
&self.macros[id]
}
}

/// An ID of a macro, **local** to a specific crate
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct CrateMacroId(RawId);
impl_arena_id!(CrateMacroId);

/// An ID of a module, **local** to a specific crate
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct CrateModuleId(RawId);

@@ -119,28 +109,13 @@ pub(crate) struct ModuleData {
pub(crate) children: FxHashMap<Name, CrateModuleId>,
pub(crate) scope: ModuleScope,
/// None for root
pub(crate) declaration: Option<SourceItemId>,
pub(crate) declaration: Option<AstId<ast::Module>>,
/// None for inline modules.
///
/// Note that non-inline modules, by definition, live inside non-macro file.
pub(crate) definition: Option<FileId>,
}

#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct CrateDefMapProblems {
problems: Vec<(SourceItemId, Problem)>,
}

impl CrateDefMapProblems {
fn add(&mut self, source_item_id: SourceItemId, problem: Problem) {
self.problems.push((source_item_id, problem))
}

pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = (&'a SourceItemId, &'a Problem)> + 'a {
self.problems.iter().map(|(s, p)| (s, p))
}
}

#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct ModuleScope {
items: FxHashMap<Name, Resolution>,

@@ -153,6 +128,12 @@ impl ModuleScope {
pub fn get(&self, name: &Name) -> Option<&Resolution> {
self.items.get(name)
}
pub fn traits<'a>(&'a self) -> impl Iterator<Item = Trait> + 'a {
self.items.values().filter_map(|r| match r.def.take_types() {
Some(ModuleDef::Trait(t)) => Some(t),
_ => None,
})
}
}

#[derive(Debug, Clone, PartialEq, Eq, Default)]

@@ -210,10 +191,8 @@ impl CrateDefMap {
prelude: None,
root,
modules,
macros: Arena::default(),
public_macros: FxHashMap::default(),
macro_resolutions: FxHashMap::default(),
problems: CrateDefMapProblems::default(),
diagnostics: Vec::new(),
}
};
let def_map = collector::collect_defs(db, def_map);

@@ -224,10 +203,6 @@ impl CrateDefMap {
self.root
}

pub(crate) fn problems(&self) -> &CrateDefMapProblems {
&self.problems
}

pub(crate) fn mk_module(&self, module_id: CrateModuleId) -> Module {
Module { krate: self.krate, module_id }
}

@@ -240,19 +215,20 @@ impl CrateDefMap {
&self.extern_prelude
}

pub(crate) fn resolve_macro(
pub(crate) fn add_diagnostics(
&self,
macro_call_id: MacroCallId,
) -> Option<(Crate, CrateMacroId)> {
self.macro_resolutions.get(&macro_call_id).map(|&it| it)
db: &impl DefDatabase,
module: CrateModuleId,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink))
}

pub(crate) fn find_module_by_source(
&self,
file_id: HirFileId,
decl_id: Option<SourceFileItemId>,
decl_id: Option<AstId<ast::Module>>,
) -> Option<CrateModuleId> {
let decl_id = decl_id.map(|it| it.with_file_id(file_id));
let (module_id, _module_data) = self.modules.iter().find(|(_module_id, module_data)| {
if decl_id.is_some() {
module_data.declaration == decl_id

@@ -452,3 +428,46 @@ impl CrateDefMap {
}
}
}

mod diagnostics {
use relative_path::RelativePathBuf;
use ra_syntax::{AstPtr, ast};

use crate::{
AstId, DefDatabase,
nameres::CrateModuleId,
diagnostics::{DiagnosticSink, UnresolvedModule},
};

#[derive(Debug, PartialEq, Eq)]
pub(super) enum DefDiagnostic {
UnresolvedModule {
module: CrateModuleId,
declaration: AstId<ast::Module>,
candidate: RelativePathBuf,
},
}

impl DefDiagnostic {
pub(super) fn add_to(
&self,
db: &impl DefDatabase,
target_module: CrateModuleId,
sink: &mut DiagnosticSink,
) {
match self {
DefDiagnostic::UnresolvedModule { module, declaration, candidate } => {
if *module != target_module {
return;
}
let decl = declaration.to_node(db);
sink.push(UnresolvedModule {
file: declaration.file_id(),
decl: AstPtr::new(&decl),
candidate: candidate.clone(),
})
}
}
}
}
}
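In the nameres hunks above, per-crate problems become `DefDiagnostic`s stored inside the `CrateDefMap`; `add_diagnostics` replays them into a sink, and each variant filters itself by the module it was recorded for before converting itself into a user-facing diagnostic. A small sketch of that "store internally, filter and convert on demand" step with plain types (not the hir ones):

    // Sketch: internal, location-free diagnostics replayed for one module.
    #[derive(Debug, PartialEq, Eq)]
    enum DefDiagnostic {
        UnresolvedModule { module: u32, candidate: String },
    }

    impl DefDiagnostic {
        // Only report diagnostics that belong to the requested module,
        // converting them to user-facing strings at that point.
        fn add_to(&self, target_module: u32, sink: &mut Vec<String>) {
            match self {
                DefDiagnostic::UnresolvedModule { module, candidate } => {
                    if *module != target_module {
                        return;
                    }
                    sink.push(format!("unresolved module, candidate: {}", candidate));
                }
            }
        }
    }

    fn main() {
        let diagnostics = vec![
            DefDiagnostic::UnresolvedModule { module: 0, candidate: "foo.rs".into() },
            DefDiagnostic::UnresolvedModule { module: 3, candidate: "bar.rs".into() },
        ];
        let mut sink = Vec::new();
        for d in &diagnostics {
            d.add_to(0, &mut sink);
        }
        assert_eq!(sink, vec!["unresolved module, candidate: foo.rs".to_string()]);
    }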
@@ -3,17 +3,22 @@ use rustc_hash::FxHashMap;
use relative_path::RelativePathBuf;
use test_utils::tested_by;
use ra_db::FileId;
use ra_syntax::ast;

use crate::{
Function, Module, Struct, Enum, Const, Static, Trait, TypeAlias,
DefDatabase, HirFileId, Name, Path, Problem, Crate,
DefDatabase, HirFileId, Name, Path,
KnownName,
nameres::{Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode, raw},
ids::{AstItemDef, LocationCtx, MacroCallLoc, SourceItemId, MacroCallId},
nameres::{
Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode,
CrateDefMap, CrateModuleId, ModuleData,
diagnostics::DefDiagnostic,
raw,
},
ids::{AstItemDef, LocationCtx, MacroCallLoc, MacroCallId, MacroDefId},
AstId,
};

use super::{CrateDefMap, CrateModuleId, ModuleData, CrateMacroId};

pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
// populate external prelude
for dep in def_map.krate.dependencies(db) {

@@ -48,8 +53,8 @@ struct DefCollector<DB> {
def_map: CrateDefMap,
glob_imports: FxHashMap<CrateModuleId, Vec<(CrateModuleId, raw::ImportId)>>,
unresolved_imports: Vec<(CrateModuleId, raw::ImportId, raw::ImportData)>,
unexpanded_macros: Vec<(CrateModuleId, MacroCallId, Path, tt::Subtree)>,
global_macro_scope: FxHashMap<Name, CrateMacroId>,
unexpanded_macros: Vec<(CrateModuleId, AstId<ast::MacroCall>, Path)>,
global_macro_scope: FxHashMap<Name, MacroDefId>,
}

impl<'a, DB> DefCollector<&'a DB>

@@ -59,7 +64,7 @@ where
fn collect(&mut self) {
let crate_graph = self.db.crate_graph();
let file_id = crate_graph.crate_root(self.def_map.krate.crate_id());
let raw_items = self.db.raw_items(file_id);
let raw_items = self.db.raw_items(file_id.into());
let module_id = self.def_map.root;
self.def_map.modules[module_id].definition = Some(file_id);
ModCollector {

@@ -90,14 +95,11 @@ where
}
}

fn define_macro(&mut self, name: Name, tt: &tt::Subtree, export: bool) {
if let Ok(rules) = mbe::MacroRules::parse(tt) {
let macro_id = self.def_map.macros.alloc(rules);
if export {
self.def_map.public_macros.insert(name.clone(), macro_id);
}
self.global_macro_scope.insert(name, macro_id);
fn define_macro(&mut self, name: Name, macro_id: MacroDefId, export: bool) {
if export {
self.def_map.public_macros.insert(name.clone(), macro_id);
}
self.global_macro_scope.insert(name, macro_id);
}

fn resolve_imports(&mut self) -> ReachedFixedPoint {

@@ -293,7 +295,7 @@ where
let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new());
let mut resolved = Vec::new();
let mut res = ReachedFixedPoint::Yes;
macros.retain(|(module_id, call_id, path, tt)| {
macros.retain(|(module_id, ast_id, path)| {
if path.segments.len() != 2 {
return true;
}

@@ -309,47 +311,23 @@ where
res = ReachedFixedPoint::No;
let def_map = self.db.crate_def_map(krate);
if let Some(macro_id) = def_map.public_macros.get(&path.segments[1].name).cloned() {
resolved.push((*module_id, *call_id, (krate, macro_id), tt.clone()));
let call_id = MacroCallLoc { def: macro_id, ast_id: *ast_id }.id(self.db);
resolved.push((*module_id, call_id));
}
false
});

for (module_id, macro_call_id, macro_def_id, arg) in resolved {
self.collect_macro_expansion(module_id, macro_call_id, macro_def_id, arg);
for (module_id, macro_call_id) in resolved {
self.collect_macro_expansion(module_id, macro_call_id);
}
res
}

fn collect_macro_expansion(
&mut self,
module_id: CrateModuleId,
macro_call_id: MacroCallId,
macro_def_id: (Crate, CrateMacroId),
macro_arg: tt::Subtree,
) {
let (macro_krate, macro_id) = macro_def_id;
let dm;
let rules = if macro_krate == self.def_map.krate {
&self.def_map[macro_id]
} else {
dm = self.db.crate_def_map(macro_krate);
&dm[macro_id]
};
if let Ok(expansion) = rules.expand(&macro_arg) {
self.def_map.macro_resolutions.insert(macro_call_id, macro_def_id);
// XXX: this **does not** go through a database, because we can't
// identify macro_call without adding the whole state of name resolution
// as a parameter to the query.
//
// So, we run the queries "manually" and we must ensure that
// `db.hir_parse(macro_call_id)` returns the same source_file.
let file_id: HirFileId = macro_call_id.into();
let source_file = mbe::token_tree_to_ast_item_list(&expansion);

let raw_items = raw::RawItems::from_source_file(&source_file, file_id);
ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
.collect(raw_items.items())
}
fn collect_macro_expansion(&mut self, module_id: CrateModuleId, macro_call_id: MacroCallId) {
let file_id: HirFileId = macro_call_id.into();
let raw_items = self.db.raw_items(file_id);
ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
.collect(raw_items.items())
}

fn finish(self) -> CrateDefMap {

@@ -387,12 +365,9 @@ where
fn collect_module(&mut self, module: &raw::ModuleData) {
match module {
// inline module, just recurse
raw::ModuleData::Definition { name, items, source_item_id } => {
let module_id = self.push_child_module(
name.clone(),
source_item_id.with_file_id(self.file_id),
None,
);
raw::ModuleData::Definition { name, items, ast_id } => {
let module_id =
self.push_child_module(name.clone(), ast_id.with_file_id(self.file_id), None);
ModCollector {
def_collector: &mut *self.def_collector,
module_id,

@@ -402,28 +377,29 @@ where
.collect(&*items);
}
// out of line module, resovle, parse and recurse
raw::ModuleData::Declaration { name, source_item_id } => {
let source_item_id = source_item_id.with_file_id(self.file_id);
raw::ModuleData::Declaration { name, ast_id } => {
let ast_id = ast_id.with_file_id(self.file_id);
let is_root = self.def_collector.def_map.modules[self.module_id].parent.is_none();
let (file_ids, problem) =
resolve_submodule(self.def_collector.db, self.file_id, name, is_root);

if let Some(problem) = problem {
self.def_collector.def_map.problems.add(source_item_id, problem)
}

if let Some(&file_id) = file_ids.first() {
let module_id =
self.push_child_module(name.clone(), source_item_id, Some(file_id));
let raw_items = self.def_collector.db.raw_items(file_id);
ModCollector {
def_collector: &mut *self.def_collector,
module_id,
file_id: file_id.into(),
raw_items: &raw_items,
match resolve_submodule(self.def_collector.db, self.file_id, name, is_root) {
Ok(file_id) => {
let module_id = self.push_child_module(name.clone(), ast_id, Some(file_id));
let raw_items = self.def_collector.db.raw_items(file_id.into());
ModCollector {
def_collector: &mut *self.def_collector,
module_id,
file_id: file_id.into(),
raw_items: &raw_items,
}
.collect(raw_items.items())
}
.collect(raw_items.items())
}
Err(candidate) => self.def_collector.def_map.diagnostics.push(
DefDiagnostic::UnresolvedModule {
module: self.module_id,
declaration: ast_id,
candidate,
},
),
};
}
}
}

@@ -431,7 +407,7 @@ where
fn push_child_module(
&mut self,
name: Name,
declaration: SourceItemId,
declaration: AstId<ast::Module>,
definition: Option<FileId>,
) -> CrateModuleId {
let modules = &mut self.def_collector.def_map.modules;

@@ -453,23 +429,24 @@ where
fn define_def(&mut self, def: &raw::DefData) {
let module = Module { krate: self.def_collector.def_map.krate, module_id: self.module_id };
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id.into());
macro_rules! id {
() => {
AstItemDef::from_source_item_id_unchecked(ctx, def.source_item_id)

macro_rules! def {
($kind:ident, $ast_id:ident) => {
$kind { id: AstItemDef::from_ast_id(ctx, $ast_id) }.into()
};
}
let name = def.name.clone();
let def: PerNs<ModuleDef> = match def.kind {
raw::DefKind::Function => PerNs::values(Function { id: id!() }.into()),
raw::DefKind::Struct => {
let s = Struct { id: id!() }.into();
raw::DefKind::Function(ast_id) => PerNs::values(def!(Function, ast_id)),
raw::DefKind::Struct(ast_id) => {
let s = def!(Struct, ast_id);
PerNs::both(s, s)
}
raw::DefKind::Enum => PerNs::types(Enum { id: id!() }.into()),
raw::DefKind::Const => PerNs::values(Const { id: id!() }.into()),
raw::DefKind::Static => PerNs::values(Static { id: id!() }.into()),
raw::DefKind::Trait => PerNs::types(Trait { id: id!() }.into()),
raw::DefKind::TypeAlias => PerNs::types(TypeAlias { id: id!() }.into()),
raw::DefKind::Enum(ast_id) => PerNs::types(def!(Enum, ast_id)),
raw::DefKind::Const(ast_id) => PerNs::values(def!(Const, ast_id)),
raw::DefKind::Static(ast_id) => PerNs::values(def!(Static, ast_id)),
raw::DefKind::Trait(ast_id) => PerNs::types(def!(Trait, ast_id)),
raw::DefKind::TypeAlias(ast_id) => PerNs::types(def!(TypeAlias, ast_id)),
};
let resolution = Resolution { def, import: None };
self.def_collector.update(self.module_id, None, &[(name, resolution)])

@@ -479,39 +456,27 @@ where
// Case 1: macro rules, define a macro in crate-global mutable scope
if is_macro_rules(&mac.path) {
if let Some(name) = &mac.name {
self.def_collector.define_macro(name.clone(), &mac.arg, mac.export)
let macro_id = MacroDefId(mac.ast_id.with_file_id(self.file_id));
self.def_collector.define_macro(name.clone(), macro_id, mac.export)
}
return;
}

let source_item_id = SourceItemId { file_id: self.file_id, item_id: mac.source_item_id };
let macro_call_id = MacroCallLoc {
module: Module { krate: self.def_collector.def_map.krate, module_id: self.module_id },
source_item_id,
}
.id(self.def_collector.db);
let ast_id = mac.ast_id.with_file_id(self.file_id);

// Case 2: try to expand macro_rules from this crate, triggering
// recursive item collection.
if let Some(&macro_id) =
mac.path.as_ident().and_then(|name| self.def_collector.global_macro_scope.get(name))
{
self.def_collector.collect_macro_expansion(
self.module_id,
macro_call_id,
(self.def_collector.def_map.krate, macro_id),
mac.arg.clone(),
);
let macro_call_id = MacroCallLoc { def: macro_id, ast_id }.id(self.def_collector.db);

self.def_collector.collect_macro_expansion(self.module_id, macro_call_id);
return;
}

// Case 3: path to a macro from another crate, expand during name resolution
self.def_collector.unexpanded_macros.push((
self.module_id,
macro_call_id,
mac.path.clone(),
mac.arg.clone(),
))
self.def_collector.unexpanded_macros.push((self.module_id, ast_id, mac.path.clone()))
}
}

@@ -524,7 +489,7 @@ fn resolve_submodule(
file_id: HirFileId,
name: &Name,
is_root: bool,
) -> (Vec<FileId>, Option<Problem>) {
) -> Result<FileId, RelativePathBuf> {
// FIXME: handle submodules of inline modules properly
let file_id = file_id.original_file(db);
let source_root_id = db.file_source_root(file_id);

@@ -545,17 +510,10 @@ fn resolve_submodule(
candidates.push(file_dir_mod.clone());
};
let sr = db.source_root(source_root_id);
let points_to = candidates
.into_iter()
.filter_map(|path| sr.files.get(&path))
.map(|&it| it)
.collect::<Vec<_>>();
let problem = if points_to.is_empty() {
Some(Problem::UnresolvedModule {
candidate: if is_dir_owner { file_mod } else { file_dir_mod },
})
} else {
None
};
(points_to, problem)
let mut points_to = candidates.into_iter().filter_map(|path| sr.files.get(&path)).map(|&it| it);
// FIXME: handle ambiguity
match points_to.next() {
Some(file_id) => Ok(file_id),
None => Err(if is_dir_owner { file_mod } else { file_dir_mod }),
}
}
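`resolve_submodule` now returns `Result<FileId, RelativePathBuf>`: it builds the candidate paths for an out-of-line module, takes the first one that actually exists, and otherwise reports the preferred candidate so the IDE can offer to create it. A rough sketch of that candidate logic with plain strings; the real candidate set also depends on whether the declaring file owns a directory (`is_dir_owner`), which this simplified version ignores:

    // Sketch: resolve `mod foo;` declared in src/parent.rs to foo.rs or foo/mod.rs.
    use std::collections::HashSet;

    fn resolve_submodule(
        existing_files: &HashSet<String>,
        parent_dir: &str,
        name: &str,
    ) -> Result<String, String> {
        let file_mod = format!("{}/{}.rs", parent_dir, name);
        let dir_mod = format!("{}/{}/mod.rs", parent_dir, name);
        let candidates = [file_mod.clone(), dir_mod];
        match candidates.iter().find(|c| existing_files.contains(*c)) {
            // First candidate that actually exists wins (ambiguity is ignored here).
            Some(found) => Ok(found.clone()),
            // Otherwise report the preferred candidate as the "unresolved module" fix.
            None => Err(file_mod),
        }
    }

    fn main() {
        let mut files = HashSet::new();
        files.insert("src/foo/mod.rs".to_string());
        assert_eq!(resolve_submodule(&files, "src", "foo"), Ok("src/foo/mod.rs".to_string()));
        assert_eq!(resolve_submodule(&files, "src", "bar"), Err("src/bar.rs".to_string()));
    }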
@ -4,7 +4,6 @@ use std::{
|
|||
};
|
||||
|
||||
use test_utils::tested_by;
|
||||
use ra_db::FileId;
|
||||
use ra_arena::{Arena, impl_arena_id, RawId, map::ArenaMap};
|
||||
use ra_syntax::{
|
||||
AstNode, SourceFile, AstPtr, TreeArc,
|
||||
|
@ -13,9 +12,13 @@ use ra_syntax::{
|
|||
|
||||
use crate::{
|
||||
DefDatabase, Name, AsName, Path, HirFileId, ModuleSource,
|
||||
ids::{SourceFileItemId, SourceFileItems},
|
||||
AstIdMap, FileAstId,
|
||||
};
|
||||
|
||||
/// `RawItems` is a set of top-level items in a file (except for impls).
|
||||
///
|
||||
/// It is the input to name resolution algorithm. `RawItems` are not invalidated
|
||||
/// on most edits.
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
pub struct RawItems {
|
||||
modules: Arena<Module, ModuleData>,
|
||||
|
@ -32,11 +35,11 @@ pub struct ImportSourceMap {
|
|||
}
|
||||
|
||||
impl ImportSourceMap {
|
||||
pub(crate) fn insert(&mut self, import: ImportId, segment: &ast::PathSegment) {
|
||||
fn insert(&mut self, import: ImportId, segment: &ast::PathSegment) {
|
||||
self.map.insert(import, AstPtr::new(segment))
|
||||
}
|
||||
|
||||
pub fn get(&self, source: &ModuleSource, import: ImportId) -> TreeArc<ast::PathSegment> {
|
||||
pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> TreeArc<ast::PathSegment> {
|
||||
let file = match source {
|
||||
ModuleSource::SourceFile(file) => &*file,
|
||||
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
|
||||
|
@ -47,40 +50,27 @@ impl ImportSourceMap {
|
|||
}
|
||||
|
||||
impl RawItems {
|
||||
pub(crate) fn raw_items_query(db: &impl DefDatabase, file_id: FileId) -> Arc<RawItems> {
|
||||
pub(crate) fn raw_items_query(db: &impl DefDatabase, file_id: HirFileId) -> Arc<RawItems> {
|
||||
db.raw_items_with_source_map(file_id).0
|
||||
}
|
||||
|
||||
pub(crate) fn raw_items_with_source_map_query(
|
||||
db: &impl DefDatabase,
|
||||
file_id: FileId,
|
||||
file_id: HirFileId,
|
||||
) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
|
||||
let mut collector = RawItemsCollector {
|
||||
raw_items: RawItems::default(),
|
||||
source_file_items: db.file_items(file_id.into()),
|
||||
source_ast_id_map: db.ast_id_map(file_id.into()),
|
||||
source_map: ImportSourceMap::default(),
|
||||
};
|
||||
let source_file = db.parse(file_id);
|
||||
let source_file = db.hir_parse(file_id);
|
||||
collector.process_module(None, &*source_file);
|
||||
(Arc::new(collector.raw_items), Arc::new(collector.source_map))
|
||||
}
|
||||
|
||||
pub(crate) fn items(&self) -> &[RawItem] {
|
||||
pub(super) fn items(&self) -> &[RawItem] {
|
||||
&self.items
|
||||
}
|
||||
|
||||
// We can't use queries during name resolution for fear of cycles, so this
|
||||
// is a query-less variant of the above function.
|
||||
pub(crate) fn from_source_file(source_file: &SourceFile, file_id: HirFileId) -> RawItems {
|
||||
let source_file_items = SourceFileItems::from_source_file(source_file, file_id);
|
||||
let mut collector = RawItemsCollector {
|
||||
raw_items: RawItems::default(),
|
||||
source_file_items: Arc::new(source_file_items),
|
||||
source_map: ImportSourceMap::default(),
|
||||
};
|
||||
collector.process_module(None, &*source_file);
|
||||
collector.raw_items
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<Module> for RawItems {
|
||||
|
@ -112,7 +102,7 @@ impl Index<Macro> for RawItems {
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) enum RawItem {
pub(super) enum RawItem {
Module(Module),
Import(ImportId),
Def(Def),
@ -120,13 +110,13 @@ pub(crate) enum RawItem {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Module(RawId);
pub(super) struct Module(RawId);
impl_arena_id!(Module);

#[derive(Debug, PartialEq, Eq)]
pub(crate) enum ModuleData {
Declaration { name: Name, source_item_id: SourceFileItemId },
Definition { name: Name, source_item_id: SourceFileItemId, items: Vec<RawItem> },
pub(super) enum ModuleData {
Declaration { name: Name, ast_id: FileAstId<ast::Module> },
Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> },
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -135,51 +125,49 @@ impl_arena_id!(ImportId);

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImportData {
pub(crate) path: Path,
pub(crate) alias: Option<Name>,
pub(crate) is_glob: bool,
pub(crate) is_prelude: bool,
pub(crate) is_extern_crate: bool,
pub(super) path: Path,
pub(super) alias: Option<Name>,
pub(super) is_glob: bool,
pub(super) is_prelude: bool,
pub(super) is_extern_crate: bool,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Def(RawId);
pub(super) struct Def(RawId);
impl_arena_id!(Def);

#[derive(Debug, PartialEq, Eq)]
pub(crate) struct DefData {
pub(crate) source_item_id: SourceFileItemId,
pub(crate) name: Name,
pub(crate) kind: DefKind,
pub(super) struct DefData {
pub(super) name: Name,
pub(super) kind: DefKind,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) enum DefKind {
Function,
Struct,
Enum,
Const,
Static,
Trait,
TypeAlias,
pub(super) enum DefKind {
Function(FileAstId<ast::FnDef>),
Struct(FileAstId<ast::StructDef>),
Enum(FileAstId<ast::EnumDef>),
Const(FileAstId<ast::ConstDef>),
Static(FileAstId<ast::StaticDef>),
Trait(FileAstId<ast::TraitDef>),
TypeAlias(FileAstId<ast::TypeAliasDef>),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Macro(RawId);
pub(super) struct Macro(RawId);
impl_arena_id!(Macro);

#[derive(Debug, PartialEq, Eq)]
pub(crate) struct MacroData {
pub(crate) source_item_id: SourceFileItemId,
pub(crate) path: Path,
pub(crate) name: Option<Name>,
pub(crate) arg: tt::Subtree,
pub(crate) export: bool,
pub(super) struct MacroData {
pub(super) ast_id: FileAstId<ast::MacroCall>,
pub(super) path: Path,
pub(super) name: Option<Name>,
pub(super) export: bool,
}

struct RawItemsCollector {
raw_items: RawItems,
source_file_items: Arc<SourceFileItems>,
source_ast_id_map: Arc<AstIdMap>,
source_map: ImportSourceMap,
}

@ -211,18 +199,31 @@ impl RawItemsCollector {
// impls don't participate in name resolution
return;
}
ast::ModuleItemKind::StructDef(it) => (DefKind::Struct, it.name()),
ast::ModuleItemKind::EnumDef(it) => (DefKind::Enum, it.name()),
ast::ModuleItemKind::FnDef(it) => (DefKind::Function, it.name()),
ast::ModuleItemKind::TraitDef(it) => (DefKind::Trait, it.name()),
ast::ModuleItemKind::TypeAliasDef(it) => (DefKind::TypeAlias, it.name()),
ast::ModuleItemKind::ConstDef(it) => (DefKind::Const, it.name()),
ast::ModuleItemKind::StaticDef(it) => (DefKind::Static, it.name()),
ast::ModuleItemKind::StructDef(it) => {
(DefKind::Struct(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::EnumDef(it) => {
(DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::FnDef(it) => {
(DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::TraitDef(it) => {
(DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::TypeAliasDef(it) => {
(DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::ConstDef(it) => {
(DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::StaticDef(it) => {
(DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name())
}
};
if let Some(name) = name {
let name = name.as_name();
let source_item_id = self.source_file_items.id_of_unchecked(item.syntax());
let def = self.raw_items.defs.alloc(DefData { name, kind, source_item_id });
let def = self.raw_items.defs.alloc(DefData { name, kind });
self.push_item(current_module, RawItem::Def(def))
}
}
@ -232,10 +233,9 @@ impl RawItemsCollector {
Some(it) => it.as_name(),
None => return,
};
let source_item_id = self.source_file_items.id_of_unchecked(module.syntax());
let ast_id = self.source_ast_id_map.ast_id(module);
if module.has_semi() {
let item =
self.raw_items.modules.alloc(ModuleData::Declaration { name, source_item_id });
let item = self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id });
self.push_item(current_module, RawItem::Module(item));
return;
}
@ -243,7 +243,7 @@ impl RawItemsCollector {
if let Some(item_list) = module.item_list() {
let item = self.raw_items.modules.alloc(ModuleData::Definition {
name,
source_item_id,
ast_id,
items: Vec::new(),
});
self.process_module(Some(item), item_list);
@ -291,18 +291,15 @@ impl RawItemsCollector {
}

fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) {
let (path, arg) = match (
m.path().and_then(Path::from_ast),
m.token_tree().and_then(mbe::ast_to_token_tree),
) {
(Some(path), Some((token_tree, _token_map))) => (path, token_tree),
let path = match m.path().and_then(Path::from_ast) {
Some(it) => it,
_ => return,
};

let name = m.name().map(|it| it.as_name());
let source_item_id = self.source_file_items.id_of_unchecked(m.syntax());
let ast_id = self.source_ast_id_map.ast_id(m);
let export = m.has_atom_attr("macro_export");
let m = self.raw_items.macros.alloc(MacroData { source_item_id, path, arg, name, export });
let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export });
self.push_item(current_module, RawItem::Macro(m));
}
@ -552,3 +552,22 @@ foo: v
"###
);
}

#[test]
fn unresolved_module_diagnostics() {
let diagnostics = MockDatabase::with_files(
r"
//- /lib.rs
mod foo;
mod bar;
mod baz {}
//- /foo.rs
",
)
.diagnostics();

assert_snapshot_matches!(diagnostics, @r###"
"mod bar;": unresolved module
"###
);
}
@ -90,34 +90,44 @@ fn adding_inner_items_should_not_invalidate_def_map() {
);
}

// It would be awesome to make this work, but it's unclear how
#[test]
#[ignore]
fn typing_inside_a_function_inside_a_macro_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
fn typing_inside_a_macro_should_not_invalidate_def_map() {
let (mut db, pos) = MockDatabase::with_position(
"
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;

use crate::foo::bar::Baz;

//- /foo/mod.rs
pub mod bar;

//- /foo/bar.rs
<|>
salsa::query_group! {
trait Baz {
fn foo() -> i32 { 1 + 1 }
}
}
",
"
salsa::query_group! {
trait Baz {
fn foo() -> i32 { 92 }
}
}
m!(X);
",
);
{
let events = db.log_executed(|| {
let module = crate::source_binder::module_from_file_id(&db, pos.file_id).unwrap();
let decls = module.declarations(&db);
assert_eq!(decls.len(), 1);
});
assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
}
db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));

{
let events = db.log_executed(|| {
let module = crate::source_binder::module_from_file_id(&db, pos.file_id).unwrap();
let decls = module.declarations(&db);
assert_eq!(decls.len(), 1);
});
assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
}
}
@ -11,7 +11,7 @@ use crate::{
generics::GenericParams,
expr::{scope::{ExprScopes, ScopeId}, PatId, Body},
impl_block::ImplBlock,
path::Path,
path::Path, Trait
};

#[derive(Debug, Clone, Default)]
@ -175,6 +175,21 @@ impl Resolver {
names
}

pub(crate) fn traits_in_scope<'a>(&'a self) -> impl Iterator<Item = Trait> + 'a {
// FIXME prelude
self.scopes
.iter()
.rev()
.flat_map(|scope| {
match scope {
Scope::ModuleScope(m) => Some(m.crate_def_map[m.module_id].scope.traits()),
_ => None,
}
.into_iter()
})
.flatten()
}

fn module(&self) -> Option<(&CrateDefMap, CrateModuleId)> {
self.scopes.iter().rev().find_map(|scope| match scope {
Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)),
@ -15,8 +15,8 @@ use ra_syntax::{
use crate::{
HirDatabase, Function, Struct, Enum,
AsName, Module, HirFileId, Crate, Trait, Resolver,
ids::{LocationCtx, SourceFileItemId},
expr
ids::LocationCtx,
expr, AstId
};

/// Locates the module by `FileId`. Picks topmost module in the file.
@ -54,8 +54,8 @@ fn module_from_inline(
) -> Option<Module> {
assert!(!module.has_semi());
let file_id = file_id.into();
let file_items = db.file_items(file_id);
let item_id = file_items.id_of(file_id, module.syntax());
let ast_id_map = db.ast_id_map(file_id);
let item_id = ast_id_map.ast_id(module).with_file_id(file_id);
module_from_source(db, file_id, Some(item_id))
}

@ -75,7 +75,7 @@ pub fn module_from_child_node(
fn module_from_source(
db: &impl HirDatabase,
file_id: HirFileId,
decl_id: Option<SourceFileItemId>,
decl_id: Option<AstId<ast::Module>>,
) -> Option<Module> {
let source_root_id = db.file_source_root(file_id.as_original_file());
db.source_root_crates(source_root_id).iter().map(|&crate_id| Crate { crate_id }).find_map(
crates/ra_hir/src/source_id.rs (new file, 150 lines)
@ -0,0 +1,150 @@
use std::{marker::PhantomData, sync::Arc, hash::{Hash, Hasher}};

use ra_arena::{Arena, RawId, impl_arena_id};
use ra_syntax::{SyntaxNodePtr, TreeArc, SyntaxNode, SourceFile, AstNode, ast};

use crate::{HirFileId, DefDatabase};

/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
#[derive(Debug)]
pub(crate) struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}

impl<N: AstNode> Clone for AstId<N> {
fn clone(&self) -> AstId<N> {
*self
}
}
impl<N: AstNode> Copy for AstId<N> {}

impl<N: AstNode> PartialEq for AstId<N> {
fn eq(&self, other: &Self) -> bool {
(self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id)
}
}
impl<N: AstNode> Eq for AstId<N> {}
impl<N: AstNode> Hash for AstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
(self.file_id, self.file_ast_id).hash(hasher);
}
}

impl<N: AstNode> AstId<N> {
pub(crate) fn file_id(&self) -> HirFileId {
self.file_id
}

pub(crate) fn to_node(&self, db: &impl DefDatabase) -> TreeArc<N> {
let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw);
N::cast(&syntax_node).unwrap().to_owned()
}
}

/// `FileAstId` points to an AST node in a specific file.
#[derive(Debug)]
pub(crate) struct FileAstId<N: AstNode> {
raw: ErasedFileAstId,
_ty: PhantomData<N>,
}

impl<N: AstNode> Clone for FileAstId<N> {
fn clone(&self) -> FileAstId<N> {
*self
}
}
impl<N: AstNode> Copy for FileAstId<N> {}

impl<N: AstNode> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
impl<N: AstNode> Eq for FileAstId<N> {}
impl<N: AstNode> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}

impl<N: AstNode> FileAstId<N> {
pub(crate) fn with_file_id(self, file_id: HirFileId) -> AstId<N> {
AstId { file_id, file_ast_id: self }
}
}
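How the two ids compose: a `FileAstId<N>` is only meaningful relative to one file, and `with_file_id` pairs it with a `HirFileId` to get an `AstId<N>` that can be stored in salsa and resolved back to a node later. A minimal usage sketch, assuming a `db: &impl DefDatabase`, a `file_id: HirFileId`, and some `fn_def: &ast::FnDef` are already in hand (those names are assumptions, not taken from the change):

// Sketch only; `db`, `file_id` and `fn_def` come from the caller.
let map = db.ast_id_map(file_id);                        // Arc<AstIdMap> for this file
let local: FileAstId<ast::FnDef> = map.ast_id(fn_def);   // stable within the file
let global: AstId<ast::FnDef> = local.with_file_id(file_id);
let node: TreeArc<ast::FnDef> = global.to_node(db);      // back to syntax through the arena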

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(RawId);
impl_arena_id!(ErasedFileAstId);

/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Debug, PartialEq, Eq)]
pub struct AstIdMap {
arena: Arena<ErasedFileAstId, SyntaxNodePtr>,
}

impl AstIdMap {
pub(crate) fn ast_id_map_query(db: &impl DefDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let source_file = db.hir_parse(file_id);
Arc::new(AstIdMap::from_source_file(&source_file))
}

pub(crate) fn file_item_query(
db: &impl DefDatabase,
file_id: HirFileId,
ast_id: ErasedFileAstId,
) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(file_id);
db.ast_id_map(file_id).arena[ast_id].to_node(&source_file).to_owned()
}

pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let ptr = SyntaxNodePtr::new(item.syntax());
let raw = match self.arena.iter().find(|(_id, i)| **i == ptr) {
Some((it, _)) => it,
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item.syntax(),
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
};

FileAstId { raw, _ty: PhantomData }
}

fn from_source_file(source_file: &SourceFile) -> AstIdMap {
let mut res = AstIdMap { arena: Arena::default() };
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(source_file.syntax(), |it| {
if let Some(module_item) = ast::ModuleItem::cast(it) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
}
});
res
}

fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
}

/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) {
let mut curr_layer = vec![node];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {
next_layer.extend(node.children());
f(node);
});
std::mem::swap(&mut curr_layer, &mut next_layer);
}
}
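A rough illustration of the id-stability property this buys (a sketch, not a literal excerpt from the new file):

// For a file containing
struct A;              // visited in the first layer, allocated first
trait T { fn f(); }    // `trait T` is allocated next; the nested `fn f` only in a later layer
// Adding `fn g()` inside `T` appends a fresh id after all existing ones, so the
// ids of `struct A`, `trait T` and `fn f` are unchanged and salsa values keyed
// on their AstIds stay valid across the edit.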

crates/ra_hir/src/traits.rs (new file, 52 lines)
@ -0,0 +1,52 @@
//! HIR for trait definitions.

use std::sync::Arc;

use ra_syntax::ast::{self, NameOwner};

use crate::{Function, Const, TypeAlias, Name, DefDatabase, Trait, ids::LocationCtx, name::AsName};

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
name: Option<Name>,
items: Vec<TraitItem>,
}

impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: Trait) -> Arc<TraitData> {
let (file_id, node) = tr.source(db);
let name = node.name().map(|n| n.as_name());
let module = tr.module(db);
let ctx = LocationCtx::new(db, module, file_id);
let items = if let Some(item_list) = node.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node.kind() {
ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items })
}

pub(crate) fn name(&self) -> &Option<Name> {
&self.name
}

pub(crate) fn items(&self) -> &[TraitItem] {
&self.items
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TraitItem {
Function(Function),
Const(Const),
TypeAlias(TypeAlias),
// Existential
}
impl_froms!(TraitItem: Function, Const, TypeAlias);
@ -14,7 +14,7 @@ pub(crate) mod display;
use std::sync::Arc;
use std::{fmt, mem};

use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase};
use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait};

pub(crate) use lower::{TypableDef, CallableDef, type_for_def, type_for_field, callable_item_sig};
pub(crate) use infer::{infer, InferenceResult, InferTy};
@ -91,7 +91,7 @@ pub enum TypeCtor {
/// A nominal type with (maybe 0) type parameters. This might be a primitive
/// type like `bool`, a struct, tuple, function pointer, reference or
/// several other things.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct ApplicationTy {
pub ctor: TypeCtor,
pub parameters: Substs,
@ -103,7 +103,7 @@ pub struct ApplicationTy {
/// the same thing (but in a different way).
///
/// This should be cheap to clone.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum Ty {
/// A nominal type with (maybe 0) type parameters. This might be a primitive
/// type like `bool`, a struct, tuple, function pointer, reference or
@ -132,7 +132,7 @@ pub enum Ty {
}

/// A list of substitutions for generic parameters.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Substs(Arc<[Ty]>);

impl Substs {
@ -169,6 +169,21 @@ impl Substs {
}
}

/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait.
/// Name to be bikeshedded: TraitBound? TraitImplements?
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TraitRef {
/// FIXME name?
trait_: Trait,
substs: Substs,
}

impl TraitRef {
pub fn self_ty(&self) -> &Ty {
&self.substs.0[0]
}
}
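A concrete reading of the doc comment above, as a sketch with hypothetical names rather than code from the change:

// For a bound like `S: Foo<u32>` (both names hypothetical), the TraitRef
// conceptually holds
//     trait_: Foo
//     substs: [S, u32]
// i.e. the implementing `Self` type comes first, which is why `self_ty()` just
// returns `&substs.0[0]`. For a trait without type parameters the substs are
// built with `Substs::single(self_ty)`, as `lookup_trait_method` does below.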

/// A function signature as seen by type inference: Several parameter types and
/// one return type.
#[derive(Clone, PartialEq, Eq, Debug)]
@ -36,7 +36,9 @@ use crate::{
path::{GenericArgs, GenericArg},
adt::VariantDef,
resolve::{Resolver, Resolution},
nameres::Namespace
nameres::Namespace,
ty::infer::diagnostics::InferenceDiagnostic,
diagnostics::DiagnosticSink,
};
use super::{Ty, TypableDef, Substs, primitive, op, FnSig, ApplicationTy, TypeCtor};

@ -96,6 +98,7 @@ pub struct InferenceResult {
field_resolutions: FxHashMap<ExprId, StructField>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
diagnostics: Vec<InferenceDiagnostic>,
pub(super) type_of_expr: ArenaMap<ExprId, Ty>,
pub(super) type_of_pat: ArenaMap<PatId, Ty>,
}
@ -113,6 +116,14 @@ impl InferenceResult {
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<ImplItem> {
self.assoc_resolutions.get(&id.into()).map(|it| *it)
}
pub(crate) fn add_diagnostics(
&self,
db: &impl HirDatabase,
owner: Function,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
}
}

impl Index<ExprId> for InferenceResult {
@ -143,6 +154,7 @@ struct InferenceContext<'a, D: HirDatabase> {
assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
type_of_expr: ArenaMap<ExprId, Ty>,
type_of_pat: ArenaMap<PatId, Ty>,
diagnostics: Vec<InferenceDiagnostic>,
/// The return type of the function being inferred.
return_ty: Ty,
}
@ -155,6 +167,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
assoc_resolutions: FxHashMap::default(),
type_of_expr: ArenaMap::default(),
type_of_pat: ArenaMap::default(),
diagnostics: Vec::default(),
var_unification_table: InPlaceUnificationTable::new(),
return_ty: Ty::Unknown, // set in collect_fn_signature
db,
@ -181,6 +194,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
assoc_resolutions: self.assoc_resolutions,
type_of_expr: expr_types,
type_of_pat: pat_types,
diagnostics: self.diagnostics,
}
}

@ -807,7 +821,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}
Expr::MethodCall { receiver, args, method_name, generic_args } => {
let receiver_ty = self.infer_expr(*receiver, &Expectation::none());
let resolved = receiver_ty.clone().lookup_method(self.db, method_name);
let resolved =
receiver_ty.clone().lookup_method(self.db, method_name, &self.resolver);
let (derefed_receiver_ty, method_ty, def_generics) = match resolved {
Some((ty, func)) => {
self.write_method_resolution(tgt_expr, func);
@ -915,9 +930,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Expr::StructLit { path, fields, spread } => {
let (ty, def_id) = self.resolve_variant(path.as_ref());
let substs = ty.substs().unwrap_or_else(Substs::empty);
for field in fields {
for (field_idx, field) in fields.into_iter().enumerate() {
let field_ty = def_id
.and_then(|it| it.field(self.db, &field.name))
.and_then(|it| match it.field(self.db, &field.name) {
Some(field) => Some(field),
None => {
self.diagnostics.push(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
})
.map_or(Ty::Unknown, |field| field.ty(self.db))
.subst(&substs);
self.infer_expr(field.expr, &Expectation::has_type(field_ty));
@ -1244,3 +1268,29 @@ impl Expectation {
Expectation { ty: Ty::Unknown }
}
}

mod diagnostics {
use crate::{expr::ExprId, diagnostics::{DiagnosticSink, NoSuchField}, HirDatabase, Function};

#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum InferenceDiagnostic {
NoSuchField { expr: ExprId, field: usize },
}

impl InferenceDiagnostic {
pub(super) fn add_to(
&self,
db: &impl HirDatabase,
owner: Function,
sink: &mut DiagnosticSink,
) {
match self {
InferenceDiagnostic::NoSuchField { expr, field } => {
let (file, _) = owner.source(db);
let field = owner.body_source_map(db).field_syntax(*expr, *field);
sink.push(NoSuchField { file, field })
}
}
}
}
}
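The end-to-end flow for these diagnostics, in a minimal sketch (how the caller obtains the `InferenceResult` is assumed here, it is not shown in this hunk): inference records a position-independent `InferenceDiagnostic` while checking a struct literal, and only when a consumer asks for diagnostics is it mapped back to syntax and handed to the sink.

// Sketch only; obtaining `result` is up to the caller.
fn forward_inference_diagnostics(
    db: &impl HirDatabase,
    owner: Function,
    result: &InferenceResult,
    sink: &mut DiagnosticSink,
) {
    // Each NoSuchField pushed in infer_expr above is resolved through the body
    // source map inside add_to() and reaches the sink with real syntax attached.
    result.add_diagnostics(db, owner, sink);
}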

@ -8,12 +8,12 @@ use rustc_hash::FxHashMap;

use crate::{
HirDatabase, Module, Crate, Name, Function, Trait,
ids::TraitId,
impl_block::{ImplId, ImplBlock, ImplItem},
ty::{Ty, TypeCtor},
nameres::CrateModuleId,
nameres::CrateModuleId, resolve::Resolver, traits::TraitItem

};
use super::{ TraitRef, Substs};

/// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@ -38,7 +38,7 @@ pub struct CrateImplBlocks {
/// To make sense of the CrateModuleIds, we need the source root.
krate: Crate,
impls: FxHashMap<TyFingerprint, Vec<(CrateModuleId, ImplId)>>,
impls_by_trait: FxHashMap<TraitId, Vec<(CrateModuleId, ImplId)>>,
impls_by_trait: FxHashMap<Trait, Vec<(CrateModuleId, ImplId)>>,
}

impl CrateImplBlocks {
@ -56,8 +56,7 @@ impl CrateImplBlocks {
&'a self,
tr: &Trait,
) -> impl Iterator<Item = ImplBlock> + 'a {
let id = tr.id;
self.impls_by_trait.get(&id).into_iter().flat_map(|i| i.iter()).map(
self.impls_by_trait.get(&tr).into_iter().flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| {
let module = Module { krate: self.krate, module_id: *module_id };
ImplBlock::from_id(module, *impl_id)
@ -73,18 +72,18 @@ impl CrateImplBlocks {

let target_ty = impl_block.target_ty(db);

if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
self.impls
.entry(target_ty_fp)
.or_insert_with(Vec::new)
.push((module.module_id, impl_id));
}

if let Some(tr) = impl_block.target_trait(db) {
self.impls_by_trait
.entry(tr.id)
.entry(tr)
.or_insert_with(Vec::new)
.push((module.module_id, impl_id));
} else {
if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
self.impls
.entry(target_ty_fp)
.or_insert_with(Vec::new)
.push((module.module_id, impl_id));
}
}
}

@ -109,6 +108,20 @@ impl CrateImplBlocks {
}
}

/// Rudimentary check whether an impl exists for a given type and trait; this
/// will actually be done by chalk.
pub(crate) fn implements(db: &impl HirDatabase, trait_ref: TraitRef) -> bool {
// FIXME use all trait impls in the whole crate graph
let krate = trait_ref.trait_.module(db).krate(db);
let krate = match krate {
Some(krate) => krate,
None => return false,
};
let crate_impl_blocks = db.impls_in_crate(krate);
let mut impl_blocks = crate_impl_blocks.lookup_impl_blocks_for_trait(&trait_ref.trait_);
impl_blocks.any(|impl_block| &impl_block.target_ty(db) == trait_ref.self_ty())
}
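What this placeholder answers, in a minimal sketch (with hypothetical `some_trait: Trait` and receiver type `ty: Ty`; it mirrors the call made in `lookup_trait_method` below):

// Sketch only: build a TraitRef whose substs carry just the Self type and ask
// the `implements` query whether any impl block in the trait's defining crate
// targets exactly that type. No unification or where-clause reasoning yet.
let trait_ref = TraitRef { trait_: some_trait, substs: Substs::single(ty) };
let _found: bool = db.implements(trait_ref);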

fn def_crate(db: &impl HirDatabase, ty: &Ty) -> Option<Crate> {
match ty {
Ty::Apply(a_ty) => match a_ty.ctor {
@ -120,20 +133,64 @@ fn def_crate(db: &impl HirDatabase, ty: &Ty) -> Option<Crate> {
}

impl Ty {
// FIXME: cache this as a query?
// - if so, what signature? (TyFingerprint, Name)?
// - or maybe cache all names and def_ids of methods per fingerprint?
/// Look up the method with the given name, returning the actual autoderefed
/// receiver type (but without autoref applied yet).
pub fn lookup_method(self, db: &impl HirDatabase, name: &Name) -> Option<(Ty, Function)> {
self.iterate_methods(db, |ty, f| {
pub fn lookup_method(
self,
db: &impl HirDatabase,
name: &Name,
resolver: &Resolver,
) -> Option<(Ty, Function)> {
// FIXME: trait methods should be used before autoderefs
let inherent_method = self.clone().iterate_methods(db, |ty, f| {
let sig = f.signature(db);
if sig.name() == name && sig.has_self_param() {
Some((ty.clone(), f))
} else {
None
}
})
});
inherent_method.or_else(|| self.lookup_trait_method(db, name, resolver))
}

fn lookup_trait_method(
self,
db: &impl HirDatabase,
name: &Name,
resolver: &Resolver,
) -> Option<(Ty, Function)> {
let mut candidates = Vec::new();
for t in resolver.traits_in_scope() {
let data = t.trait_data(db);
for item in data.items() {
match item {
&TraitItem::Function(m) => {
let sig = m.signature(db);
if sig.name() == name && sig.has_self_param() {
candidates.push((t, m));
}
}
_ => {}
}
}
}
// FIXME:
// - we might not actually be able to determine fully that the type
// implements the trait here; it's enough if we (well, Chalk) determine
// that it's possible.
// - when the trait method is picked, we need to register an
// 'obligation' somewhere so that we later check that it's really
// implemented
// - both points go for additional requirements from where clauses as
// well (in fact, the 'implements' condition could just be considered a
// 'where Self: Trait' clause)
candidates.retain(|(t, _m)| {
let trait_ref = TraitRef { trait_: *t, substs: Substs::single(self.clone()) };
db.implements(trait_ref)
});
// FIXME if there's multiple candidates here, that's an ambiguity error
let (_chosen_trait, chosen_method) = candidates.first()?;
Some((self.clone(), *chosen_method))
}
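Taken together, method resolution now tries inherent methods along the autoderef chain first and only then falls back to trait methods from the traits in scope, keeping candidates whose trait the receiver (naively) implements and picking the first one. A small example of the behaviour this enables, in the spirit of the updated tests below rather than a literal excerpt from them:

// Inferred in the IDE rather than executed; shown only to make the lookup
// order concrete.
trait Foo {
    fn method(&self) -> u32 { 0 }
}
struct S;
impl Foo for S {}
fn test() {
    // S has no inherent `method`, so lookup_trait_method finds Foo::method via
    // traits_in_scope() and confirms it with implements(); the call types as u32.
    S.method();
}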

// This would be nicer if it just returned an iterator, but that runs into
@ -1272,8 +1272,8 @@ fn test() {
[241; 252) 'Struct::FOO': u32
[262; 263) 'y': u32
[266; 275) 'Enum::BAR': u32
[285; 286) 'z': u32
[289; 302) 'TraitTest::ID': u32"###
[285; 286) 'z': {unknown}
[289; 302) 'TraitTest::ID': {unknown}"###
);
}

@ -1918,9 +1918,9 @@ fn test() {
[110; 114) 'self': &{unknown}
[170; 228) '{ ...i128 }': ()
[176; 178) 'S1': S1
[176; 187) 'S1.method()': {unknown}
[176; 187) 'S1.method()': u32
[203; 205) 'S2': S2
[203; 214) 'S2.method()': {unknown}"###
[203; 214) 'S2.method()': i128"###
);
}

@ -1964,10 +1964,10 @@ mod bar_test {
[169; 173) 'self': &{unknown}
[300; 337) '{ ... }': ()
[310; 311) 'S': S
[310; 320) 'S.method()': {unknown}
[310; 320) 'S.method()': u32
[416; 454) '{ ... }': ()
[426; 427) 'S': S
[426; 436) 'S.method()': {unknown}"###
[426; 436) 'S.method()': i128"###
);
}

@ -2319,3 +2319,27 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}
}

#[test]
fn no_such_field_diagnostics() {
let diagnostics = MockDatabase::with_files(
r"
//- /lib.rs
struct S { foo: i32, bar: () }
impl S {
fn new() -> S {
S {
foo: 92,
baz: 62,
}
}
}
",
)
.diagnostics();

assert_snapshot_matches!(diagnostics, @r###"
"baz: 62": no such field
"###
);
}