1034: HIR diagnostics API r=matklad a=matklad

This PR introduces a diagnostics API for HIR, so we can now start issuing errors and warnings! Here are the requirements this solution aims to fulfill:

* structured diagnostics: rather than immediately rendering an error to a string, we provide a well-typed blob of data describing the error. The IDE uses this data to provide fixes
* open-set diagnostics: there's no single enum with all possible diagnostics, which should hopefully result in better modularity

The `Diagnostic` trait describes "a diagnostic", which can be downcast to a specific diagnostic kind. Diagnostics are expressed in terms of the macro-expanded syntax tree: they store pointers to syntax nodes. Diagnostics are self-contained: you don't need any context besides `db` to fully understand the meaning of a diagnostic.
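To make the shape of the API concrete, here is a minimal sketch of the consumer side (the `collect_messages` helper and the `hir::db::HirDatabase` import path are assumptions for this example; the real IDE wiring is in the `ra_ide_api` changes below). A typed handler is registered for one concrete diagnostic and an untyped default callback catches everything else; the sink tries the typed callbacks first and falls back to the default.

```rust
use std::cell::RefCell;

use hir::diagnostics::{Diagnostic as _, DiagnosticSink, UnresolvedModule};

// Hypothetical helper for this example: collect human-readable messages for one module.
fn collect_messages(db: &impl hir::db::HirDatabase, module: hir::Module) -> Vec<String> {
    let messages = RefCell::new(Vec::new());
    let mut sink = DiagnosticSink::new(|d| {
        // Default callback: runs for any diagnostic without a dedicated handler.
        messages.borrow_mut().push(d.message());
    })
    .on::<UnresolvedModule, _>(|d| {
        // Typed handler: the downcast exposes structured data like `candidate`,
        // which an IDE can turn into a "create module" fix.
        messages.borrow_mut().push(format!("unresolved module, candidate: {:?}", d.candidate));
    });
    module.diagnostics(db, &mut sink);
    drop(sink); // the closures borrow `messages`, so drop the sink before unwrapping it
    messages.into_inner()
}
```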

Because diagnostics are tied to the source, we can't store them in salsa. So subsystems like type checking produce subsystem-local diagnostics (a closed `enum`) expressed in terms of the subsystem's IR. A separate step converts these proto-diagnostics into `Diagnostic`s by merging them with source maps.
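As a concrete instance of that split, here is the shape of the type-checking side, condensed from the type-inference changes below: the salsa-stored result keeps only stable IDs (an `ExprId` plus a field index), and the conversion step recovers syntax pointers through the body source map before pushing a full `Diagnostic`.

```rust
use crate::{expr::ExprId, diagnostics::{DiagnosticSink, NoSuchField}, HirDatabase, Function};

// Subsystem-local proto-diagnostic: safe to store in salsa, no syntax pointers.
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum InferenceDiagnostic {
    NoSuchField { expr: ExprId, field: usize },
}

impl InferenceDiagnostic {
    // Conversion step: merge with the source map to produce a self-contained diagnostic.
    pub(super) fn add_to(&self, db: &impl HirDatabase, owner: Function, sink: &mut DiagnosticSink) {
        match self {
            InferenceDiagnostic::NoSuchField { expr, field } => {
                let (file, _) = owner.source(db);
                let field = owner.body_source_map(db).field_syntax(*expr, *field);
                sink.push(NoSuchField { file, field })
            }
        }
    }
}
```

Only this conversion step touches the syntax tree, so the inference results themselves stay cacheable.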

Note that this PR stresses the type system quite a bit: we now type-check every function in open files to compute errors!

Discussion on Zulip: https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0/topic/Diagnostics.20API

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2019-03-25 11:38:46 +00:00
commit c4ead49361
15 changed files with 424 additions and 165 deletions

View file

@@ -1,8 +1,7 @@
 use std::sync::Arc;

-use relative_path::RelativePathBuf;
 use ra_db::{CrateId, SourceRootId, Edition};
-use ra_syntax::{ast::self, TreeArc, SyntaxNode};
+use ra_syntax::{ast::self, TreeArc};

 use crate::{
     Name, ScopesWithSourceMap, Ty, HirFileId,
@@ -17,6 +16,7 @@ use crate::{
     ids::{FunctionId, StructId, EnumId, AstItemDef, ConstId, StaticId, TraitId, TypeId},
     impl_block::ImplBlock,
     resolve::Resolver,
+    diagnostics::DiagnosticSink,
 };

 /// hir::Crate describes a single crate. It's the main interface with which
@@ -95,11 +95,6 @@ pub enum ModuleSource {
     Module(TreeArc<ast::Module>),
 }

-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
-pub enum Problem {
-    UnresolvedModule { candidate: RelativePathBuf },
-}
-
 impl Module {
     /// Name of this module.
     pub fn name(&self, db: &impl HirDatabase) -> Option<Name> {
@@ -171,8 +166,24 @@ impl Module {
         db.crate_def_map(self.krate)[self.module_id].scope.clone()
     }

-    pub fn problems(&self, db: &impl HirDatabase) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
-        self.problems_impl(db)
+    pub fn diagnostics(&self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
+        db.crate_def_map(self.krate).add_diagnostics(db, self.module_id, sink);
+        for decl in self.declarations(db) {
+            match decl {
+                crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
+                crate::ModuleDef::Module(f) => f.diagnostics(db, sink),
+                _ => (),
+            }
+        }
+        for impl_block in self.impl_blocks(db) {
+            for item in impl_block.items(db) {
+                match item {
+                    crate::ImplItem::Method(f) => f.diagnostics(db, sink),
+                    _ => (),
+                }
+            }
+        }
     }

     pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
@@ -519,6 +530,10 @@ impl Function {
         let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
         r
     }
+
+    pub fn diagnostics(&self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
+        self.infer(db).add_diagnostics(db, *self, sink);
+    }
 }

 impl Docs for Function {

View file

@@ -1,8 +1,8 @@
 use ra_db::FileId;
-use ra_syntax::{ast, SyntaxNode, TreeArc, AstNode};
+use ra_syntax::{ast, TreeArc, AstNode};

 use crate::{
-    Module, ModuleSource, Problem, Name,
+    Module, ModuleSource, Name,
     nameres::{CrateModuleId, ImportId},
     HirDatabase, DefDatabase,
     HirFileId, SourceItemId,
@@ -108,19 +108,4 @@ impl Module {
         let parent_id = def_map[self.module_id].parent?;
         Some(self.with_module_id(parent_id))
     }
-
-    pub(crate) fn problems_impl(
-        &self,
-        db: &impl HirDatabase,
-    ) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
-        let def_map = db.crate_def_map(self.krate);
-        let (my_file_id, _) = self.definition_source(db);
-        // FIXME: not entirely corret filterint by module
-        def_map
-            .problems()
-            .iter()
-            .filter(|(source_item_id, _problem)| my_file_id == source_item_id.file_id)
-            .map(|(source_item_id, problem)| (db.file_item(*source_item_id), problem.clone()))
-            .collect()
-    }
 }

View file

@@ -0,0 +1,115 @@
+use std::{fmt, any::Any};
+
+use ra_syntax::{SyntaxNodePtr, TreeArc, AstPtr, TextRange, ast, SyntaxNode};
+use relative_path::RelativePathBuf;
+
+use crate::{HirFileId, HirDatabase};
+
+/// Diagnostic defines the hir API for errors and warnings.
+///
+/// It is used as a `dyn` object, which you can downcast to a concrete
+/// diagnostic. Diagnostics are structured, meaning that they include rich
+/// information which can be used by an IDE to create fixes. Diagnostics are
+/// expressed in terms of macro-expanded syntax tree nodes (so it's a bad idea
+/// to store a diagnostic in a salsa value).
+///
+/// Internally, various subsystems of hir produce diagnostics specific to a
+/// subsystem (typically an `enum`), which are safe to store in salsa but do not
+/// include source locations. Such internal diagnostics are transformed into an
+/// instance of `Diagnostic` on demand.
+pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
+    fn message(&self) -> String;
+    fn file(&self) -> HirFileId;
+    fn syntax_node_ptr(&self) -> SyntaxNodePtr;
+    fn highlight_range(&self) -> TextRange {
+        self.syntax_node_ptr().range()
+    }
+    fn as_any(&self) -> &(dyn Any + Send + 'static);
+}
+
+impl dyn Diagnostic {
+    pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> {
+        let source_file = db.hir_parse(self.file());
+        self.syntax_node_ptr().to_node(&source_file).to_owned()
+    }
+
+    pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
+        self.as_any().downcast_ref()
+    }
+}
+
+pub struct DiagnosticSink<'a> {
+    callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
+    default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
+}
+
+impl<'a> DiagnosticSink<'a> {
+    pub fn new(cb: impl FnMut(&dyn Diagnostic) + 'a) -> DiagnosticSink<'a> {
+        DiagnosticSink { callbacks: Vec::new(), default_callback: Box::new(cb) }
+    }
+
+    pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> DiagnosticSink<'a> {
+        let cb = move |diag: &dyn Diagnostic| match diag.downcast_ref::<D>() {
+            Some(d) => {
+                cb(d);
+                Ok(())
+            }
+            None => Err(()),
+        };
+        self.callbacks.push(Box::new(cb));
+        self
+    }
+
+    pub(crate) fn push(&mut self, d: impl Diagnostic) {
+        let d: &dyn Diagnostic = &d;
+        for cb in self.callbacks.iter_mut() {
+            match cb(d) {
+                Ok(()) => return,
+                Err(()) => (),
+            }
+        }
+        (self.default_callback)(d)
+    }
+}
+
+#[derive(Debug)]
+pub struct NoSuchField {
+    pub file: HirFileId,
+    pub field: AstPtr<ast::NamedField>,
+}
+
+impl Diagnostic for NoSuchField {
+    fn message(&self) -> String {
+        "no such field".to_string()
+    }
+    fn file(&self) -> HirFileId {
+        self.file
+    }
+    fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+        self.field.into()
+    }
+    fn as_any(&self) -> &(dyn Any + Send + 'static) {
+        self
+    }
+}
+
+#[derive(Debug)]
+pub struct UnresolvedModule {
+    pub file: HirFileId,
+    pub decl: AstPtr<ast::Module>,
+    pub candidate: RelativePathBuf,
+}
+
+impl Diagnostic for UnresolvedModule {
+    fn message(&self) -> String {
+        "unresolved module".to_string()
+    }
+    fn file(&self) -> HirFileId {
+        self.file
+    }
+    fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+        self.decl.into()
+    }
+    fn as_any(&self) -> &(dyn Any + Send + 'static) {
+        self
+    }
+}

View file

@@ -5,7 +5,7 @@ use rustc_hash::FxHashMap;
 use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
 use ra_syntax::{
-    SyntaxNodePtr, AstNode,
+    SyntaxNodePtr, AstPtr, AstNode,
     ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralFlavor, TypeAscriptionOwner}
 };

@@ -54,6 +54,7 @@ pub struct BodySourceMap {
     expr_map_back: ArenaMap<ExprId, SyntaxNodePtr>,
     pat_map: FxHashMap<SyntaxNodePtr, PatId>,
     pat_map_back: ArenaMap<PatId, SyntaxNodePtr>,
+    field_map: FxHashMap<(ExprId, usize), AstPtr<ast::NamedField>>,
 }

 impl Body {
@@ -138,6 +139,10 @@ impl BodySourceMap {
     pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
         self.pat_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
     }
+
+    pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
+        self.field_map[&(expr, field)].clone()
+    }
 }

 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -629,8 +634,10 @@ impl ExprCollector {
             }
             ast::ExprKind::StructLit(e) => {
                 let path = e.path().and_then(Path::from_ast);
+                let mut field_ptrs = Vec::new();
                 let fields = if let Some(nfl) = e.named_field_list() {
                     nfl.fields()
+                        .inspect(|field| field_ptrs.push(AstPtr::new(*field)))
                         .map(|field| StructLitField {
                             name: field
                                 .name_ref()
@@ -657,7 +664,11 @@
                     Vec::new()
                 };
                 let spread = e.spread().map(|s| self.collect_expr(s));
-                self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr)
+                let res = self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr);
+                for (i, ptr) in field_ptrs.into_iter().enumerate() {
+                    self.source_map.field_map.insert((res, i), ptr);
+                }
+                res
             }
             ast::ExprKind::FieldExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());

View file

@@ -35,6 +35,7 @@ mod expr;
 mod generics;
 mod docs;
 mod resolve;
+pub mod diagnostics;

 mod code_model_api;
 mod code_model_impl;
@@ -63,7 +64,7 @@ pub use self::{
 pub use self::code_model_api::{
     Crate, CrateDependency,
-    Module, ModuleDef, ModuleSource, Problem,
+    Module, ModuleDef, ModuleSource,
     Struct, Enum, EnumVariant,
     Function, FnSignature,
     StructField, FieldSource,

View file

@@ -9,7 +9,7 @@ use relative_path::RelativePathBuf;
 use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset};
 use rustc_hash::FxHashMap;

-use crate::{db, HirInterner};
+use crate::{db, HirInterner, diagnostics::DiagnosticSink};

 pub const WORKSPACE: SourceRootId = SourceRootId(0);
@@ -70,6 +70,22 @@ impl MockDatabase {
         self.set_crate_graph(Arc::new(crate_graph))
     }

+    pub fn diagnostics(&self) -> String {
+        let mut buf = String::from("\n");
+        let mut files: Vec<FileId> = self.files.values().map(|&it| it).collect();
+        files.sort();
+        for file in files {
+            let module = crate::source_binder::module_from_file_id(self, file).unwrap();
+            module.diagnostics(
+                self,
+                &mut DiagnosticSink::new(|d| {
+                    buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
+                }),
+            )
+        }
+        buf
+    }
+
     fn from_fixture(fixture: &str) -> (MockDatabase, Option<FilePosition>) {
         let mut db = MockDatabase::default();

View file

@@ -61,9 +61,11 @@ use ra_db::{FileId, Edition};
 use test_utils::tested_by;

 use crate::{
-    ModuleDef, Name, Crate, Module, Problem,
+    ModuleDef, Name, Crate, Module,
     DefDatabase, Path, PathKind, HirFileId,
     ids::{SourceItemId, SourceFileItemId, MacroCallId},
+    diagnostics::DiagnosticSink,
+    nameres::diagnostics::DefDiagnostic,
 };

 pub(crate) use self::raw::{RawItems, ImportId, ImportSourceMap};
@@ -85,7 +87,7 @@ pub struct CrateDefMap {
     macros: Arena<CrateMacroId, mbe::MacroRules>,
     public_macros: FxHashMap<Name, CrateMacroId>,
     macro_resolutions: FxHashMap<MacroCallId, (Crate, CrateMacroId)>,
-    problems: CrateDefMapProblems,
+    diagnostics: Vec<DefDiagnostic>,
 }

 impl std::ops::Index<CrateModuleId> for CrateDefMap {
@@ -125,21 +127,6 @@ pub(crate) struct ModuleData {
     pub(crate) definition: Option<FileId>,
 }

-#[derive(Default, Debug, PartialEq, Eq)]
-pub(crate) struct CrateDefMapProblems {
-    problems: Vec<(SourceItemId, Problem)>,
-}
-
-impl CrateDefMapProblems {
-    fn add(&mut self, source_item_id: SourceItemId, problem: Problem) {
-        self.problems.push((source_item_id, problem))
-    }
-
-    pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = (&'a SourceItemId, &'a Problem)> + 'a {
-        self.problems.iter().map(|(s, p)| (s, p))
-    }
-}
-
 #[derive(Debug, Default, PartialEq, Eq, Clone)]
 pub struct ModuleScope {
     items: FxHashMap<Name, Resolution>,
@@ -212,7 +199,7 @@ impl CrateDefMap {
             macros: Arena::default(),
             public_macros: FxHashMap::default(),
             macro_resolutions: FxHashMap::default(),
-            problems: CrateDefMapProblems::default(),
+            diagnostics: Vec::new(),
         }
     };
     let def_map = collector::collect_defs(db, def_map);
@@ -224,10 +211,6 @@
         self.root
     }

-    pub(crate) fn problems(&self) -> &CrateDefMapProblems {
-        &self.problems
-    }
-
     pub(crate) fn mk_module(&self, module_id: CrateModuleId) -> Module {
         Module { krate: self.krate, module_id }
     }
@@ -240,6 +223,15 @@
         &self.extern_prelude
     }

+    pub(crate) fn add_diagnostics(
+        &self,
+        db: &impl DefDatabase,
+        module: CrateModuleId,
+        sink: &mut DiagnosticSink,
+    ) {
+        self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink))
+    }
+
     pub(crate) fn resolve_macro(
         &self,
         macro_call_id: MacroCallId,
@@ -452,3 +444,48 @@
         }
     }
 }
+
+mod diagnostics {
+    use relative_path::RelativePathBuf;
+    use ra_syntax::{AstPtr, AstNode, ast};
+
+    use crate::{
+        SourceItemId, DefDatabase,
+        nameres::CrateModuleId,
+        diagnostics::{DiagnosticSink, UnresolvedModule},
+    };
+
+    #[derive(Debug, PartialEq, Eq)]
+    pub(super) enum DefDiagnostic {
+        UnresolvedModule {
+            module: CrateModuleId,
+            declaration: SourceItemId,
+            candidate: RelativePathBuf,
+        },
+    }
+
+    impl DefDiagnostic {
+        pub(super) fn add_to(
+            &self,
+            db: &impl DefDatabase,
+            target_module: CrateModuleId,
+            sink: &mut DiagnosticSink,
+        ) {
+            match self {
+                DefDiagnostic::UnresolvedModule { module, declaration, candidate } => {
+                    if *module != target_module {
+                        return;
+                    }
+                    let syntax = db.file_item(*declaration);
+                    let decl = ast::Module::cast(&syntax).unwrap();
+                    sink.push(UnresolvedModule {
+                        file: declaration.file_id,
+                        decl: AstPtr::new(&decl),
+                        candidate: candidate.clone(),
+                    })
+                }
+            }
+        }
+    }
+}

View file

@@ -6,14 +6,17 @@ use ra_db::FileId;
 use crate::{
     Function, Module, Struct, Enum, Const, Static, Trait, TypeAlias,
-    DefDatabase, HirFileId, Name, Path, Problem, Crate,
+    DefDatabase, HirFileId, Name, Path, Crate,
     KnownName,
-    nameres::{Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode, raw},
+    nameres::{
+        Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode,
+        CrateDefMap, CrateModuleId, ModuleData, CrateMacroId,
+        diagnostics::DefDiagnostic,
+        raw,
+    },
     ids::{AstItemDef, LocationCtx, MacroCallLoc, SourceItemId, MacroCallId},
 };

-use super::{CrateDefMap, CrateModuleId, ModuleData, CrateMacroId};
-
 pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
     // populate external prelude
     for dep in def_map.krate.dependencies(db) {
@@ -405,25 +408,27 @@ where
             raw::ModuleData::Declaration { name, source_item_id } => {
                 let source_item_id = source_item_id.with_file_id(self.file_id);
                 let is_root = self.def_collector.def_map.modules[self.module_id].parent.is_none();
-                let (file_ids, problem) =
-                    resolve_submodule(self.def_collector.db, self.file_id, name, is_root);
-
-                if let Some(problem) = problem {
-                    self.def_collector.def_map.problems.add(source_item_id, problem)
-                }
-
-                if let Some(&file_id) = file_ids.first() {
-                    let module_id =
-                        self.push_child_module(name.clone(), source_item_id, Some(file_id));
-                    let raw_items = self.def_collector.db.raw_items(file_id);
-                    ModCollector {
-                        def_collector: &mut *self.def_collector,
-                        module_id,
-                        file_id: file_id.into(),
-                        raw_items: &raw_items,
-                    }
-                    .collect(raw_items.items())
-                }
+                match resolve_submodule(self.def_collector.db, self.file_id, name, is_root) {
+                    Ok(file_id) => {
+                        let module_id =
+                            self.push_child_module(name.clone(), source_item_id, Some(file_id));
+                        let raw_items = self.def_collector.db.raw_items(file_id);
+                        ModCollector {
+                            def_collector: &mut *self.def_collector,
+                            module_id,
+                            file_id: file_id.into(),
+                            raw_items: &raw_items,
+                        }
+                        .collect(raw_items.items())
+                    }
+                    Err(candidate) => self.def_collector.def_map.diagnostics.push(
+                        DefDiagnostic::UnresolvedModule {
+                            module: self.module_id,
+                            declaration: source_item_id,
+                            candidate,
+                        },
+                    ),
+                };
             }
         }
     }
@@ -524,7 +529,7 @@ fn resolve_submodule(
     file_id: HirFileId,
     name: &Name,
     is_root: bool,
-) -> (Vec<FileId>, Option<Problem>) {
+) -> Result<FileId, RelativePathBuf> {
     // FIXME: handle submodules of inline modules properly
     let file_id = file_id.original_file(db);
     let source_root_id = db.file_source_root(file_id);
@@ -545,17 +550,10 @@
         candidates.push(file_dir_mod.clone());
     };
     let sr = db.source_root(source_root_id);
-    let points_to = candidates
-        .into_iter()
-        .filter_map(|path| sr.files.get(&path))
-        .map(|&it| it)
-        .collect::<Vec<_>>();
-    let problem = if points_to.is_empty() {
-        Some(Problem::UnresolvedModule {
-            candidate: if is_dir_owner { file_mod } else { file_dir_mod },
-        })
-    } else {
-        None
-    };
-    (points_to, problem)
+    let mut points_to = candidates.into_iter().filter_map(|path| sr.files.get(&path)).map(|&it| it);
+    // FIXME: handle ambiguity
+    match points_to.next() {
+        Some(file_id) => Ok(file_id),
+        None => Err(if is_dir_owner { file_mod } else { file_dir_mod }),
+    }
 }

View file

@@ -552,3 +552,22 @@ foo: v
 "###
     );
 }
+
+#[test]
+fn unresolved_module_diagnostics() {
+    let diagnostics = MockDatabase::with_files(
+        r"
+        //- /lib.rs
+        mod foo;
+        mod bar;
+        mod baz {}
+        //- /foo.rs
+        ",
+    )
+    .diagnostics();
+
+    assert_snapshot_matches!(diagnostics, @r###"
+"mod bar;": unresolved module
+"###
+    );
+}

View file

@@ -36,7 +36,9 @@ use crate::{
     path::{GenericArgs, GenericArg},
     adt::VariantDef,
     resolve::{Resolver, Resolution},
-    nameres::Namespace
+    nameres::Namespace,
+    ty::infer::diagnostics::InferenceDiagnostic,
+    diagnostics::DiagnosticSink,
 };
 use super::{Ty, TypableDef, Substs, primitive, op, FnSig, ApplicationTy, TypeCtor};
@@ -96,6 +98,7 @@ pub struct InferenceResult {
     field_resolutions: FxHashMap<ExprId, StructField>,
     /// For each associated item record what it resolves to
     assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
+    diagnostics: Vec<InferenceDiagnostic>,
     pub(super) type_of_expr: ArenaMap<ExprId, Ty>,
     pub(super) type_of_pat: ArenaMap<PatId, Ty>,
 }
@@ -113,6 +116,14 @@ impl InferenceResult {
     pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<ImplItem> {
         self.assoc_resolutions.get(&id.into()).map(|it| *it)
     }
+    pub(crate) fn add_diagnostics(
+        &self,
+        db: &impl HirDatabase,
+        owner: Function,
+        sink: &mut DiagnosticSink,
+    ) {
+        self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
+    }
 }

 impl Index<ExprId> for InferenceResult {
@@ -143,6 +154,7 @@ struct InferenceContext<'a, D: HirDatabase> {
     assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
     type_of_expr: ArenaMap<ExprId, Ty>,
     type_of_pat: ArenaMap<PatId, Ty>,
+    diagnostics: Vec<InferenceDiagnostic>,
     /// The return type of the function being inferred.
     return_ty: Ty,
 }
@@ -155,6 +167,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             assoc_resolutions: FxHashMap::default(),
             type_of_expr: ArenaMap::default(),
             type_of_pat: ArenaMap::default(),
+            diagnostics: Vec::default(),
             var_unification_table: InPlaceUnificationTable::new(),
             return_ty: Ty::Unknown, // set in collect_fn_signature
             db,
@@ -181,6 +194,7 @@
             assoc_resolutions: self.assoc_resolutions,
             type_of_expr: expr_types,
             type_of_pat: pat_types,
+            diagnostics: self.diagnostics,
         }
     }
@@ -915,9 +929,18 @@
             Expr::StructLit { path, fields, spread } => {
                 let (ty, def_id) = self.resolve_variant(path.as_ref());
                 let substs = ty.substs().unwrap_or_else(Substs::empty);
-                for field in fields {
+                for (field_idx, field) in fields.into_iter().enumerate() {
                     let field_ty = def_id
-                        .and_then(|it| it.field(self.db, &field.name))
+                        .and_then(|it| match it.field(self.db, &field.name) {
+                            Some(field) => Some(field),
+                            None => {
+                                self.diagnostics.push(InferenceDiagnostic::NoSuchField {
+                                    expr: tgt_expr,
+                                    field: field_idx,
+                                });
+                                None
+                            }
+                        })
                         .map_or(Ty::Unknown, |field| field.ty(self.db))
                         .subst(&substs);
                     self.infer_expr(field.expr, &Expectation::has_type(field_ty));
@@ -1244,3 +1267,29 @@
         Expectation { ty: Ty::Unknown }
     }
 }
+
+mod diagnostics {
+    use crate::{expr::ExprId, diagnostics::{DiagnosticSink, NoSuchField}, HirDatabase, Function};
+
+    #[derive(Debug, PartialEq, Eq, Clone)]
+    pub(super) enum InferenceDiagnostic {
+        NoSuchField { expr: ExprId, field: usize },
+    }
+
+    impl InferenceDiagnostic {
+        pub(super) fn add_to(
+            &self,
+            db: &impl HirDatabase,
+            owner: Function,
+            sink: &mut DiagnosticSink,
+        ) {
+            match self {
+                InferenceDiagnostic::NoSuchField { expr, field } => {
+                    let (file, _) = owner.source(db);
+                    let field = owner.body_source_map(db).field_syntax(*expr, *field);
+                    sink.push(NoSuchField { file, field })
+                }
+            }
+        }
+    }
+}

View file

@@ -2319,3 +2319,27 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
         assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
     }
 }
+
+#[test]
+fn no_such_field_diagnostics() {
+    let diagnostics = MockDatabase::with_files(
+        r"
+        //- /lib.rs
+        struct S { foo: i32, bar: () }
+        impl S {
+            fn new() -> S {
+                S {
+                    foo: 92,
+                    baz: 62,
+                }
+            }
+        }
+        ",
+    )
+    .diagnostics();
+
+    assert_snapshot_matches!(diagnostics, @r###"
+"baz: 62": no such field
+"###
+    );
+}

View file

@@ -1,10 +1,11 @@
+use std::cell::RefCell;
 use itertools::Itertools;
-use hir::{Problem, source_binder};
+use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}};
 use ra_db::SourceDatabase;
 use ra_syntax::{
     Location, SourceFile, SyntaxKind, TextRange, SyntaxNode,
     ast::{self, AstNode},
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
@@ -26,11 +27,31 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
         check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
         check_struct_shorthand_initialization(&mut res, file_id, node);
     }
+    let res = RefCell::new(res);
+    let mut sink = DiagnosticSink::new(|d| {
+        res.borrow_mut().push(Diagnostic {
+            message: d.message(),
+            range: d.highlight_range(),
+            severity: Severity::Error,
+            fix: None,
+        })
+    })
+    .on::<hir::diagnostics::UnresolvedModule, _>(|d| {
+        let source_root = db.file_source_root(d.file().original_file(db));
+        let create_file = FileSystemEdit::CreateFile { source_root, path: d.candidate.clone() };
+        let fix = SourceChange::file_system_edit("create module", create_file);
+        res.borrow_mut().push(Diagnostic {
+            range: d.highlight_range(),
+            message: d.message(),
+            severity: Severity::Error,
+            fix: Some(fix),
+        })
+    });

     if let Some(m) = source_binder::module_from_file_id(db, file_id) {
-        check_module(&mut res, db, file_id, m);
+        m.diagnostics(db, &mut sink);
     };
-    res
+    drop(sink);
+    res.into_inner()
 }

 fn syntax_errors(acc: &mut Vec<Diagnostic>, source_file: &SourceFile) {
@@ -128,34 +149,12 @@ fn check_struct_shorthand_initialization(
     Some(())
 }

-fn check_module(
-    acc: &mut Vec<Diagnostic>,
-    db: &RootDatabase,
-    file_id: FileId,
-    module: hir::Module,
-) {
-    let source_root = db.file_source_root(file_id);
-    for (name_node, problem) in module.problems(db) {
-        let diag = match problem {
-            Problem::UnresolvedModule { candidate } => {
-                let create_file =
-                    FileSystemEdit::CreateFile { source_root, path: candidate.clone() };
-                let fix = SourceChange::file_system_edit("create module", create_file);
-                Diagnostic {
-                    range: name_node.range(),
-                    message: "unresolved module".to_string(),
-                    severity: Severity::Error,
-                    fix: Some(fix),
-                }
-            }
-        };
-        acc.push(diag)
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use test_utils::assert_eq_text;
+    use insta::assert_debug_snapshot_matches;
+
+    use crate::mock_analysis::single_file;

     use super::*;
@@ -184,6 +183,34 @@ mod tests {
         assert_eq_text!(after, &actual);
     }

+    #[test]
+    fn test_unresolved_module_diagnostic() {
+        let (analysis, file_id) = single_file("mod foo;");
+        let diagnostics = analysis.diagnostics(file_id).unwrap();
+        assert_debug_snapshot_matches!(diagnostics, @r####"[
+    Diagnostic {
+        message: "unresolved module",
+        range: [0; 8),
+        fix: Some(
+            SourceChange {
+                label: "create module",
+                source_file_edits: [],
+                file_system_edits: [
+                    CreateFile {
+                        source_root: SourceRootId(
+                            0
+                        ),
+                        path: "foo.rs"
+                    }
+                ],
+                cursor_position: None
+            }
+        ),
+        severity: Error
+    }
+]"####);
+    }
+
     #[test]
     fn test_check_unnecessary_braces_in_use_statement() {
         check_not_applicable(

View file

@@ -1,4 +1,3 @@
-use insta::assert_debug_snapshot_matches;
 use ra_ide_api::{
     mock_analysis::{single_file, single_file_with_position, single_file_with_range, MockAnalysis},
     AnalysisChange, CrateGraph, Edition::Edition2018, Query, NavigationTarget,
@@ -6,21 +5,6 @@ use ra_ide_api::{
 };
 use ra_syntax::SmolStr;

-#[test]
-fn test_unresolved_module_diagnostic() {
-    let (analysis, file_id) = single_file("mod foo;");
-    let diagnostics = analysis.diagnostics(file_id).unwrap();
-    assert_debug_snapshot_matches!("unresolved_module_diagnostic", &diagnostics);
-}
-
-// FIXME: move this test to hir
-#[test]
-fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() {
-    let (analysis, file_id) = single_file("mod foo {}");
-    let diagnostics = analysis.diagnostics(file_id).unwrap();
-    assert!(diagnostics.is_empty());
-}
-
 #[test]
 fn test_resolve_crate_root() {
     let mock = MockAnalysis::with_files(

View file

@@ -1,28 +0,0 @@
----
-created: "2019-01-22T14:45:01.486985900+00:00"
-creator: insta@0.4.0
-expression: "&diagnostics"
-source: "crates\\ra_ide_api\\tests\\test\\main.rs"
----
-[
-    Diagnostic {
-        message: "unresolved module",
-        range: [0; 8),
-        fix: Some(
-            SourceChange {
-                label: "create module",
-                source_file_edits: [],
-                file_system_edits: [
-                    CreateFile {
-                        source_root: SourceRootId(
-                            0
-                        ),
-                        path: "foo.rs"
-                    }
-                ],
-                cursor_position: None
-            }
-        ),
-        severity: Error
-    }
-]

View file

@@ -64,6 +64,12 @@
     }
 }

+impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
+    fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
+        ptr.raw
+    }
+}
+
 #[test]
 fn test_local_syntax_ptr() {
     use crate::{ast, AstNode};