Florian Diebold 2018-12-23 12:15:46 +01:00
parent 7348f7883f
commit 515c3bc59b
4 changed files with 110 additions and 108 deletions

View file

@@ -516,8 +516,14 @@ impl AnalysisImpl {
         let syntax = file.syntax();
         let node = find_covering_node(syntax, range);
         let parent_fn = node.ancestors().filter_map(FnDef::cast).next();
-        let parent_fn = if let Some(p) = parent_fn { p } else { return Ok(None) };
-        let function = ctry!(source_binder::function_from_source(&*self.db, file_id, parent_fn)?);
+        let parent_fn = if let Some(p) = parent_fn {
+            p
+        } else {
+            return Ok(None);
+        };
+        let function = ctry!(source_binder::function_from_source(
+            &*self.db, file_id, parent_fn
+        )?);
         let infer = function.infer(&*self.db);
         Ok(infer.type_of_node(node).map(|t| t.to_string()))
     }

View file

@@ -2,24 +2,21 @@ mod primitive;
 #[cfg(test)]
 mod tests;
 
-use rustc_hash::{FxHashMap, FxHashSet};
-
 use std::sync::Arc;
-use std::collections::HashMap;
 use std::fmt;
 
+use rustc_hash::{FxHashMap};
+
 use ra_db::LocalSyntaxPtr;
 use ra_syntax::{
-    TextRange, TextUnit, SmolStr,
-    algo::visit::{visitor, Visitor},
-    ast::{self, AstNode, DocCommentsOwner, NameOwner, LoopBodyOwner, ArgListOwner},
+    SmolStr,
+    ast::{self, AstNode, LoopBodyOwner, ArgListOwner},
     SyntaxNodeRef
 };
 use crate::{
     FnScopes,
     db::HirDatabase,
-    arena::{Arena, Id},
 };
 
 // pub(crate) type TypeId = Id<Ty>;
@@ -150,9 +147,17 @@ impl Ty {
             TupleType(_inner) => Ty::Unknown, // TODO
             NeverType(..) => Ty::Never,
             PathType(inner) => {
-                let path = if let Some(p) = inner.path() { p } else { return Ty::Unknown };
+                let path = if let Some(p) = inner.path() {
+                    p
+                } else {
+                    return Ty::Unknown;
+                };
                 if path.qualifier().is_none() {
-                    let name = path.segment().and_then(|s| s.name_ref()).map(|n| n.text()).unwrap_or(SmolStr::new(""));
+                    let name = path
+                        .segment()
+                        .and_then(|s| s.name_ref())
+                        .map(|n| n.text())
+                        .unwrap_or(SmolStr::new(""));
                     if let Some(int_ty) = primitive::IntTy::from_string(&name) {
                         Ty::Int(int_ty)
                     } else if let Some(uint_ty) = primitive::UintTy::from_string(&name) {
@@ -167,16 +172,16 @@ impl Ty {
                     // TODO
                     Ty::Unknown
                 }
-            },
+            }
             PointerType(_inner) => Ty::Unknown, // TODO
             ArrayType(_inner) => Ty::Unknown, // TODO
             SliceType(_inner) => Ty::Unknown, // TODO
             ReferenceType(_inner) => Ty::Unknown, // TODO
             PlaceholderType(_inner) => Ty::Unknown, // TODO
             FnPointerType(_inner) => Ty::Unknown, // TODO
             ForType(_inner) => Ty::Unknown, // TODO
             ImplTraitType(_inner) => Ty::Unknown, // TODO
             DynTraitType(_inner) => Ty::Unknown, // TODO
         }
     }
@@ -203,7 +208,7 @@ impl fmt::Display for Ty {
                 }
                 write!(f, ")")
             }
-            Ty::Unknown => write!(f, "[unknown]")
+            Ty::Unknown => write!(f, "[unknown]"),
         }
     }
 }
@@ -230,7 +235,7 @@ impl InferenceContext {
     fn new(scopes: Arc<FnScopes>) -> Self {
         InferenceContext {
             type_for: FxHashMap::default(),
-            scopes
+            scopes,
         }
     }
@@ -238,7 +243,7 @@ impl InferenceContext {
         self.type_for.insert(LocalSyntaxPtr::new(node), ty);
     }
 
-    fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+    fn unify(&mut self, _ty1: &Ty, _ty2: &Ty) -> bool {
         unimplemented!()
     }
@@ -303,7 +308,7 @@ impl InferenceContext {
                 if let Some(expr) = e.iterable() {
                     self.infer_expr(expr);
                 }
-                if let Some(pat) = e.pat() {
+                if let Some(_pat) = e.pat() {
                     // TODO write type for pat
                 }
                 if let Some(block) = e.loop_body() {
@@ -313,7 +318,7 @@ impl InferenceContext {
                 Ty::Unknown
             }
             ast::Expr::LambdaExpr(e) => {
-                let body_ty = if let Some(body) = e.body() {
+                let _body_ty = if let Some(body) = e.body() {
                     self.infer_expr(body)
                 } else {
                     Ty::Unknown
@@ -339,7 +344,7 @@ impl InferenceContext {
                 Ty::Unknown
             }
             ast::Expr::MatchExpr(e) => {
-                let ty = if let Some(match_expr) = e.expr() {
+                let _ty = if let Some(match_expr) = e.expr() {
                     self.infer_expr(match_expr)
                 } else {
                     Ty::Unknown
@@ -348,7 +353,7 @@ impl InferenceContext {
                     for arm in match_arm_list.arms() {
                         // TODO type the bindings in pat
                         // TODO type the guard
-                        let ty = if let Some(e) = arm.expr() {
+                        let _ty = if let Some(e) = arm.expr() {
                             self.infer_expr(e)
                         } else {
                             Ty::Unknown
@@ -360,12 +365,8 @@ impl InferenceContext {
                     Ty::Unknown
                 }
             }
-            ast::Expr::TupleExpr(e) => {
-                Ty::Unknown
-            }
-            ast::Expr::ArrayExpr(e) => {
-                Ty::Unknown
-            }
+            ast::Expr::TupleExpr(_e) => Ty::Unknown,
+            ast::Expr::ArrayExpr(_e) => Ty::Unknown,
             ast::Expr::PathExpr(e) => {
                 if let Some(p) = e.path() {
                     if p.qualifier().is_none() {
@@ -392,12 +393,8 @@ impl InferenceContext {
                     Ty::Unknown
                 }
             }
-            ast::Expr::ContinueExpr(e) => {
-                Ty::Never
-            }
-            ast::Expr::BreakExpr(e) => {
-                Ty::Never
-            }
+            ast::Expr::ContinueExpr(_e) => Ty::Never,
+            ast::Expr::BreakExpr(_e) => Ty::Never,
             ast::Expr::ParenExpr(e) => {
                 if let Some(e) = e.expr() {
                     self.infer_expr(e)
@@ -405,9 +402,7 @@ impl InferenceContext {
                     Ty::Unknown
                 }
             }
-            ast::Expr::Label(e) => {
-                Ty::Unknown
-            }
+            ast::Expr::Label(_e) => Ty::Unknown,
             ast::Expr::ReturnExpr(e) => {
                 if let Some(e) = e.expr() {
                     // TODO unify with return type
@@ -419,21 +414,15 @@ impl InferenceContext {
                 // Can this even occur outside of a match expression?
                 Ty::Unknown
             }
-            ast::Expr::StructLit(e) => {
-                Ty::Unknown
-            }
+            ast::Expr::StructLit(_e) => Ty::Unknown,
             ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => {
                 // Can this even occur outside of a struct literal?
                 Ty::Unknown
             }
-            ast::Expr::IndexExpr(e) => {
-                Ty::Unknown
-            }
-            ast::Expr::FieldExpr(e) => {
-                Ty::Unknown
-            }
+            ast::Expr::IndexExpr(_e) => Ty::Unknown,
+            ast::Expr::FieldExpr(_e) => Ty::Unknown,
             ast::Expr::TryExpr(e) => {
-                let inner_ty = if let Some(e) = e.expr() {
+                let _inner_ty = if let Some(e) = e.expr() {
                     self.infer_expr(e)
                 } else {
                     Ty::Unknown
@@ -441,7 +430,7 @@ impl InferenceContext {
                 Ty::Unknown
             }
             ast::Expr::CastExpr(e) => {
-                let inner_ty = if let Some(e) = e.expr() {
+                let _inner_ty = if let Some(e) = e.expr() {
                     self.infer_expr(e)
                 } else {
                     Ty::Unknown
@@ -451,7 +440,7 @@ impl InferenceContext {
                 cast_ty
             }
             ast::Expr::RefExpr(e) => {
-                let inner_ty = if let Some(e) = e.expr() {
+                let _inner_ty = if let Some(e) = e.expr() {
                     self.infer_expr(e)
                 } else {
                     Ty::Unknown
@@ -459,22 +448,16 @@ impl InferenceContext {
                 Ty::Unknown
             }
             ast::Expr::PrefixExpr(e) => {
-                let inner_ty = if let Some(e) = e.expr() {
+                let _inner_ty = if let Some(e) = e.expr() {
                     self.infer_expr(e)
                 } else {
                     Ty::Unknown
                 };
                 Ty::Unknown
             }
-            ast::Expr::RangeExpr(e) => {
-                Ty::Unknown
-            }
-            ast::Expr::BinExpr(e) => {
-                Ty::Unknown
-            }
-            ast::Expr::Literal(e) => {
-                Ty::Unknown
-            }
+            ast::Expr::RangeExpr(_e) => Ty::Unknown,
+            ast::Expr::BinExpr(_e) => Ty::Unknown,
+            ast::Expr::Literal(_e) => Ty::Unknown,
         };
         self.write_ty(expr.syntax(), ty.clone());
         ty
@@ -505,7 +488,7 @@ impl InferenceContext {
     }
 }
 
-pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc<FnScopes>) -> InferenceResult {
+pub fn infer(_db: &impl HirDatabase, node: ast::FnDef, scopes: Arc<FnScopes>) -> InferenceResult {
     let mut ctx = InferenceContext::new(scopes);
 
     for param in node.param_list().unwrap().params() {
@@ -522,5 +505,7 @@ pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc<FnScopes>) ->
 
     // TODO 'resolve' the types: replace inference variables by their inferred results
 
-    InferenceResult { type_for: ctx.type_for }
+    InferenceResult {
+        type_for: ctx.type_for,
+    }
 }

View file

@@ -1,31 +1,38 @@
 use std::fmt::Write;
-use std::sync::Arc;
-use std::path::{Path, PathBuf};
+use std::path::{PathBuf};
 
-use salsa::Database;
-use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase};
-use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}};
+use ra_db::{SyntaxDatabase};
+use ra_syntax::ast::{self, AstNode};
 use test_utils::{project_dir, dir_tests};
-use relative_path::RelativePath;
 
-use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode};
 use crate::{
-    self as hir,
-    db::HirDatabase,
+    source_binder,
     mock::MockDatabase,
 };
 
 fn infer_file(content: &str) -> String {
-    let (db, source_root, file_id) = MockDatabase::with_single_file(content);
+    let (db, _, file_id) = MockDatabase::with_single_file(content);
     let source_file = db.source_file(file_id);
     let mut acc = String::new();
-    for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
-        let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap();
+    for fn_def in source_file
+        .syntax()
+        .descendants()
+        .filter_map(ast::FnDef::cast)
+    {
+        let func = source_binder::function_from_source(&db, file_id, fn_def)
+            .unwrap()
+            .unwrap();
         let inference_result = func.infer(&db);
         for (syntax_ptr, ty) in &inference_result.type_for {
             let node = syntax_ptr.resolve(&source_file);
-            write!(acc, "{} '{}': {}\n", syntax_ptr.range(), ellipsize(node.text().to_string().replace("\n", " "), 15), ty);
+            write!(
+                acc,
+                "{} '{}': {}\n",
+                syntax_ptr.range(),
+                ellipsize(node.text().to_string().replace("\n", " "), 15),
+                ty
+            )
+            .unwrap();
         }
     }
     acc
@@ -51,9 +58,7 @@ fn ellipsize(mut text: String, max_len: usize) -> String {
 
 #[test]
 pub fn infer_tests() {
-    dir_tests(&test_data_dir(), &["."], |text, _path| {
-        infer_file(text)
-    });
+    dir_tests(&test_data_dir(), &["."], |text, _path| infer_file(text));
 }
 
 fn test_data_dir() -> PathBuf {

View file

@@ -1,12 +1,10 @@
 extern crate ra_syntax;
-#[macro_use]
 extern crate test_utils;
 extern crate walkdir;
 
 use std::{
     fmt::Write,
-    fs,
-    path::{Path, PathBuf, Component},
+    path::{PathBuf, Component},
 };
 
 use test_utils::{project_dir, dir_tests, read_text, collect_tests};
@@ -25,28 +23,36 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
-        let file = SourceFileNode::parse(text);
-        let errors = file.errors();
-        assert_eq!(
-            &*errors,
-            &[] as &[ra_syntax::SyntaxError],
-            "There should be no errors in the file {:?}",
-            path.display()
-        );
-        dump_tree(file.syntax())
-    });
-    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
-        let file = SourceFileNode::parse(text);
-        let errors = file.errors();
-        assert_ne!(
-            &*errors,
-            &[] as &[ra_syntax::SyntaxError],
-            "There should be errors in the file {:?}",
-            path.display()
-        );
-        dump_tree(file.syntax())
-    });
+    dir_tests(
+        &test_data_dir(),
+        &["parser/inline/ok", "parser/ok"],
+        |text, path| {
+            let file = SourceFileNode::parse(text);
+            let errors = file.errors();
+            assert_eq!(
+                &*errors,
+                &[] as &[ra_syntax::SyntaxError],
+                "There should be no errors in the file {:?}",
+                path.display()
+            );
+            dump_tree(file.syntax())
+        },
+    );
+    dir_tests(
+        &test_data_dir(),
+        &["parser/err", "parser/inline/err"],
+        |text, path| {
+            let file = SourceFileNode::parse(text);
+            let errors = file.errors();
+            assert_ne!(
+                &*errors,
+                &[] as &[ra_syntax::SyntaxError],
+                "There should be errors in the file {:?}",
+                path.display()
+            );
+            dump_tree(file.syntax())
+        },
+    );
 }
 
 #[test]