Rename ra_hir_ty -> hir_ty

Aleksey Kladov 2020-08-13 16:35:29 +02:00
parent 50f8c1ebf2
commit 6a77ec7bbe
40 changed files with 44 additions and 47 deletions


@@ -0,0 +1,131 @@
//! In certain situations, Rust automatically inserts derefs as necessary: for
//! example, field accesses `foo.bar` still work when `foo` is actually a
//! reference to a type with the field `bar`. This is an approximation of the
//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
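//!
//! A minimal illustration of that behaviour (the `Foo`/`bar` names are
//! hypothetical, not part of this crate):
//!
//! ```
//! struct Foo { bar: u32 }
//!
//! fn get_bar(foo: &&Foo) -> u32 {
//!     // rustc inserts the derefs automatically: this is effectively `(**foo).bar`.
//!     foo.bar
//! }
//! ```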
use std::iter::successors;
use base_db::CrateId;
use hir_def::lang_item::LangItemTarget;
use hir_expand::name::name;
use log::{info, warn};
use crate::{
db::HirDatabase,
traits::{InEnvironment, Solution},
utils::generics,
BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty,
};
const AUTODEREF_RECURSION_LIMIT: usize = 10;
pub fn autoderef<'a>(
db: &'a dyn HirDatabase,
krate: Option<CrateId>,
ty: InEnvironment<Canonical<Ty>>,
) -> impl Iterator<Item = Canonical<Ty>> + 'a {
let InEnvironment { value: ty, environment } = ty;
successors(Some(ty), move |ty| {
deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() })
})
.take(AUTODEREF_RECURSION_LIMIT)
}
pub(crate) fn deref(
db: &dyn HirDatabase,
krate: CrateId,
ty: InEnvironment<&Canonical<Ty>>,
) -> Option<Canonical<Ty>> {
if let Some(derefed) = ty.value.value.builtin_deref() {
Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() })
} else {
deref_by_trait(db, krate, ty)
}
}
fn deref_by_trait(
db: &dyn HirDatabase,
krate: CrateId,
ty: InEnvironment<&Canonical<Ty>>,
) -> Option<Canonical<Ty>> {
let deref_trait = match db.lang_item(krate, "deref".into())? {
LangItemTarget::TraitId(it) => it,
_ => return None,
};
let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
let generic_params = generics(db.upcast(), target.into());
if generic_params.len() != 1 {
// the Target type + Deref trait should only have one generic parameter,
// namely Deref's Self type
return None;
}
// FIXME make the Canonical / bound var handling nicer
let parameters =
Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build();
// Check that the type implements Deref at all
let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
let implements_goal = Canonical {
kinds: ty.value.kinds.clone(),
value: InEnvironment {
value: Obligation::Trait(trait_ref),
environment: ty.environment.clone(),
},
};
if db.trait_solve(krate, implements_goal).is_none() {
return None;
}
// Now do the assoc type projection
let projection = super::traits::ProjectionPredicate {
ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())),
projection_ty: super::ProjectionTy { associated_ty: target, parameters },
};
let obligation = super::Obligation::Projection(projection);
let in_env = InEnvironment { value: obligation, environment: ty.environment };
let canonical =
Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General)));
let solution = db.trait_solve(krate, canonical)?;
match &solution {
Solution::Unique(vars) => {
// FIXME: vars may contain solutions for any inference variables
// that happened to be inside ty. To correctly handle these, we
// would have to pass the solution up to the inference context, but
// that requires a larger refactoring (especially if the deref
// happens during method resolution). So for the moment, we just
// check that we're not in the situation where we would actually
// need to handle the values of the additional variables, i.e.
// they're just being 'passed through'. In the 'standard' case where
// we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
// the case.
// FIXME: if the trait solver decides to truncate the type, these
// assumptions will be broken. We would need to properly introduce
// new variables in that case
for i in 1..vars.0.kinds.len() {
if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
{
warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution);
return None;
}
}
Some(Canonical {
value: vars.0.value[vars.0.value.len() - 1].clone(),
kinds: vars.0.kinds.clone(),
})
}
Solution::Ambig(_) => {
info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution);
None
}
}
}

crates/hir_ty/src/db.rs (new file)

@@ -0,0 +1,158 @@
//! FIXME: write short doc here
use std::sync::Arc;
use arena::map::ArenaMap;
use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use hir_def::{
db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId,
TypeParamId, VariantId,
};
use crate::{
method_resolution::{InherentImpls, TraitImpls},
traits::chalk,
Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig,
ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;
#[salsa::query_group(HirDatabaseStorage)]
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(infer_wait)]
#[salsa::transparent]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
#[salsa::invoke(crate::infer::infer_query)]
fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::cycle(crate::lower::impl_self_ty_recover)]
fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
#[salsa::invoke(crate::lower::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
#[salsa::invoke(crate::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
#[salsa::invoke(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits(
&self,
def: FunctionId,
) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
#[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
fn generic_predicates_for_param(
&self,
param_id: TypeParamId,
) -> Arc<[Binders<GenericPredicate>]>;
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>;
#[salsa::invoke(crate::lower::generic_defaults_query)]
fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>;
#[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
#[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
#[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
// Interned IDs for Chalk integration
#[salsa::interned]
fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
#[salsa::interned]
fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId;
#[salsa::interned]
fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId;
#[salsa::interned]
fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId;
#[salsa::invoke(chalk::associated_ty_data_query)]
fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc<chalk::AssociatedTyDatum>;
#[salsa::invoke(chalk::trait_datum_query)]
fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc<chalk::TraitDatum>;
#[salsa::invoke(chalk::struct_datum_query)]
fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc<chalk::StructDatum>;
#[salsa::invoke(crate::traits::chalk::impl_datum_query)]
fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc<chalk::ImplDatum>;
#[salsa::invoke(crate::traits::chalk::fn_def_datum_query)]
fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc<chalk::FnDefDatum>;
#[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
fn associated_ty_value(
&self,
krate: CrateId,
id: chalk::AssociatedTyValueId,
) -> Arc<chalk::AssociatedTyValue>;
#[salsa::invoke(crate::traits::trait_solve_query)]
fn trait_solve(
&self,
krate: CrateId,
goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
) -> Option<crate::traits::Solution>;
#[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)]
fn program_clauses_for_chalk_env(
&self,
krate: CrateId,
env: chalk_ir::Environment<chalk::Interner>,
) -> chalk_ir::ProgramClauses<chalk::Interner>;
}
fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile::span("infer:wait").detail(|| match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
DefWithBodyId::StaticId(it) => {
db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
}
DefWithBodyId::ConstId(it) => {
db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
}
});
db.infer_query(def)
}
#[test]
fn hir_database_is_object_safe() {
fn _assert_object_safe(_: &dyn HirDatabase) {}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct GlobalTypeParamId(salsa::InternId);
impl_intern_key!(GlobalTypeParamId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedOpaqueTyId(salsa::InternId);
impl_intern_key!(InternedOpaqueTyId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ClosureId(salsa::InternId);
impl_intern_key!(ClosureId);
/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors.
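/// For example, a free function `fn foo()` and a tuple-struct constructor
/// `struct Bar(u32);` are distinct `CallableDefId`s on our side, but both end
/// up behind a single Chalk `FnDefId` via `intern_callable_def`.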
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct InternedCallableDefId(salsa::InternId);
impl_intern_key!(InternedCallableDefId);


@@ -0,0 +1,444 @@
//! FIXME: write short doc here
mod expr;
mod match_check;
mod unsafe_check;
use std::any::Any;
use hir_def::DefWithBodyId;
use hir_expand::diagnostics::{Diagnostic, DiagnosticSink};
use hir_expand::{name::Name, HirFileId, InFile};
use stdx::format_to;
use syntax::{ast, AstPtr, SyntaxNodePtr};
use crate::db::HirDatabase;
pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields};
pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) {
let _p = profile::span("validate_body");
let infer = db.infer(owner);
infer.add_diagnostics(db, owner, sink);
let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink);
validator.validate_body(db);
let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink);
validator.validate_body(db);
}
#[derive(Debug)]
pub struct NoSuchField {
pub file: HirFileId,
pub field: AstPtr<ast::RecordExprField>,
}
impl Diagnostic for NoSuchField {
fn message(&self) -> String {
"no such field".to_string()
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile::new(self.file, self.field.clone().into())
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
pub field_list_parent: AstPtr<ast::RecordExpr>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
pub missed_fields: Vec<Name>,
}
impl Diagnostic for MissingFields {
fn message(&self) -> String {
let mut buf = String::from("Missing structure fields:\n");
for field in &self.missed_fields {
format_to!(buf, "- {}\n", field);
}
buf
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile {
file_id: self.file,
value: self
.field_list_parent_path
.clone()
.map(SyntaxNodePtr::from)
.unwrap_or_else(|| self.field_list_parent.clone().into()),
}
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MissingPatFields {
pub file: HirFileId,
pub field_list_parent: AstPtr<ast::RecordPat>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
pub missed_fields: Vec<Name>,
}
impl Diagnostic for MissingPatFields {
fn message(&self) -> String {
let mut buf = String::from("Missing structure fields:\n");
for field in &self.missed_fields {
format_to!(buf, "- {}\n", field);
}
buf
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile {
file_id: self.file,
value: self
.field_list_parent_path
.clone()
.map(SyntaxNodePtr::from)
.unwrap_or_else(|| self.field_list_parent.clone().into()),
}
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MissingMatchArms {
pub file: HirFileId,
pub match_expr: AstPtr<ast::Expr>,
pub arms: AstPtr<ast::MatchArmList>,
}
impl Diagnostic for MissingMatchArms {
fn message(&self) -> String {
String::from("Missing match arm")
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.match_expr.clone().into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MissingOkInTailExpr {
pub file: HirFileId,
pub expr: AstPtr<ast::Expr>,
}
impl Diagnostic for MissingOkInTailExpr {
fn message(&self) -> String {
"wrap return expression in Ok".to_string()
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.expr.clone().into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct BreakOutsideOfLoop {
pub file: HirFileId,
pub expr: AstPtr<ast::Expr>,
}
impl Diagnostic for BreakOutsideOfLoop {
fn message(&self) -> String {
"break outside of loop".to_string()
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.expr.clone().into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MissingUnsafe {
pub file: HirFileId,
pub expr: AstPtr<ast::Expr>,
}
impl Diagnostic for MissingUnsafe {
fn message(&self) -> String {
format!("This operation is unsafe and requires an unsafe function or block")
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.expr.clone().into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MismatchedArgCount {
pub file: HirFileId,
pub call_expr: AstPtr<ast::Expr>,
pub expected: usize,
pub found: usize,
}
impl Diagnostic for MismatchedArgCount {
fn message(&self) -> String {
let s = if self.expected == 1 { "" } else { "s" };
format!("Expected {} argument{}, found {}", self.expected, s, self.found)
}
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.call_expr.clone().into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
fn is_experimental(&self) -> bool {
true
}
}
#[cfg(test)]
mod tests {
use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt};
use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId};
use hir_expand::{
db::AstDatabase,
diagnostics::{Diagnostic, DiagnosticSinkBuilder},
};
use rustc_hash::FxHashMap;
use syntax::{TextRange, TextSize};
use crate::{diagnostics::validate_body, test_db::TestDB};
impl TestDB {
fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate);
let mut fns = Vec::new();
for (module_id, _) in crate_def_map.modules.iter() {
for decl in crate_def_map[module_id].scope.declarations() {
if let ModuleDefId::FunctionId(f) = decl {
fns.push(f)
}
}
for impl_id in crate_def_map[module_id].scope.impls() {
let impl_data = self.impl_data(impl_id);
for item in impl_data.items.iter() {
if let AssocItemId::FunctionId(f) = item {
fns.push(*f)
}
}
}
}
for f in fns {
let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
validate_body(self, f.into(), &mut sink);
}
}
}
}
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let db = TestDB::with_files(ra_fixture);
let annotations = db.extract_annotations();
let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
db.diagnostics(|d| {
let src = d.display_source();
let root = db.parse_or_expand(src.file_id).unwrap();
// FIXME: macros...
let file_id = src.file_id.original_file(&db);
let range = src.value.to_node(&root).text_range();
let message = d.message().to_owned();
actual.entry(file_id).or_default().push((range, message));
});
for (file_id, diags) in actual.iter_mut() {
diags.sort_by_key(|it| it.0.start());
let text = db.file_text(*file_id);
// For multiline spans, place them on line start
for (range, content) in diags {
if text[*range].contains('\n') {
*range = TextRange::new(range.start(), range.start() + TextSize::from(1));
*content = format!("... {}", content);
}
}
}
assert_eq!(annotations, actual);
}
#[test]
fn no_such_field_diagnostics() {
check_diagnostics(
r#"
struct S { foo: i32, bar: () }
impl S {
fn new() -> S {
S {
//^ Missing structure fields:
//| - bar
foo: 92,
baz: 62,
//^^^^^^^ no such field
}
}
}
"#,
);
}
#[test]
fn no_such_field_with_feature_flag_diagnostics() {
check_diagnostics(
r#"
//- /lib.rs crate:foo cfg:feature=foo
struct MyStruct {
my_val: usize,
#[cfg(feature = "foo")]
bar: bool,
}
impl MyStruct {
#[cfg(feature = "foo")]
pub(crate) fn new(my_val: usize, bar: bool) -> Self {
Self { my_val, bar }
}
#[cfg(not(feature = "foo"))]
pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
Self { my_val }
}
}
"#,
);
}
#[test]
fn no_such_field_enum_with_feature_flag_diagnostics() {
check_diagnostics(
r#"
//- /lib.rs crate:foo cfg:feature=foo
enum Foo {
#[cfg(not(feature = "foo"))]
Buz,
#[cfg(feature = "foo")]
Bar,
Baz
}
fn test_fn(f: Foo) {
match f {
Foo::Bar => {},
Foo::Baz => {},
}
}
"#,
);
}
#[test]
fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
check_diagnostics(
r#"
//- /lib.rs crate:foo cfg:feature=foo
struct S {
#[cfg(feature = "foo")]
foo: u32,
#[cfg(not(feature = "foo"))]
bar: u32,
}
impl S {
#[cfg(feature = "foo")]
fn new(foo: u32) -> Self {
Self { foo }
}
#[cfg(not(feature = "foo"))]
fn new(bar: u32) -> Self {
Self { bar }
}
fn new2(bar: u32) -> Self {
#[cfg(feature = "foo")]
{ Self { foo: bar } }
#[cfg(not(feature = "foo"))]
{ Self { bar } }
}
fn new2(val: u32) -> Self {
Self {
#[cfg(feature = "foo")]
foo: val,
#[cfg(not(feature = "foo"))]
bar: val,
}
}
}
"#,
);
}
#[test]
fn no_such_field_with_type_macro() {
check_diagnostics(
r#"
macro_rules! Type { () => { u32 }; }
struct Foo { bar: Type![] }
impl Foo {
fn new() -> Self {
Foo { bar: 0 }
}
}
"#,
);
}
#[test]
fn missing_record_pat_field_diagnostic() {
check_diagnostics(
r#"
struct S { foo: i32, bar: () }
fn baz(s: S) {
let S { foo: _ } = s;
//^ Missing structure fields:
//| - bar
}
"#,
);
}
#[test]
fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
check_diagnostics(
r"
struct S { foo: i32, bar: () }
fn baz(s: S) -> i32 {
match s {
S { foo, .. } => foo,
}
}
",
)
}
#[test]
fn break_outside_of_loop() {
check_diagnostics(
r#"
fn foo() { break; }
//^^^^^ break outside of loop
"#,
);
}
}


@@ -0,0 +1,569 @@
//! FIXME: write short doc here
use std::sync::Arc;
use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId};
use hir_expand::diagnostics::DiagnosticSink;
use rustc_hash::FxHashSet;
use syntax::{ast, AstPtr};
use crate::{
db::HirDatabase,
diagnostics::{
match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness},
MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields,
},
utils::variant_data,
ApplicationTy, InferenceResult, Ty, TypeCtor,
};
pub use hir_def::{
body::{
scope::{ExprScopes, ScopeEntry, ScopeId},
Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource,
},
expr::{
ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
},
src::HasSource,
LocalFieldId, Lookup, VariantId,
};
pub(super) struct ExprValidator<'a, 'b: 'a> {
owner: DefWithBodyId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
}
impl<'a, 'b> ExprValidator<'a, 'b> {
pub(super) fn new(
owner: DefWithBodyId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
) -> ExprValidator<'a, 'b> {
ExprValidator { owner, infer, sink }
}
pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
let body = db.body(self.owner.into());
for (id, expr) in body.exprs.iter() {
if let Some((variant_def, missed_fields, true)) =
record_literal_missing_fields(db, &self.infer, id, expr)
{
self.create_record_literal_missing_fields_diagnostic(
id,
db,
variant_def,
missed_fields,
);
}
match expr {
Expr::Match { expr, arms } => {
self.validate_match(id, *expr, arms, db, self.infer.clone());
}
Expr::Call { .. } | Expr::MethodCall { .. } => {
self.validate_call(db, id, expr);
}
_ => {}
}
}
for (id, pat) in body.pats.iter() {
if let Some((variant_def, missed_fields, true)) =
record_pattern_missing_fields(db, &self.infer, id, pat)
{
self.create_record_pattern_missing_fields_diagnostic(
id,
db,
variant_def,
missed_fields,
);
}
}
let body_expr = &body[body.body_expr];
if let Expr::Block { tail: Some(t), .. } = body_expr {
self.validate_results_in_tail_expr(body.body_expr, *t, db);
}
}
fn create_record_literal_missing_fields_diagnostic(
&mut self,
id: ExprId,
db: &dyn HirDatabase,
variant_def: VariantId,
missed_fields: Vec<LocalFieldId>,
) {
// XXX: only look at source_map if we do have missing fields
let (_, source_map) = db.body_with_source_map(self.owner.into());
if let Ok(source_ptr) = source_map.expr_syntax(id) {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) {
if let Some(_) = record_expr.record_expr_field_list() {
let variant_data = variant_data(db.upcast(), variant_def);
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
.collect();
self.sink.push(MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&record_expr),
field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)),
missed_fields,
})
}
}
}
}
fn create_record_pattern_missing_fields_diagnostic(
&mut self,
id: PatId,
db: &dyn HirDatabase,
variant_def: VariantId,
missed_fields: Vec<LocalFieldId>,
) {
// XXX: only look at source_map if we do have missing fields
let (_, source_map) = db.body_with_source_map(self.owner.into());
if let Ok(source_ptr) = source_map.pat_syntax(id) {
if let Some(expr) = source_ptr.value.as_ref().left() {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
if let Some(_) = record_pat.record_pat_field_list() {
let variant_data = variant_data(db.upcast(), variant_def);
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
.collect();
self.sink.push(MissingPatFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&record_pat),
field_list_parent_path: record_pat
.path()
.map(|path| AstPtr::new(&path)),
missed_fields,
})
}
}
}
}
}
fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> {
// Check that the number of arguments matches the number of parameters.
// FIXME: Due to shortcomings in the current type system implementation, only emit this
// diagnostic if there are no type mismatches in the containing function.
if self.infer.type_mismatches.iter().next().is_some() {
return Some(());
}
let is_method_call = matches!(expr, Expr::MethodCall { .. });
let (sig, args) = match expr {
Expr::Call { callee, args } => {
let callee = &self.infer.type_of_expr[*callee];
let sig = callee.callable_sig(db)?;
(sig, args.clone())
}
Expr::MethodCall { receiver, args, .. } => {
let mut args = args.clone();
args.insert(0, *receiver);
// FIXME: note that we erase information about substs here. This
// is not right, but, luckily, doesn't matter as we care only
// about the number of params
let callee = self.infer.method_resolution(call_id)?;
let sig = db.callable_item_signature(callee.into()).value;
(sig, args)
}
_ => return None,
};
if sig.is_varargs {
return None;
}
let params = sig.params();
let mut param_count = params.len();
let mut arg_count = args.len();
if arg_count != param_count {
let (_, source_map) = db.body_with_source_map(self.owner.into());
if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
if is_method_call {
param_count -= 1;
arg_count -= 1;
}
self.sink.push(MismatchedArgCount {
file: source_ptr.file_id,
call_expr: source_ptr.value,
expected: param_count,
found: arg_count,
});
}
}
None
}
fn validate_match(
&mut self,
id: ExprId,
match_expr: ExprId,
arms: &[MatchArm],
db: &dyn HirDatabase,
infer: Arc<InferenceResult>,
) {
let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
db.body_with_source_map(self.owner.into());
let match_expr_ty = match infer.type_of_expr.get(match_expr) {
Some(ty) => ty,
// If we can't resolve the type of the match expression
// we cannot perform exhaustiveness checks.
None => return,
};
let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db };
let pats = arms.iter().map(|arm| arm.pat);
let mut seen = Matrix::empty();
for pat in pats {
if let Some(pat_ty) = infer.type_of_pat.get(pat) {
// We only include patterns whose type matches the type
// of the match expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
// will automatically de-reference the match expression type if
// necessary.
//
// FIXME we should use the type checker for this.
if pat_ty == match_expr_ty
|| match_expr_ty
.as_reference()
.map(|(match_expr_ty, _)| match_expr_ty == pat_ty)
.unwrap_or(false)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
// to the matrix here.
let v = PatStack::from_pattern(pat);
seen.push(&cx, v);
continue;
}
}
// If we can't resolve the type of a pattern, or the pattern type doesn't
// fit the match expression, we skip this diagnostic. Skipping the entire
// diagnostic rather than just not including this match arm is preferred
// to avoid the chance of false positives.
return;
}
match is_useful(&cx, &seen, &PatStack::from_wild()) {
Ok(Usefulness::Useful) => (),
// if a wildcard pattern is not useful, then all patterns are covered
Ok(Usefulness::NotUseful) => return,
// this path is for unimplemented checks, so we err on the side of not
// reporting any errors
_ => return,
}
if let Ok(source_ptr) = source_map.expr_syntax(id) {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
if let (Some(match_expr), Some(arms)) =
(match_expr.expr(), match_expr.match_arm_list())
{
self.sink.push(MissingMatchArms {
file: source_ptr.file_id,
match_expr: AstPtr::new(&match_expr),
arms: AstPtr::new(&arms),
})
}
}
}
}
fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
// the mismatch will be on the whole block currently
let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
Some(m) => m,
None => return,
};
let core_result_path = path![core::result::Result];
let resolver = self.owner.resolver(db.upcast());
let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) {
Some(it) => it,
_ => return,
};
let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum));
let params = match &mismatch.expected {
Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => {
parameters
}
_ => return,
};
if params.len() == 2 && params[0] == mismatch.actual {
let (_, source_map) = db.body_with_source_map(self.owner.into());
if let Ok(source_ptr) = source_map.expr_syntax(id) {
self.sink
.push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value });
}
}
}
}
pub fn record_literal_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
let (fields, exhaustive) = match expr {
Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
_ => return None,
};
let variant_def = infer.variant_resolution_for_expr(id)?;
if let VariantId::UnionId(_) = variant_def {
return None;
}
let variant_data = variant_data(db.upcast(), variant_def);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields, exhaustive))
}
pub fn record_pattern_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
id: PatId,
pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
let (fields, exhaustive) = match pat {
Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
_ => return None,
};
let variant_def = infer.variant_resolution_for_pat(id)?;
if let VariantId::UnionId(_) = variant_def {
return None;
}
let variant_data = variant_data(db.upcast(), variant_def);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields, exhaustive))
}
#[cfg(test)]
mod tests {
use crate::diagnostics::tests::check_diagnostics;
#[test]
fn simple_free_fn_zero() {
check_diagnostics(
r#"
fn zero() {}
fn f() { zero(1); }
//^^^^^^^ Expected 0 arguments, found 1
"#,
);
check_diagnostics(
r#"
fn zero() {}
fn f() { zero(); }
"#,
);
}
#[test]
fn simple_free_fn_one() {
check_diagnostics(
r#"
fn one(arg: u8) {}
fn f() { one(); }
//^^^^^ Expected 1 argument, found 0
"#,
);
check_diagnostics(
r#"
fn one(arg: u8) {}
fn f() { one(1); }
"#,
);
}
#[test]
fn method_as_fn() {
check_diagnostics(
r#"
struct S;
impl S { fn method(&self) {} }
fn f() {
S::method();
} //^^^^^^^^^^^ Expected 1 argument, found 0
"#,
);
check_diagnostics(
r#"
struct S;
impl S { fn method(&self) {} }
fn f() {
S::method(&S);
S.method();
}
"#,
);
}
#[test]
fn method_with_arg() {
check_diagnostics(
r#"
struct S;
impl S { fn method(&self, arg: u8) {} }
fn f() {
S.method();
} //^^^^^^^^^^ Expected 1 argument, found 0
"#,
);
check_diagnostics(
r#"
struct S;
impl S { fn method(&self, arg: u8) {} }
fn f() {
S::method(&S, 0);
S.method(1);
}
"#,
);
}
#[test]
fn tuple_struct() {
check_diagnostics(
r#"
struct Tup(u8, u16);
fn f() {
Tup(0);
} //^^^^^^ Expected 2 arguments, found 1
"#,
)
}
#[test]
fn enum_variant() {
check_diagnostics(
r#"
enum En { Variant(u8, u16), }
fn f() {
En::Variant(0);
} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1
"#,
)
}
#[test]
fn enum_variant_type_macro() {
check_diagnostics(
r#"
macro_rules! Type {
() => { u32 };
}
enum Foo {
Bar(Type![])
}
impl Foo {
fn new() {
Foo::Bar(0);
Foo::Bar(0, 1);
//^^^^^^^^^^^^^^ Expected 1 argument, found 2
Foo::Bar();
//^^^^^^^^^^ Expected 1 argument, found 0
}
}
"#,
);
}
#[test]
fn varargs() {
check_diagnostics(
r#"
extern "C" {
fn fixed(fixed: u8);
fn varargs(fixed: u8, ...);
fn varargs2(...);
}
fn f() {
unsafe {
fixed(0);
fixed(0, 1);
//^^^^^^^^^^^ Expected 1 argument, found 2
varargs(0);
varargs(0, 1);
varargs2();
varargs2(0);
varargs2(0, 1);
}
}
"#,
)
}
#[test]
fn arg_count_lambda() {
check_diagnostics(
r#"
fn main() {
let f = |()| ();
f();
//^^^ Expected 1 argument, found 0
f(());
f((), ());
//^^^^^^^^^ Expected 1 argument, found 2
}
"#,
)
}
}

File diff suppressed because it is too large.


@@ -0,0 +1,205 @@
//! Provides validations for unsafe code. Currently checks that unsafe
//! operations are only used inside `unsafe` blocks or `unsafe` functions.
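//!
//! A small, hypothetical example of the kind of code this cares about:
//! dereferencing a raw pointer is only allowed inside an `unsafe` block or an
//! `unsafe fn`.
//!
//! ```
//! fn read(ptr: *const u32) -> u32 {
//!     // moving the dereference out of the block would be reported as missing `unsafe`
//!     unsafe { *ptr }
//! }
//! ```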
use std::sync::Arc;
use hir_def::{
body::Body,
expr::{Expr, ExprId, UnaryOp},
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
DefWithBodyId,
};
use hir_expand::diagnostics::DiagnosticSink;
use crate::{
db::HirDatabase, diagnostics::MissingUnsafe, lower::CallableDefId, ApplicationTy,
InferenceResult, Ty, TypeCtor,
};
pub(super) struct UnsafeValidator<'a, 'b: 'a> {
owner: DefWithBodyId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
}
impl<'a, 'b> UnsafeValidator<'a, 'b> {
pub(super) fn new(
owner: DefWithBodyId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
) -> UnsafeValidator<'a, 'b> {
UnsafeValidator { owner, infer, sink }
}
pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
let def = self.owner.into();
let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def);
let is_unsafe = match self.owner {
DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe,
DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
};
if is_unsafe
|| unsafe_expressions
.iter()
.filter(|unsafe_expr| !unsafe_expr.inside_unsafe_block)
.count()
== 0
{
return;
}
let (_, body_source) = db.body_with_source_map(def);
for unsafe_expr in unsafe_expressions {
if !unsafe_expr.inside_unsafe_block {
if let Ok(in_file) = body_source.as_ref().expr_syntax(unsafe_expr.expr) {
self.sink.push(MissingUnsafe { file: in_file.file_id, expr: in_file.value })
}
}
}
}
}
pub struct UnsafeExpr {
pub expr: ExprId,
pub inside_unsafe_block: bool,
}
pub fn unsafe_expressions(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
) -> Vec<UnsafeExpr> {
let mut unsafe_exprs = vec![];
let body = db.body(def);
walk_unsafe(&mut unsafe_exprs, db, infer, def, &body, body.body_expr, false);
unsafe_exprs
}
fn walk_unsafe(
unsafe_exprs: &mut Vec<UnsafeExpr>,
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
current: ExprId,
inside_unsafe_block: bool,
) {
let expr = &body.exprs[current];
match expr {
Expr::Call { callee, .. } => {
let ty = &infer[*callee];
if let &Ty::Apply(ApplicationTy {
ctor: TypeCtor::FnDef(CallableDefId::FunctionId(func)),
..
}) = ty
{
if db.function_data(func).is_unsafe {
unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
}
Expr::Path(path) => {
let resolver = resolver_for_expr(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
if db.static_data(id).mutable {
unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
}
Expr::MethodCall { .. } => {
if infer
.method_resolution(current)
.map(|func| db.function_data(func).is_unsafe)
.unwrap_or(false)
{
unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if let Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }) = &infer[*expr] {
unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
Expr::Unsafe { body: child } => {
return walk_unsafe(unsafe_exprs, db, infer, def, body, *child, true);
}
_ => {}
}
expr.walk_child_exprs(|child| {
walk_unsafe(unsafe_exprs, db, infer, def, body, child, inside_unsafe_block);
});
}
#[cfg(test)]
mod tests {
use crate::diagnostics::tests::check_diagnostics;
#[test]
fn missing_unsafe_diagnostic_with_raw_ptr() {
check_diagnostics(
r#"
fn main() {
let x = &5 as *const usize;
unsafe { let y = *x; }
let z = *x;
} //^^ This operation is unsafe and requires an unsafe function or block
"#,
)
}
#[test]
fn missing_unsafe_diagnostic_with_unsafe_call() {
check_diagnostics(
r#"
struct HasUnsafe;
impl HasUnsafe {
unsafe fn unsafe_fn(&self) {
let x = &5 as *const usize;
let y = *x;
}
}
unsafe fn unsafe_fn() {
let x = &5 as *const usize;
let y = *x;
}
fn main() {
unsafe_fn();
//^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
HasUnsafe.unsafe_fn();
//^^^^^^^^^^^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
unsafe {
unsafe_fn();
HasUnsafe.unsafe_fn();
}
}
"#,
);
}
#[test]
fn missing_unsafe_diagnostic_with_static_mut() {
check_diagnostics(
r#"
struct Ty {
a: u8,
}
static mut static_mut: Ty = Ty { a: 0 };
fn main() {
let x = static_mut.a;
//^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
unsafe {
let x = static_mut.a;
}
}
"#,
);
}
}


@@ -0,0 +1,631 @@
//! FIXME: write short doc here
use std::fmt;
use crate::{
db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate,
Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
};
use hir_def::{
find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId,
Lookup, ModuleId,
};
use hir_expand::name::Name;
pub struct HirFormatter<'a> {
pub db: &'a dyn HirDatabase,
fmt: &'a mut dyn fmt::Write,
buf: String,
curr_size: usize,
pub(crate) max_size: Option<usize>,
omit_verbose_types: bool,
display_target: DisplayTarget,
}
pub trait HirDisplay {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>;
/// Returns a `Display`able type that is human-readable.
/// Use this for showing types to the user (e.g. diagnostics)
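/// (For instance, a diagnostic message can interpolate it with
/// `format!("expected {}", ty.display(db))`, assuming some `ty: Ty` and
/// `db: &dyn HirDatabase` are in scope.)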
fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
{
HirDisplayWrapper {
db,
t: self,
max_size: None,
omit_verbose_types: false,
display_target: DisplayTarget::Diagnostics,
}
}
/// Returns a `Display`able type that is human-readable and tries to be succinct.
/// Use this for showing types to the user where space is constrained (e.g. doc popups)
fn display_truncated<'a>(
&'a self,
db: &'a dyn HirDatabase,
max_size: Option<usize>,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
{
HirDisplayWrapper {
db,
t: self,
max_size,
omit_verbose_types: true,
display_target: DisplayTarget::Diagnostics,
}
}
/// Returns a String representation of `self` that can be inserted into the given module.
/// Use this when generating code (e.g. assists)
fn display_source_code<'a>(
&'a self,
db: &'a dyn HirDatabase,
module_id: ModuleId,
) -> Result<String, DisplaySourceCodeError> {
let mut result = String::new();
match self.hir_fmt(&mut HirFormatter {
db,
fmt: &mut result,
buf: String::with_capacity(20),
curr_size: 0,
max_size: None,
omit_verbose_types: false,
display_target: DisplayTarget::SourceCode { module_id },
}) {
Ok(()) => {}
Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
};
Ok(result)
}
}
impl<'a> HirFormatter<'a> {
pub fn write_joined<T: HirDisplay>(
&mut self,
iter: impl IntoIterator<Item = T>,
sep: &str,
) -> Result<(), HirDisplayError> {
let mut first = true;
for e in iter {
if !first {
write!(self, "{}", sep)?;
}
first = false;
e.hir_fmt(self)?;
}
Ok(())
}
/// This allows using the `write!` macro directly with a `HirFormatter`.
pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> {
// We write to a buffer first to track output size
self.buf.clear();
fmt::write(&mut self.buf, args)?;
self.curr_size += self.buf.len();
// Then we write to the internal formatter from the buffer
self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
}
pub fn should_truncate(&self) -> bool {
if let Some(max_size) = self.max_size {
self.curr_size >= max_size
} else {
false
}
}
pub fn omit_verbose_types(&self) -> bool {
self.omit_verbose_types
}
}
#[derive(Clone, Copy)]
enum DisplayTarget {
/// Display types for inlays, doc popups, autocompletion, etc...
/// Showing `{unknown}` or not qualifying paths is fine here.
/// There's no reason for this to fail.
Diagnostics,
/// Display types for inserting them in source files.
/// The generated code should compile, so paths need to be qualified.
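/// For example (illustrative): `Diagnostics` may happily render `Vec<{unknown}>`,
/// while `SourceCode` must produce a path that resolves from `module_id` and
/// otherwise fails with `DisplaySourceCodeError::PathNotFound`.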
SourceCode { module_id: ModuleId },
}
impl DisplayTarget {
fn is_source_code(&self) -> bool {
matches!(self, Self::SourceCode {..})
}
}
#[derive(Debug)]
pub enum DisplaySourceCodeError {
PathNotFound,
}
pub enum HirDisplayError {
/// Errors that can occur when generating source code
DisplaySourceCodeError(DisplaySourceCodeError),
/// `FmtError` is required to be compatible with std::fmt::Display
FmtError,
}
impl From<fmt::Error> for HirDisplayError {
fn from(_: fmt::Error) -> Self {
Self::FmtError
}
}
pub struct HirDisplayWrapper<'a, T> {
db: &'a dyn HirDatabase,
t: &'a T,
max_size: Option<usize>,
omit_verbose_types: bool,
display_target: DisplayTarget,
}
impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
where
T: HirDisplay,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.t.hir_fmt(&mut HirFormatter {
db: self.db,
fmt: f,
buf: String::with_capacity(20),
curr_size: 0,
max_size: self.max_size,
omit_verbose_types: self.omit_verbose_types,
display_target: self.display_target,
}) {
Ok(()) => Ok(()),
Err(HirDisplayError::FmtError) => Err(fmt::Error),
Err(HirDisplayError::DisplaySourceCodeError(_)) => {
// This should never happen
panic!("HirDisplay failed when calling Display::fmt!")
}
}
}
}
const TYPE_HINT_TRUNCATION: &str = "…";
impl HirDisplay for &Ty {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
HirDisplay::hir_fmt(*self, f)
}
}
impl HirDisplay for ApplicationTy {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION);
}
match self.ctor {
TypeCtor::Bool => write!(f, "bool")?,
TypeCtor::Char => write!(f, "char")?,
TypeCtor::Int(t) => write!(f, "{}", t)?,
TypeCtor::Float(t) => write!(f, "{}", t)?,
TypeCtor::Str => write!(f, "str")?,
TypeCtor::Slice => {
let t = self.parameters.as_single();
write!(f, "[{}]", t.display(f.db))?;
}
TypeCtor::Array => {
let t = self.parameters.as_single();
write!(f, "[{}; _]", t.display(f.db))?;
}
TypeCtor::RawPtr(m) => {
let t = self.parameters.as_single();
write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
}
TypeCtor::Ref(m) => {
let t = self.parameters.as_single();
let ty_display = if f.omit_verbose_types() {
t.display_truncated(f.db, f.max_size)
} else {
t.display(f.db)
};
write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
}
TypeCtor::Never => write!(f, "!")?,
TypeCtor::Tuple { .. } => {
let ts = &self.parameters;
if ts.len() == 1 {
write!(f, "({},)", ts[0].display(f.db))?;
} else {
write!(f, "(")?;
f.write_joined(&*ts.0, ", ")?;
write!(f, ")")?;
}
}
TypeCtor::FnPtr { is_varargs, .. } => {
let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs);
write!(f, "fn(")?;
f.write_joined(sig.params(), ", ")?;
if is_varargs {
if sig.params().is_empty() {
write!(f, "...")?;
} else {
write!(f, ", ...")?;
}
}
write!(f, ")")?;
let ret = sig.ret();
if *ret != Ty::unit() {
let ret_display = if f.omit_verbose_types() {
ret.display_truncated(f.db, f.max_size)
} else {
ret.display(f.db)
};
write!(f, " -> {}", ret_display)?;
}
}
TypeCtor::FnDef(def) => {
let sig = f.db.callable_item_signature(def).subst(&self.parameters);
match def {
CallableDefId::FunctionId(ff) => {
write!(f, "fn {}", f.db.function_data(ff).name)?
}
CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
CallableDefId::EnumVariantId(e) => {
write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
}
};
if self.parameters.len() > 0 {
let generics = generics(f.db.upcast(), def.into());
let (parent_params, self_param, type_params, _impl_trait_params) =
generics.provenance_split();
let total_len = parent_params + self_param + type_params;
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
if total_len > 0 {
write!(f, "<")?;
f.write_joined(&self.parameters.0[..total_len], ", ")?;
write!(f, ">")?;
}
}
write!(f, "(")?;
f.write_joined(sig.params(), ", ")?;
write!(f, ")")?;
let ret = sig.ret();
if *ret != Ty::unit() {
let ret_display = if f.omit_verbose_types() {
ret.display_truncated(f.db, f.max_size)
} else {
ret.display(f.db)
};
write!(f, " -> {}", ret_display)?;
}
}
TypeCtor::Adt(def_id) => {
match f.display_target {
DisplayTarget::Diagnostics => {
let name = match def_id {
AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
};
write!(f, "{}", name)?;
}
DisplayTarget::SourceCode { module_id } => {
if let Some(path) = find_path::find_path(
f.db.upcast(),
ItemInNs::Types(def_id.into()),
module_id,
) {
write!(f, "{}", path)?;
} else {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::PathNotFound,
));
}
}
}
if self.parameters.len() > 0 {
let parameters_to_write =
if f.display_target.is_source_code() || f.omit_verbose_types() {
match self
.ctor
.as_generic_def()
.map(|generic_def_id| f.db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
None => self.parameters.0.as_ref(),
Some(default_parameters) => {
let mut default_from = 0;
for (i, parameter) in self.parameters.iter().enumerate() {
match (parameter, default_parameters.get(i)) {
(&Ty::Unknown, _) | (_, None) => {
default_from = i + 1;
}
(_, Some(default_parameter)) => {
let actual_default = default_parameter
.clone()
.subst(&self.parameters.prefix(i));
if parameter != &actual_default {
default_from = i + 1;
}
}
}
}
&self.parameters.0[0..default_from]
}
}
} else {
self.parameters.0.as_ref()
};
if !parameters_to_write.is_empty() {
write!(f, "<")?;
f.write_joined(parameters_to_write, ", ")?;
write!(f, ">")?;
}
}
}
TypeCtor::AssociatedType(type_alias) => {
let trait_ = match type_alias.lookup(f.db.upcast()).container {
AssocContainerId::TraitId(it) => it,
_ => panic!("not an associated type"),
};
let trait_ = f.db.trait_data(trait_);
let type_alias = f.db.type_alias_data(type_alias);
write!(f, "{}::{}", trait_.name, type_alias.name)?;
if self.parameters.len() > 0 {
write!(f, "<")?;
f.write_joined(&*self.parameters.0, ", ")?;
write!(f, ">")?;
}
}
TypeCtor::OpaqueType(opaque_ty_id) => {
let bounds = match opaque_ty_id {
OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
let datas =
f.db.return_type_impl_traits(func).expect("impl trait id without data");
let data = (*datas)
.as_ref()
.map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
data.subst(&self.parameters)
}
};
write!(f, "impl ")?;
write_bounds_like_dyn_trait(&bounds.value, f)?;
// FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
}
TypeCtor::Closure { .. } => {
let sig = self.parameters[0].callable_sig(f.db);
if let Some(sig) = sig {
if sig.params().is_empty() {
write!(f, "||")?;
} else if f.omit_verbose_types() {
write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
} else {
write!(f, "|")?;
f.write_joined(sig.params(), ", ")?;
write!(f, "|")?;
};
let ret_display = if f.omit_verbose_types() {
sig.ret().display_truncated(f.db, f.max_size)
} else {
sig.ret().display(f.db)
};
write!(f, " -> {}", ret_display)?;
} else {
write!(f, "{{closure}}")?;
}
}
}
Ok(())
}
}
impl HirDisplay for ProjectionTy {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION);
}
let trait_ = f.db.trait_data(self.trait_(f.db));
write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?;
if self.parameters.len() > 1 {
write!(f, "<")?;
f.write_joined(&self.parameters[1..], ", ")?;
write!(f, ">")?;
}
write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
Ok(())
}
}
impl HirDisplay for Ty {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION);
}
match self {
Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
Ty::Placeholder(id) => {
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.types[id.local_id];
match param_data.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
}
TypeParamProvenance::ArgumentImplTrait => {
write!(f, "impl ")?;
let bounds = f.db.generic_predicates_for_param(*id);
let substs = Substs::type_params_for_generics(&generics);
write_bounds_like_dyn_trait(
&bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
f,
)?;
}
}
}
Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?,
Ty::Dyn(predicates) => {
write!(f, "dyn ")?;
write_bounds_like_dyn_trait(predicates, f)?;
}
Ty::Opaque(opaque_ty) => {
let bounds = match opaque_ty.opaque_ty_id {
OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
let datas =
f.db.return_type_impl_traits(func).expect("impl trait id without data");
let data = (*datas)
.as_ref()
.map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
data.subst(&opaque_ty.parameters)
}
};
write!(f, "impl ")?;
write_bounds_like_dyn_trait(&bounds.value, f)?;
}
Ty::Unknown => write!(f, "{{unknown}}")?,
Ty::Infer(..) => write!(f, "_")?,
}
Ok(())
}
}
fn write_bounds_like_dyn_trait(
predicates: &[GenericPredicate],
f: &mut HirFormatter,
) -> Result<(), HirDisplayError> {
// Note: This code is written to produce nice results (i.e.
// corresponding to surface Rust) for types that can occur in
// actual Rust. It will have weird results if the predicates
// aren't as expected (i.e. self types = $0, projection
// predicates for a certain trait come after the Implemented
// predicate for that trait).
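// For example, the predicate list [Implemented(Iterator), Projection(Item = u32)]
// is rendered as `Iterator<Item = u32>`.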
let mut first = true;
let mut angle_open = false;
for p in predicates.iter() {
match p {
GenericPredicate::Implemented(trait_ref) => {
if angle_open {
write!(f, ">")?;
}
if !first {
write!(f, " + ")?;
}
// We assume that the self type is $0 (i.e. the
// existential) here, which is the only thing that's
// possible in actual Rust, and hence don't print it
write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?;
if trait_ref.substs.len() > 1 {
write!(f, "<")?;
f.write_joined(&trait_ref.substs[1..], ", ")?;
// there might be assoc type bindings, so we leave the angle brackets open
angle_open = true;
}
}
GenericPredicate::Projection(projection_pred) => {
// in types in actual Rust, these will always come
// after the corresponding Implemented predicate
if angle_open {
write!(f, ", ")?;
} else {
write!(f, "<")?;
angle_open = true;
}
let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty);
write!(f, "{} = ", type_alias.name)?;
projection_pred.ty.hir_fmt(f)?;
}
GenericPredicate::Error => {
if angle_open {
// impl Trait<X, {error}>
write!(f, ", ")?;
} else if !first {
// impl Trait + {error}
write!(f, " + ")?;
}
p.hir_fmt(f)?;
}
}
first = false;
}
if angle_open {
write!(f, ">")?;
}
Ok(())
}
impl TraitRef {
fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> {
if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION);
}
self.substs[0].hir_fmt(f)?;
if use_as {
write!(f, " as ")?;
} else {
write!(f, ": ")?;
}
write!(f, "{}", f.db.trait_data(self.trait_).name)?;
if self.substs.len() > 1 {
write!(f, "<")?;
f.write_joined(&self.substs[1..], ", ")?;
write!(f, ">")?;
}
Ok(())
}
}
impl HirDisplay for TraitRef {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
self.hir_fmt_ext(f, false)
}
}
impl HirDisplay for &GenericPredicate {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
HirDisplay::hir_fmt(*self, f)
}
}
impl HirDisplay for GenericPredicate {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION);
}
match self {
GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
GenericPredicate::Projection(projection_pred) => {
write!(f, "<")?;
projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
write!(
f,
">::{} = {}",
f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
projection_pred.ty.display(f.db)
)?;
}
GenericPredicate::Error => write!(f, "{{error}}")?,
}
Ok(())
}
}
impl HirDisplay for Obligation {
fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
Ok(match self {
Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?,
Obligation::Projection(proj) => write!(
f,
"Normalize({} => {})",
proj.projection_ty.display(f.db),
proj.ty.display(f.db)
)?,
})
}
}

crates/hir_ty/src/infer.rs (new file)

@@ -0,0 +1,802 @@
//! Type inference, i.e. the process of walking through the code and determining
//! the type of each expression and pattern.
//!
//! For type inference, compare the implementations in rustc (the various
//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
//! inference here is the `infer` function, which infers the types of all
//! expressions in a given function.
//!
//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
//! which represent currently unknown types; as we walk through the expressions,
//! we might determine that certain variables need to be equal to each other, or
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.
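//!
//! A minimal, hypothetical source-level example of such a constraint: both
//! branches of an `if` must have the same type, so the type variable created
//! for the `else` branch below is unified with `u32`.
//!
//! ```
//! fn example(flag: bool) -> u32 {
//!     let x = if flag { 1u32 } else { Default::default() };
//!     x
//! }
//! ```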
use std::borrow::Cow;
use std::mem;
use std::ops::Index;
use std::sync::Arc;
use arena::map::ArenaMap;
use hir_def::{
body::Body,
data::{ConstData, FunctionData, StaticData},
expr::{BindingAnnotation, ExprId, PatId},
lang_item::LangItemTarget,
path::{path, Path},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{Mutability, TypeRef},
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId,
TypeAliasId, VariantId,
};
use hir_expand::{diagnostics::DiagnosticSink, name::name};
use rustc_hash::FxHashMap;
use stdx::impl_from;
use syntax::SmolStr;
use super::{
primitive::{FloatTy, IntTy},
traits::{Guidance, Obligation, ProjectionPredicate, Solution},
InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
};
use crate::{
db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode,
};
pub(crate) use unify::unify;
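// `ty_app!` below is a small pattern helper: for example, `ty_app!(TypeCtor::Bool)`
// stands for the pattern
// `crate::Ty::Apply(crate::ApplicationTy { ctor: TypeCtor::Bool, parameters: _ })`.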
macro_rules! ty_app {
($ctor:pat, $param:pat) => {
crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
};
($ctor:pat) => {
ty_app!($ctor, _)
};
}
mod unify;
mod path;
mod expr;
mod pat;
mod coerce;
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile::span("infer_query");
let resolver = def.resolver(db.upcast());
let mut ctx = InferenceContext::new(db, def, resolver);
match def {
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
}
ctx.infer_body();
Arc::new(ctx.resolve_all())
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
enum ExprOrPatId {
ExprId(ExprId),
PatId(PatId),
}
impl_from!(ExprId, PatId for ExprOrPatId);
/// Binding modes inferred for patterns.
/// https://doc.rust-lang.org/reference/patterns.html#binding-modes
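/// For example, in `match opt { Some(ref x) => (), _ => () }` the binding `x`
/// gets `BindingMode::Ref(Mutability::Shared)`, while a plain `Some(x)` arm
/// binds by move (unless match ergonomics turn it into a reference binding).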
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum BindingMode {
Move,
Ref(Mutability),
}
impl BindingMode {
pub fn convert(annotation: BindingAnnotation) -> BindingMode {
match annotation {
BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared),
BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
}
}
}
impl Default for BindingMode {
fn default() -> Self {
BindingMode::Move
}
}
/// A mismatch between an expected and an inferred type.
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeMismatch {
pub expected: Ty,
pub actual: Ty,
}
/// The result of type inference: A mapping from expressions and patterns to types.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct InferenceResult {
/// For each method call expr, records the function it resolves to.
method_resolutions: FxHashMap<ExprId, FunctionId>,
/// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, FieldId>,
/// For each field in record literal, records the field it resolves to.
record_field_resolutions: FxHashMap<ExprId, FieldId>,
record_field_pat_resolutions: FxHashMap<PatId, FieldId>,
/// For each struct literal, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
diagnostics: Vec<InferenceDiagnostic>,
pub type_of_expr: ArenaMap<ExprId, Ty>,
pub type_of_pat: ArenaMap<PatId, Ty>,
pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>,
}
impl InferenceResult {
pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> {
self.method_resolutions.get(&expr).copied()
}
pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
self.field_resolutions.get(&expr).copied()
}
pub fn record_field_resolution(&self, expr: ExprId) -> Option<FieldId> {
self.record_field_resolutions.get(&expr).copied()
}
pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<FieldId> {
self.record_field_pat_resolutions.get(&pat).copied()
}
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()
}
pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
self.type_mismatches.get(expr)
}
pub fn add_diagnostics(
&self,
db: &dyn HirDatabase,
owner: DefWithBodyId,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
}
}
impl Index<ExprId> for InferenceResult {
type Output = Ty;
fn index(&self, expr: ExprId) -> &Ty {
self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown)
}
}
impl Index<PatId> for InferenceResult {
type Output = Ty;
fn index(&self, pat: PatId) -> &Ty {
self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown)
}
}
/// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)]
struct InferenceContext<'a> {
db: &'a dyn HirDatabase,
owner: DefWithBodyId,
body: Arc<Body>,
resolver: Resolver,
table: unify::InferenceTable,
trait_env: Arc<TraitEnvironment>,
obligations: Vec<Obligation>,
result: InferenceResult,
/// The return type of the function being inferred, or of the closure we're
/// currently inferring, if any.
///
/// We might consider using a nested inference context for checking
/// closures, but currently this is the only field that will change there,
/// so it doesn't make sense.
return_ty: Ty,
diverges: Diverges,
breakables: Vec<BreakableContext>,
}
#[derive(Clone, Debug)]
struct BreakableContext {
pub may_break: bool,
pub break_ty: Ty,
pub label: Option<name::Name>,
}
fn find_breakable<'c>(
ctxs: &'c mut [BreakableContext],
label: Option<&name::Name>,
) -> Option<&'c mut BreakableContext> {
match label {
Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label),
None => ctxs.last_mut(),
}
}
impl<'a> InferenceContext<'a> {
fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self {
InferenceContext {
result: InferenceResult::default(),
table: unify::InferenceTable::new(),
obligations: Vec::default(),
return_ty: Ty::Unknown, // set in collect_fn_signature
trait_env: TraitEnvironment::lower(db, &resolver),
db,
owner,
body: db.body(owner),
resolver,
diverges: Diverges::Maybe,
breakables: Vec::new(),
}
}
fn resolve_all(mut self) -> InferenceResult {
// FIXME resolve obligations as well (use Guidance if necessary)
let mut result = std::mem::take(&mut self.result);
for ty in result.type_of_expr.values_mut() {
let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
*ty = resolved;
}
for ty in result.type_of_pat.values_mut() {
let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
*ty = resolved;
}
result
}
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
self.result.type_of_expr.insert(expr, ty);
}
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) {
self.result.method_resolutions.insert(expr, func);
}
fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) {
self.result.field_resolutions.insert(expr, field);
}
fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
self.result.variant_resolutions.insert(id, variant);
}
fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
self.result.assoc_resolutions.insert(id, item);
}
fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
self.result.type_of_pat.insert(pat, ty);
}
fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
self.result.diagnostics.push(diagnostic);
}
fn make_ty_with_mode(
&mut self,
type_ref: &TypeRef,
impl_trait_mode: ImplTraitLoweringMode,
) -> Ty {
// FIXME use right resolver for block
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
.with_impl_trait_mode(impl_trait_mode);
let ty = Ty::from_hir(&ctx, type_ref);
let ty = self.insert_type_vars(ty);
self.normalize_associated_types_in(ty)
}
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
}
/// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
match ty {
Ty::Unknown => self.table.new_type_var(),
_ => ty,
}
}
fn insert_type_vars(&mut self, ty: Ty) -> Ty {
ty.fold(&mut |ty| self.insert_type_vars_shallow(ty))
}
fn resolve_obligations_as_possible(&mut self) {
let obligations = mem::replace(&mut self.obligations, Vec::new());
for obligation in obligations {
let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone());
let canonicalized = self.canonicalizer().canonicalize_obligation(in_env);
let solution =
self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone());
match solution {
Some(Solution::Unique(substs)) => {
canonicalized.apply_solution(self, substs.0);
}
Some(Solution::Ambig(Guidance::Definite(substs))) => {
canonicalized.apply_solution(self, substs.0);
self.obligations.push(obligation);
}
Some(_) => {
// FIXME use this when trying to resolve everything at the end
self.obligations.push(obligation);
}
None => {
// FIXME obligation cannot be fulfilled => diagnostic
}
};
}
}
fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
self.table.unify(ty1, ty2)
}
/// Resolves the type as far as currently possible, replacing type variables
/// by their known types. All types returned by the infer_* functions should
/// be resolved as far as possible, i.e. contain no type variables with
/// known type.
fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
self.resolve_obligations_as_possible();
self.table.resolve_ty_as_possible(ty)
}
fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
self.table.resolve_ty_shallow(ty)
}
fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
}
fn resolve_associated_type_with_params(
&mut self,
inner_ty: Ty,
assoc_ty: Option<TypeAliasId>,
params: &[Ty],
) -> Ty {
match assoc_ty {
Some(res_assoc_ty) => {
let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
hir_def::AssocContainerId::TraitId(trait_) => trait_,
_ => panic!("resolve_associated_type called with non-associated type"),
};
let ty = self.table.new_type_var();
let substs = Substs::build_for_def(self.db, res_assoc_ty)
.push(inner_ty)
.fill(params.iter().cloned())
.build();
let trait_ref = TraitRef { trait_, substs: substs.clone() };
let projection = ProjectionPredicate {
ty: ty.clone(),
projection_ty: ProjectionTy { associated_ty: res_assoc_ty, parameters: substs },
};
self.obligations.push(Obligation::Trait(trait_ref));
self.obligations.push(Obligation::Projection(projection));
self.resolve_ty_as_possible(ty)
}
None => Ty::Unknown,
}
}
/// Recurses through the given type, normalizing associated types mentioned
/// in it by replacing them by type variables and registering obligations to
/// resolve later. This should be done once for every type we get from some
/// type annotation (e.g. from a let type annotation, field type or function
/// call). `make_ty` handles this already, but e.g. for field types we need
/// to do it as well.
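///
/// A small illustration in surface Rust (not this API):
///
/// ```ignore
/// // The annotation mentions the projection `<Vec<i32> as IntoIterator>::IntoIter`;
/// // it is replaced with a fresh type variable plus a projection obligation,
/// // which later resolves the variable to `std::vec::IntoIter<i32>`.
/// let it: <Vec<i32> as IntoIterator>::IntoIter = vec![1, 2, 3].into_iter();
/// ```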
fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
let ty = self.resolve_ty_as_possible(ty);
ty.fold(&mut |ty| match ty {
Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty),
_ => ty,
})
}
fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
let var = self.table.new_type_var();
let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
let obligation = Obligation::Projection(predicate);
self.obligations.push(obligation);
var
}
fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
let path = match path {
Some(path) => path,
None => return (Ty::Unknown, None),
};
let resolver = &self.resolver;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
// FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
let (resolution, unresolved) =
match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
Some(it) => it,
None => return (Ty::Unknown, None),
};
return match resolution {
TypeNs::AdtId(AdtId::StructId(strukt)) => {
let substs = Ty::substs_from_path(&ctx, path, strukt.into(), true);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.subst(&substs));
forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
}
TypeNs::AdtId(AdtId::UnionId(u)) => {
let substs = Ty::substs_from_path(&ctx, path, u.into(), true);
let ty = self.db.ty(u.into());
let ty = self.insert_type_vars(ty.subst(&substs));
forbid_unresolved_segments((ty, Some(u.into())), unresolved)
}
TypeNs::EnumVariantId(var) => {
let substs = Ty::substs_from_path(&ctx, path, var.into(), true);
let ty = self.db.ty(var.parent.into());
let ty = self.insert_type_vars(ty.subst(&substs));
forbid_unresolved_segments((ty, Some(var.into())), unresolved)
}
TypeNs::SelfType(impl_id) => {
let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
let substs = Substs::type_params_for_generics(&generics);
let ty = self.db.impl_self_ty(impl_id).subst(&substs);
match unresolved {
None => {
let variant = ty_variant(&ty);
(ty, variant)
}
Some(1) => {
let segment = path.mod_path().segments.last().unwrap();
// this could be an enum variant or associated type
if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
let enum_data = self.db.enum_data(enum_id);
if let Some(local_id) = enum_data.variant(segment) {
let variant = EnumVariantId { parent: enum_id, local_id };
return (ty, Some(variant.into()));
}
}
// FIXME potentially resolve assoc type
(Ty::Unknown, None)
}
Some(_) => {
// FIXME diagnostic
(Ty::Unknown, None)
}
}
}
TypeNs::TypeAliasId(it) => {
let substs = Substs::build_for_def(self.db, it)
.fill(std::iter::repeat_with(|| self.table.new_type_var()))
.build();
let ty = self.db.ty(it.into()).subst(&substs);
let variant = ty_variant(&ty);
forbid_unresolved_segments((ty, variant), unresolved)
}
TypeNs::AdtSelfType(_) => {
// FIXME this could happen in array size expressions, once we're checking them
(Ty::Unknown, None)
}
TypeNs::GenericParam(_) => {
// FIXME potentially resolve assoc type
(Ty::Unknown, None)
}
TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
// FIXME diagnostic
(Ty::Unknown, None)
}
};
fn forbid_unresolved_segments(
result: (Ty, Option<VariantId>),
unresolved: Option<usize>,
) -> (Ty, Option<VariantId>) {
if unresolved.is_none() {
result
} else {
// FIXME diagnostic
(Ty::Unknown, None)
}
}
fn ty_variant(ty: &Ty) -> Option<VariantId> {
ty.as_adt().and_then(|(adt_id, _)| match adt_id {
AdtId::StructId(s) => Some(VariantId::StructId(s)),
AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
AdtId::EnumId(_) => {
// FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
None
}
})
}
}
fn collect_const(&mut self, data: &ConstData) {
self.return_ty = self.make_ty(&data.type_ref);
}
fn collect_static(&mut self, data: &StaticData) {
self.return_ty = self.make_ty(&data.type_ref);
}
fn collect_fn(&mut self, data: &FunctionData) {
let body = Arc::clone(&self.body); // avoid borrow checker problem
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Param);
let param_tys =
data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::<Vec<_>>();
for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
self.infer_pat(*pat, &ty, BindingMode::default());
}
let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT
self.return_ty = return_ty;
}
fn infer_body(&mut self) {
self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
}
fn resolve_lang_item(&self, name: &str) -> Option<LangItemTarget> {
let krate = self.resolver.krate()?;
let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes());
self.db.lang_item(krate, name)
}
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let path = path![core::iter::IntoIterator];
let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Item])
}
fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
let path = path![core::ops::Try];
let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Ok])
}
fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
let trait_ = self.resolve_lang_item("neg")?.as_trait()?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}
fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
let trait_ = self.resolve_lang_item("not")?.as_trait()?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}
fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let trait_ = self.resolve_lang_item("future_trait")?.as_trait()?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}
fn resolve_boxed_box(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item("owned_box")?.as_struct()?;
Some(struct_.into())
}
fn resolve_range_full(&self) -> Option<AdtId> {
let path = path![core::ops::RangeFull];
let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into())
}
fn resolve_range(&self) -> Option<AdtId> {
let path = path![core::ops::Range];
let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into())
}
fn resolve_range_inclusive(&self) -> Option<AdtId> {
let path = path![core::ops::RangeInclusive];
let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into())
}
fn resolve_range_from(&self) -> Option<AdtId> {
let path = path![core::ops::RangeFrom];
let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into())
}
fn resolve_range_to(&self) -> Option<AdtId> {
let path = path![core::ops::RangeTo];
let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into())
}
fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
let path = path![core::ops::RangeToInclusive];
let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into())
}
fn resolve_ops_index(&self) -> Option<TraitId> {
self.resolve_lang_item("index")?.as_trait()
}
fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
let trait_ = self.resolve_ops_index()?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}
}
/// The kinds of placeholders we need during type inference. There are separate
/// values for general types and for integer and float variables. The latter
/// two are used for inference of literal values (e.g. `100` could be one of
/// several integer types).
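///
/// For example (plain Rust, for illustration only):
///
/// ```ignore
/// let n = 100; // starts as an IntVar; with no further constraint it falls back to i32
/// let x = 1.5; // starts as a FloatVar; falls back to f64
/// ```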
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum InferTy {
TypeVar(unify::TypeVarId),
IntVar(unify::TypeVarId),
FloatVar(unify::TypeVarId),
MaybeNeverTypeVar(unify::TypeVarId),
}
impl InferTy {
fn to_inner(self) -> unify::TypeVarId {
match self {
InferTy::TypeVar(ty)
| InferTy::IntVar(ty)
| InferTy::FloatVar(ty)
| InferTy::MaybeNeverTypeVar(ty) => ty,
}
}
fn fallback_value(self) -> Ty {
match self {
InferTy::TypeVar(..) => Ty::Unknown,
InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(IntTy::i32())),
InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(FloatTy::f64())),
InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never),
}
}
}
/// When inferring an expression, we propagate downward whatever type hint we
/// are able to, in the form of an `Expectation`.
#[derive(Clone, PartialEq, Eq, Debug)]
struct Expectation {
ty: Ty,
/// See the `rvalue_hint` method.
rvalue_hint: bool,
}
impl Expectation {
/// The expectation that the type of the expression needs to equal the given
/// type.
fn has_type(ty: Ty) -> Self {
Expectation { ty, rvalue_hint: false }
}
/// The following explanation is copied straight from rustc:
/// Provides an expectation for an rvalue expression given an *optional*
/// hint, which is not required for type safety (the resulting type might
/// be checked higher up, as is the case with `&expr` and `box expr`), but
/// is useful in determining the concrete type.
///
/// The primary use case is where the expected type is a fat pointer,
/// like `&[isize]`. For example, consider the following statement:
///
/// let x: &[isize] = &[1, 2, 3];
///
/// In this case, the expected type for the `&[1, 2, 3]` expression is
/// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
/// expectation `ExpectHasType([isize])`, that would be too strong --
/// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
/// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
/// to the type `&[isize]`. Therefore, we propagate this more limited hint,
/// which still is useful, because it informs integer literals and the like.
/// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
/// for examples of where this comes up.
fn rvalue_hint(ty: Ty) -> Self {
Expectation { ty, rvalue_hint: true }
}
/// This expresses no expectation on the type.
fn none() -> Self {
Expectation { ty: Ty::Unknown, rvalue_hint: false }
}
fn coercion_target(&self) -> &Ty {
if self.rvalue_hint {
&Ty::Unknown
} else {
&self.ty
}
}
}
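/// Whether the code currently being inferred definitely diverges, e.g. because it
/// ends in `return`, `break`, or another expression of type `!`. `Maybe < Always`,
/// so combining parallel branches with `&` ("all arms diverge") takes the minimum,
/// while sequencing with `|` ("some part diverges") takes the maximum.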
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Diverges {
Maybe,
Always,
}
impl Diverges {
fn is_always(self) -> bool {
self == Diverges::Always
}
}
impl std::ops::BitAnd for Diverges {
type Output = Self;
fn bitand(self, other: Self) -> Self {
std::cmp::min(self, other)
}
}
impl std::ops::BitOr for Diverges {
type Output = Self;
fn bitor(self, other: Self) -> Self {
std::cmp::max(self, other)
}
}
impl std::ops::BitAndAssign for Diverges {
fn bitand_assign(&mut self, other: Self) {
*self = *self & other;
}
}
impl std::ops::BitOrAssign for Diverges {
fn bitor_assign(&mut self, other: Self) {
*self = *self | other;
}
}
mod diagnostics {
use hir_def::{expr::ExprId, DefWithBodyId};
use hir_expand::diagnostics::DiagnosticSink;
use crate::{
db::HirDatabase,
diagnostics::{BreakOutsideOfLoop, NoSuchField},
};
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum InferenceDiagnostic {
NoSuchField { expr: ExprId, field: usize },
BreakOutsideOfLoop { expr: ExprId },
}
impl InferenceDiagnostic {
pub(super) fn add_to(
&self,
db: &dyn HirDatabase,
owner: DefWithBodyId,
sink: &mut DiagnosticSink,
) {
match self {
InferenceDiagnostic::NoSuchField { expr, field } => {
let (_, source_map) = db.body_with_source_map(owner);
let field = source_map.field_syntax(*expr, *field);
sink.push(NoSuchField { file: field.file_id, field: field.value })
}
InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
let (_, source_map) = db.body_with_source_map(owner);
let ptr = source_map
.expr_syntax(*expr)
.expect("break outside of loop in synthetic syntax");
sink.push(BreakOutsideOfLoop { file: ptr.file_id, expr: ptr.value })
}
}
}
}
}


@ -0,0 +1,197 @@
//! Coercion logic. Coercions are certain type conversions that can implicitly
//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
//! like going from `&Vec<T>` to `&[T]`.
//!
//! See: https://doc.rust-lang.org/nomicon/coercions.html
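//!
//! For example (plain Rust, for illustration only):
//!
//! ```ignore
//! fn take_slice(_: &[i32]) {}
//! let v = vec![1, 2, 3];
//! take_slice(&v); // `&Vec<i32>` -> `&[i32]`: deref coercion
//! let m = &mut 1;
//! let r: &i32 = m; // `&mut i32` -> `&i32`: weakening
//! ```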
use hir_def::{lang_item::LangItemTarget, type_ref::Mutability};
use test_utils::mark;
use crate::{autoderef, traits::Solution, Obligation, Substs, TraitRef, Ty, TypeCtor};
use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext};
impl<'a> InferenceContext<'a> {
/// Unify two types, but may coerce the first one to the second one
/// using "implicit coercion rules" if needed.
pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
let from_ty = self.resolve_ty_shallow(from_ty).into_owned();
let to_ty = self.resolve_ty_shallow(to_ty);
self.coerce_inner(from_ty, &to_ty)
}
/// Merge two types from different branches, with possible coercion.
///
/// Mostly this means trying to coerce one to the other, but
/// - if we have two function types for different functions, we need to
/// coerce both to function pointers;
/// - if we were concerned with lifetime subtyping, we'd need to look for a
/// least upper bound.
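///
/// A sketch of the function-pointer case (plain Rust, for illustration):
///
/// ```ignore
/// fn pick(flag: bool) -> fn() {
///     fn foo() {}
///     fn bar() {}
///     // `foo` and `bar` have distinct zero-sized `FnDef` types; merging the
///     // branches reifies both to the function pointer type `fn()`.
///     if flag { foo } else { bar }
/// }
/// ```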
pub(super) fn coerce_merge_branch(&mut self, ty1: &Ty, ty2: &Ty) -> Ty {
if self.coerce(ty1, ty2) {
ty2.clone()
} else if self.coerce(ty2, ty1) {
ty1.clone()
} else {
if let (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnDef(_))) = (ty1, ty2) {
mark::hit!(coerce_fn_reification);
// Special case: two function types. Try to coerce both to
// pointers to have a chance at getting a match. See
// https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
let sig1 = ty1.callable_sig(self.db).expect("FnDef without callable sig");
let sig2 = ty2.callable_sig(self.db).expect("FnDef without callable sig");
let ptr_ty1 = Ty::fn_ptr(sig1);
let ptr_ty2 = Ty::fn_ptr(sig2);
self.coerce_merge_branch(&ptr_ty1, &ptr_ty2)
} else {
mark::hit!(coerce_merge_fail_fallback);
ty1.clone()
}
}
}
fn coerce_inner(&mut self, mut from_ty: Ty, to_ty: &Ty) -> bool {
match (&from_ty, to_ty) {
// The never type makes the type variable fall back to Never instead of Unknown.
(ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => {
let var = self.table.new_maybe_never_type_var();
self.table.var_unification_table.union_value(*tv, TypeVarValue::Known(var));
return true;
}
(ty_app!(TypeCtor::Never), _) => return true,
// Trivial cases; this should go after the `never` check to
// avoid inferring the result type as never.
_ => {
if self.table.unify_inner_trivial(&from_ty, &to_ty, 0) {
return true;
}
}
}
// Pointer weakening and function to pointer
match (&mut from_ty, to_ty) {
// `*mut T`, `&mut T`, `&T` -> `*const T`
// `&mut T` -> `&T`
// `&mut T` -> `*mut T`
(ty_app!(c1@TypeCtor::RawPtr(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
| (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
| (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::Ref(Mutability::Shared)))
| (ty_app!(c1@TypeCtor::Ref(Mutability::Mut)), ty_app!(c2@TypeCtor::RawPtr(_))) => {
*c1 = *c2;
}
// Illegal mutability conversion
(
ty_app!(TypeCtor::RawPtr(Mutability::Shared)),
ty_app!(TypeCtor::RawPtr(Mutability::Mut)),
)
| (
ty_app!(TypeCtor::Ref(Mutability::Shared)),
ty_app!(TypeCtor::Ref(Mutability::Mut)),
) => return false,
// `{function_type}` -> `fn()`
(ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnPtr { .. })) => {
match from_ty.callable_sig(self.db) {
None => return false,
Some(sig) => {
from_ty = Ty::fn_ptr(sig);
}
}
}
(ty_app!(TypeCtor::Closure { .. }, params), ty_app!(TypeCtor::FnPtr { .. })) => {
from_ty = params[0].clone();
}
_ => {}
}
if let Some(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
return ret;
}
// Auto Deref if cannot coerce
match (&from_ty, to_ty) {
// FIXME: DerefMut
(ty_app!(TypeCtor::Ref(_), st1), ty_app!(TypeCtor::Ref(_), st2)) => {
self.unify_autoderef_behind_ref(&st1[0], &st2[0])
}
// Otherwise, normal unify
_ => self.unify(&from_ty, to_ty),
}
}
/// Coerce a type using `from_ty: CoerceUnsized<to_ty>`
///
/// See: https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html
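///
/// For example (plain Rust, for illustration only):
///
/// ```ignore
/// let a: &[i32; 3] = &[1, 2, 3];
/// let s: &[i32] = a; // `&[i32; 3]` implements `CoerceUnsized<&[i32]>`
/// let d: Box<dyn std::fmt::Debug> = Box::new(5i32); // `Box<i32>` -> `Box<dyn Debug>`
/// ```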
fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> Option<bool> {
let krate = self.resolver.krate().unwrap();
let coerce_unsized_trait = match self.db.lang_item(krate, "coerce_unsized".into()) {
Some(LangItemTarget::TraitId(trait_)) => trait_,
_ => return None,
};
let generic_params = crate::utils::generics(self.db.upcast(), coerce_unsized_trait.into());
if generic_params.len() != 2 {
// The CoerceUnsized trait should have two generic params: Self and T.
return None;
}
let substs = Substs::build_for_generics(&generic_params)
.push(from_ty.clone())
.push(to_ty.clone())
.build();
let trait_ref = TraitRef { trait_: coerce_unsized_trait, substs };
let goal = InEnvironment::new(self.trait_env.clone(), Obligation::Trait(trait_ref));
let canonicalizer = self.canonicalizer();
let canonicalized = canonicalizer.canonicalize_obligation(goal);
let solution = self.db.trait_solve(krate, canonicalized.value.clone())?;
match solution {
Solution::Unique(v) => {
canonicalized.apply_solution(self, v.0);
}
_ => return None,
};
Some(true)
}
/// Unify `from_ty` to `to_ty` with optional auto Deref
///
/// Note that the parameters have already been stripped of their outer reference.
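///
/// For example (plain Rust, for illustration only):
///
/// ```ignore
/// fn take(_: &str) {}
/// let s = String::from("hi");
/// // Behind the outer `&`, `String` is autoderefed via `Deref<Target = str>`
/// // until the constructors match, so `&String` unifies with `&str` here.
/// take(&s);
/// ```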
fn unify_autoderef_behind_ref(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone());
let to_ty = self.resolve_ty_shallow(&to_ty);
// FIXME: Auto DerefMut
for derefed_ty in autoderef::autoderef(
self.db,
self.resolver.krate(),
InEnvironment {
value: canonicalized.value.clone(),
environment: self.trait_env.clone(),
},
) {
let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value);
match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) {
// Stop when constructor matches.
(ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => {
// It will not recurse to `coerce`.
return self.table.unify_substs(st1, st2, 0);
}
_ => {
if self.table.unify_inner_trivial(&derefed_ty, &to_ty, 0) {
return true;
}
}
}
}
false
}
}


@ -0,0 +1,873 @@
//! Type inference for expressions.
use std::iter::{repeat, repeat_with};
use std::{mem, sync::Arc};
use hir_def::{
builtin_type::Signedness,
expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
path::{GenericArg, GenericArgs},
resolver::resolver_for_expr,
AdtId, AssocContainerId, FieldId, Lookup,
};
use hir_expand::name::{name, Name};
use syntax::ast::RangeOp;
use crate::{
autoderef, method_resolution, op,
traits::{FnTrait, InEnvironment},
utils::{generics, variant_data, Generics},
ApplicationTy, Binders, CallableDefId, InferTy, IntTy, Mutability, Obligation, Rawness, Substs,
TraitRef, Ty, TypeCtor,
};
use super::{
find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext,
InferenceDiagnostic, TypeMismatch,
};
impl<'a> InferenceContext<'a> {
pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(tgt_expr, expected);
if ty.is_never() {
// Any expression that produces a value of type `!` must have diverged
self.diverges = Diverges::Always;
}
let could_unify = self.unify(&ty, &expected.ty);
if !could_unify {
self.result.type_mismatches.insert(
tgt_expr,
TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() },
);
}
self.resolve_ty_as_possible(ty)
}
/// Infer the type of an expression, coercing it implicitly to the expected type where possible.
/// Returns the type after the possible coercion.
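///
/// For example (plain Rust, for illustration):
///
/// ```ignore
/// // The array expression is inferred with expectation `&[i32]` and then
/// // unsize-coerced to it:
/// let xs: &[i32] = &[1, 2, 3];
/// ```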
pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(expr, &expected);
let ty = if !self.coerce(&ty, &expected.coercion_target()) {
self.result
.type_mismatches
.insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() });
// Return the actual type on mismatch;
// this is needed for diagnostics when the return type doesn't match.
ty
} else if expected.coercion_target() == &Ty::Unknown {
ty
} else {
expected.ty.clone()
};
self.resolve_ty_as_possible(ty)
}
fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
let krate = self.resolver.krate()?;
let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
let output_assoc_type =
self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
let generic_params = generics(self.db.upcast(), fn_once_trait.into());
if generic_params.len() != 2 {
return None;
}
let mut param_builder = Substs::builder(num_args);
let mut arg_tys = vec![];
for _ in 0..num_args {
let arg = self.table.new_type_var();
param_builder = param_builder.push(arg.clone());
arg_tys.push(arg);
}
let parameters = param_builder.build();
let arg_ty = Ty::Apply(ApplicationTy {
ctor: TypeCtor::Tuple { cardinality: num_args as u16 },
parameters,
});
let substs =
Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build();
let trait_env = Arc::clone(&self.trait_env);
let implements_fn_trait =
Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() });
let goal = self.canonicalizer().canonicalize_obligation(InEnvironment {
value: implements_fn_trait.clone(),
environment: trait_env,
});
if self.db.trait_solve(krate, goal.value).is_some() {
self.obligations.push(implements_fn_trait);
let output_proj_ty =
crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs };
let return_ty = self.normalize_projection_ty(output_proj_ty);
Some((arg_tys, return_ty))
} else {
None
}
}
pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
match ty.callable_sig(self.db) {
Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
None => self.callable_sig_from_fn_trait(ty, num_args),
}
}
fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
let body = Arc::clone(&self.body); // avoid borrow checker problem
let ty = match &body[tgt_expr] {
Expr::Missing => Ty::Unknown,
Expr::If { condition, then_branch, else_branch } => {
// if let is desugared to match, so this is always simple if
self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let mut both_arms_diverge = Diverges::Always;
let then_ty = self.infer_expr_inner(*then_branch, &expected);
both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
let else_ty = match else_branch {
Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
None => Ty::unit(),
};
both_arms_diverge &= self.diverges;
self.diverges = condition_diverges | both_arms_diverge;
self.coerce_merge_branch(&then_ty, &else_ty)
}
Expr::Block { statements, tail, .. } => {
// FIXME: Breakable block inference
self.infer_block(statements, *tail, expected)
}
Expr::Unsafe { body } => self.infer_expr(*body, expected),
Expr::TryBlock { body } => {
let _inner = self.infer_expr(*body, expected);
// FIXME should be std::result::Result<{inner}, _>
Ty::Unknown
}
Expr::Loop { body, label } => {
self.breakables.push(BreakableContext {
may_break: false,
break_ty: self.table.new_type_var(),
label: label.clone(),
});
self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
let ctxt = self.breakables.pop().expect("breakable stack broken");
if ctxt.may_break {
self.diverges = Diverges::Maybe;
}
if ctxt.may_break {
ctxt.break_ty
} else {
Ty::simple(TypeCtor::Never)
}
}
Expr::While { condition, body, label } => {
self.breakables.push(BreakableContext {
may_break: false,
break_ty: Ty::Unknown,
label: label.clone(),
});
// while let is desugared to a match loop, so this is always simple while
self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
let _ctxt = self.breakables.pop().expect("breakable stack broken");
// the body may not run, so it diverging doesn't mean we diverge
self.diverges = Diverges::Maybe;
Ty::unit()
}
Expr::For { iterable, body, pat, label } => {
let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
self.breakables.push(BreakableContext {
may_break: false,
break_ty: Ty::Unknown,
label: label.clone(),
});
let pat_ty =
self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
self.infer_pat(*pat, &pat_ty, BindingMode::default());
self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
let _ctxt = self.breakables.pop().expect("breakable stack broken");
// the body may not run, so it diverging doesn't mean we diverge
self.diverges = Diverges::Maybe;
Ty::unit()
}
Expr::Lambda { body, args, ret_type, arg_types } => {
assert_eq!(args.len(), arg_types.len());
let mut sig_tys = Vec::new();
// collect explicitly written argument types
for arg_type in arg_types.iter() {
let arg_ty = if let Some(type_ref) = arg_type {
self.make_ty(type_ref)
} else {
self.table.new_type_var()
};
sig_tys.push(arg_ty);
}
// add return type
let ret_ty = match ret_type {
Some(type_ref) => self.make_ty(type_ref),
None => self.table.new_type_var(),
};
sig_tys.push(ret_ty.clone());
let sig_ty = Ty::apply(
TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1, is_varargs: false },
Substs(sig_tys.clone().into()),
);
let closure_ty =
Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);
// Eagerly try to relate the closure type with the expected
// type, otherwise we often won't have enough information to
// infer the body.
self.coerce(&closure_ty, &expected.ty);
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
let resolved = self.resolve_ty_as_possible(arg_ty);
self.infer_pat(*arg_pat, &resolved, BindingMode::default());
}
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
closure_ty
}
Expr::Call { callee, args } => {
let callee_ty = self.infer_expr(*callee, &Expectation::none());
let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone());
let mut derefs = autoderef(
self.db,
self.resolver.krate(),
InEnvironment {
value: canonicalized.value.clone(),
environment: self.trait_env.clone(),
},
);
let (param_tys, ret_ty): (Vec<Ty>, Ty) = derefs
.find_map(|callee_deref_ty| {
self.callable_sig(
&canonicalized.decanonicalize_ty(callee_deref_ty.value),
args.len(),
)
})
.unwrap_or((Vec::new(), Ty::Unknown));
self.register_obligations_for_call(&callee_ty);
self.check_call_arguments(args, &param_tys);
self.normalize_associated_types_in(ret_ty)
}
Expr::MethodCall { receiver, args, method_name, generic_args } => self
.infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()),
Expr::Match { expr, arms } => {
let input_ty = self.infer_expr(*expr, &Expectation::none());
let mut result_ty = if arms.is_empty() {
Ty::simple(TypeCtor::Never)
} else {
self.table.new_type_var()
};
let matchee_diverges = self.diverges;
let mut all_arms_diverge = Diverges::Always;
for arm in arms {
self.diverges = Diverges::Maybe;
let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
if let Some(guard_expr) = arm.guard {
self.infer_expr(
guard_expr,
&Expectation::has_type(Ty::simple(TypeCtor::Bool)),
);
}
let arm_ty = self.infer_expr_inner(arm.expr, &expected);
all_arms_diverge &= self.diverges;
result_ty = self.coerce_merge_branch(&result_ty, &arm_ty);
}
self.diverges = matchee_diverges | all_arms_diverge;
result_ty
}
Expr::Path(p) => {
// FIXME this could be more efficient...
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
}
Expr::Continue { .. } => Ty::simple(TypeCtor::Never),
Expr::Break { expr, label } => {
let val_ty = if let Some(expr) = expr {
self.infer_expr(*expr, &Expectation::none())
} else {
Ty::unit()
};
let last_ty =
if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
ctxt.break_ty.clone()
} else {
Ty::Unknown
};
let merged_type = self.coerce_merge_branch(&last_ty, &val_ty);
if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
ctxt.break_ty = merged_type;
ctxt.may_break = true;
} else {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
});
}
Ty::simple(TypeCtor::Never)
}
Expr::Return { expr } => {
if let Some(expr) = expr {
self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
} else {
let unit = Ty::unit();
self.coerce(&unit, &self.return_ty.clone());
}
Ty::simple(TypeCtor::Never)
}
Expr::RecordLit { path, fields, spread } => {
let (ty, def_id) = self.resolve_variant(path.as_ref());
if let Some(variant) = def_id {
self.write_variant_resolution(tgt_expr.into(), variant);
}
self.unify(&ty, &expected.ty);
let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it));
for (field_idx, field) in fields.iter().enumerate() {
let field_def =
variant_data.as_ref().and_then(|it| match it.field(&field.name) {
Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
});
if let Some(field_def) = field_def {
self.result.record_field_resolutions.insert(field.expr, field_def);
}
let field_ty = field_def
.map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs));
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
}
if let Some(expr) = spread {
self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
}
ty
}
Expr::Field { expr, name } => {
let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
let ty = autoderef::autoderef(
self.db,
self.resolver.krate(),
InEnvironment {
value: canonicalized.value.clone(),
environment: self.trait_env.clone(),
},
)
.find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Tuple { .. } => name
.as_tuple_index()
.and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
TypeCtor::Adt(AdtId::StructId(s)) => {
self.db.struct_data(s).variant_data.field(name).map(|local_id| {
let field = FieldId { parent: s.into(), local_id };
self.write_field_resolution(tgt_expr, field);
self.db.field_types(s.into())[field.local_id]
.clone()
.subst(&a_ty.parameters)
})
}
TypeCtor::Adt(AdtId::UnionId(u)) => {
self.db.union_data(u).variant_data.field(name).map(|local_id| {
let field = FieldId { parent: u.into(), local_id };
self.write_field_resolution(tgt_expr, field);
self.db.field_types(u.into())[field.local_id]
.clone()
.subst(&a_ty.parameters)
})
}
_ => None,
},
_ => None,
})
.unwrap_or(Ty::Unknown);
let ty = self.insert_type_vars(ty);
self.normalize_associated_types_in(ty)
}
Expr::Await { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
}
Expr::Try { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
}
Expr::Cast { expr, type_ref } => {
let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
let cast_ty = self.make_ty(type_ref);
// FIXME check the cast...
cast_ty
}
Expr::Ref { expr, rawness, mutability } => {
let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) =
&expected.ty.as_reference_or_ptr()
{
if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared {
// FIXME: throw type error - expected mut reference but found shared ref,
// which cannot be coerced
}
if *exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
// FIXME: throw type error - expected reference but found ptr,
// which cannot be coerced
}
Expectation::rvalue_hint(Ty::clone(exp_inner))
} else {
Expectation::none()
};
let inner_ty = self.infer_expr_inner(*expr, &expectation);
let ty = match rawness {
Rawness::RawPtr => TypeCtor::RawPtr(*mutability),
Rawness::Ref => TypeCtor::Ref(*mutability),
};
Ty::apply_one(ty, inner_ty)
}
Expr::Box { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
if let Some(box_) = self.resolve_boxed_box() {
Ty::apply_one(TypeCtor::Adt(box_), inner_ty)
} else {
Ty::Unknown
}
}
Expr::UnaryOp { expr, op } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
match op {
UnaryOp::Deref => match self.resolver.krate() {
Some(krate) => {
let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
match autoderef::deref(
self.db,
krate,
InEnvironment {
value: &canonicalized.value,
environment: self.trait_env.clone(),
},
) {
Some(derefed_ty) => {
canonicalized.decanonicalize_ty(derefed_ty.value)
}
None => Ty::Unknown,
}
}
None => Ty::Unknown,
},
UnaryOp::Neg => {
match &inner_ty {
// Fast path for builtins
Ty::Apply(ApplicationTy {
ctor: TypeCtor::Int(IntTy { signedness: Signedness::Signed, .. }),
..
})
| Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. })
| Ty::Infer(InferTy::IntVar(..))
| Ty::Infer(InferTy::FloatVar(..)) => inner_ty,
// Otherwise we resolve via the std::ops::Neg trait
_ => self
.resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
}
}
UnaryOp::Not => {
match &inner_ty {
// Fast path for builtins
Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })
| Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. })
| Ty::Infer(InferTy::IntVar(..)) => inner_ty,
// Otherwise we resolve via the std::ops::Not trait
_ => self
.resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
}
}
}
}
Expr::BinaryOp { lhs, rhs, op } => match op {
Some(op) => {
let lhs_expectation = match op {
BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)),
_ => Expectation::none(),
};
let lhs_ty = self.infer_expr(*lhs, &lhs_expectation);
// FIXME: find implementation of trait corresponding to operation
// symbol and resolve associated `Output` type
let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty.clone());
let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation));
// FIXME: similar as above, return ty is often associated trait type
op::binary_op_return_ty(*op, lhs_ty, rhs_ty)
}
_ => Ty::Unknown,
},
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
let rhs_expect = lhs_ty
.as_ref()
.map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
match (range_type, lhs_ty, rhs_ty) {
(RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
Some(adt) => Ty::simple(TypeCtor::Adt(adt)),
None => Ty::Unknown,
},
(RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
},
(RangeOp::Inclusive, None, Some(ty)) => {
match self.resolve_range_to_inclusive() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
}
}
(RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
},
(RangeOp::Inclusive, Some(_), Some(ty)) => {
match self.resolve_range_inclusive() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
}
}
(RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
},
(RangeOp::Inclusive, _, None) => Ty::Unknown,
}
}
Expr::Index { base, index } => {
let base_ty = self.infer_expr_inner(*base, &Expectation::none());
let index_ty = self.infer_expr(*index, &Expectation::none());
if let (Some(index_trait), Some(krate)) =
(self.resolve_ops_index(), self.resolver.krate())
{
let canonicalized = self.canonicalizer().canonicalize_ty(base_ty);
let self_ty = method_resolution::resolve_indexing_op(
self.db,
&canonicalized.value,
self.trait_env.clone(),
krate,
index_trait,
);
let self_ty =
self_ty.map_or(Ty::Unknown, |t| canonicalized.decanonicalize_ty(t.value));
self.resolve_associated_type_with_params(
self_ty,
self.resolve_ops_index_output(),
&[index_ty],
)
} else {
Ty::Unknown
}
}
Expr::Tuple { exprs } => {
let mut tys = match &expected.ty {
ty_app!(TypeCtor::Tuple { .. }, st) => st
.iter()
.cloned()
.chain(repeat_with(|| self.table.new_type_var()))
.take(exprs.len())
.collect::<Vec<_>>(),
_ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
};
for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
}
Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into()))
}
Expr::Array(array) => {
let elem_ty = match &expected.ty {
ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => {
st.as_single().clone()
}
_ => self.table.new_type_var(),
};
match array {
Array::ElementList(items) => {
for expr in items.iter() {
self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone()));
}
}
Array::Repeat { initializer, repeat } => {
self.infer_expr_coerce(
*initializer,
&Expectation::has_type(elem_ty.clone()),
);
self.infer_expr(
*repeat,
&Expectation::has_type(Ty::simple(TypeCtor::Int(IntTy::usize()))),
);
}
}
Ty::apply_one(TypeCtor::Array, elem_ty)
}
Expr::Literal(lit) => match lit {
Literal::Bool(..) => Ty::simple(TypeCtor::Bool),
Literal::String(..) => {
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str))
}
Literal::ByteString(..) => {
let byte_type = Ty::simple(TypeCtor::Int(IntTy::u8()));
let array_type = Ty::apply_one(TypeCtor::Array, byte_type);
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), array_type)
}
Literal::Char(..) => Ty::simple(TypeCtor::Char),
Literal::Int(_v, ty) => match ty {
Some(int_ty) => Ty::simple(TypeCtor::Int((*int_ty).into())),
None => self.table.new_integer_var(),
},
Literal::Float(_v, ty) => match ty {
Some(float_ty) => Ty::simple(TypeCtor::Float((*float_ty).into())),
None => self.table.new_float_var(),
},
},
};
// use a new type variable if we got Ty::Unknown here
let ty = self.insert_type_vars_shallow(ty);
let ty = self.resolve_ty_as_possible(ty);
self.write_expr_ty(tgt_expr, ty.clone());
ty
}
fn infer_block(
&mut self,
statements: &[Statement],
tail: Option<ExprId>,
expected: &Expectation,
) -> Ty {
for stmt in statements {
match stmt {
Statement::Let { pat, type_ref, initializer } => {
let decl_ty =
type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown);
// Always use the declared type when specified
let mut ty = decl_ty.clone();
if let Some(expr) = initializer {
let actual_ty =
self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
if decl_ty == Ty::Unknown {
ty = actual_ty;
}
}
let ty = self.resolve_ty_as_possible(ty);
self.infer_pat(*pat, &ty, BindingMode::default());
}
Statement::Expr(expr) => {
self.infer_expr(*expr, &Expectation::none());
}
}
}
let ty = if let Some(expr) = tail {
self.infer_expr_coerce(expr, expected)
} else {
// Citing rustc: if there is no explicit tail expression,
// that is typically equivalent to a tail expression
// of `()` -- except if the block diverges. In that
// case, there is no value supplied from the tail
// expression (assuming there are no other breaks,
// this implies that the type of the block will be
// `!`).
if self.diverges.is_always() {
// we don't even make an attempt at coercion
self.table.new_maybe_never_type_var()
} else {
self.coerce(&Ty::unit(), expected.coercion_target());
Ty::unit()
}
};
ty
}
fn infer_method_call(
&mut self,
tgt_expr: ExprId,
receiver: ExprId,
args: &[ExprId],
method_name: &Name,
generic_args: Option<&GenericArgs>,
) -> Ty {
let receiver_ty = self.infer_expr(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone());
let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
let resolved = self.resolver.krate().and_then(|krate| {
method_resolution::lookup_method(
&canonicalized_receiver.value,
self.db,
self.trait_env.clone(),
krate,
&traits_in_scope,
method_name,
)
});
let (derefed_receiver_ty, method_ty, def_generics) = match resolved {
Some((ty, func)) => {
let ty = canonicalized_receiver.decanonicalize_ty(ty);
self.write_method_resolution(tgt_expr, func);
(ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into())))
}
None => (receiver_ty, Binders::new(0, Ty::Unknown), None),
};
let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty);
let method_ty = method_ty.subst(&substs);
let method_ty = self.insert_type_vars(method_ty);
self.register_obligations_for_call(&method_ty);
let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) {
Some(sig) => {
if !sig.params().is_empty() {
(sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone())
} else {
(Ty::Unknown, Vec::new(), sig.ret().clone())
}
}
None => (Ty::Unknown, Vec::new(), Ty::Unknown),
};
// Apply autoref so the below unification works correctly
// FIXME: return correct autorefs from lookup_method
let actual_receiver_ty = match expected_receiver_ty.as_reference() {
Some((_, mutability)) => Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty),
_ => derefed_receiver_ty,
};
self.unify(&expected_receiver_ty, &actual_receiver_ty);
self.check_call_arguments(args, &param_tys);
self.normalize_associated_types_in(ret_ty)
}
fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) {
// Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
// We do this in a pretty awful way: first we type-check any arguments
// that are not closures, then we type-check the closures. This is so
// that we have more information about the types of arguments when we
// type-check the functions. This isn't really the right way to do this.
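// Illustration (hypothetical function, not from this crate): in
// `apply(1u8, |x| x + 1)` with `fn apply<T>(seed: T, f: impl Fn(T) -> T)`,
// checking the literal `1u8` first pins `T`, so the closure parameter `x`
// is already known to be `u8` when the closure body is checked.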
for &check_closures in &[false, true] {
let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown));
for (&arg, param_ty) in args.iter().zip(param_iter) {
let is_closure = matches!(&self.body[arg], Expr::Lambda { .. });
if is_closure != check_closures {
continue;
}
let param_ty = self.normalize_associated_types_in(param_ty);
self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone()));
}
}
}
fn substs_for_method_call(
&mut self,
def_generics: Option<Generics>,
generic_args: Option<&GenericArgs>,
receiver_ty: &Ty,
) -> Substs {
let (parent_params, self_params, type_params, impl_trait_params) =
def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split());
assert_eq!(self_params, 0); // method shouldn't have another Self param
let total_len = parent_params + type_params + impl_trait_params;
let mut substs = Vec::with_capacity(total_len);
// Parent arguments are unknown, except for the receiver type
if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) {
for (_id, param) in parent_generics {
if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf {
substs.push(receiver_ty.clone());
} else {
substs.push(Ty::Unknown);
}
}
}
// handle provided type arguments
if let Some(generic_args) = generic_args {
// if args are provided, it should be all of them, but we can't rely on that
for arg in generic_args.args.iter().take(type_params) {
match arg {
GenericArg::Type(type_ref) => {
let ty = self.make_ty(type_ref);
substs.push(ty);
}
}
}
};
let supplied_params = substs.len();
for _ in supplied_params..total_len {
substs.push(Ty::Unknown);
}
assert_eq!(substs.len(), total_len);
Substs(substs.into())
}
fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
if let Ty::Apply(a_ty) = callable_ty {
if let TypeCtor::FnDef(def) = a_ty.ctor {
let generic_predicates = self.db.generic_predicates(def.into());
for predicate in generic_predicates.iter() {
let predicate = predicate.clone().subst(&a_ty.parameters);
if let Some(obligation) = Obligation::from_predicate(predicate) {
self.obligations.push(obligation);
}
}
// add obligation for trait implementation, if this is a trait method
match def {
CallableDefId::FunctionId(f) => {
if let AssocContainerId::TraitId(trait_) =
f.lookup(self.db.upcast()).container
{
// construct a TraitRef
let substs = a_ty
.parameters
.prefix(generics(self.db.upcast(), trait_.into()).len());
self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
}
}
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
}
}
}
}
}


@ -0,0 +1,241 @@
//! Type inference for patterns.
use std::iter::repeat;
use std::sync::Arc;
use hir_def::{
expr::{BindingAnnotation, Expr, Literal, Pat, PatId, RecordFieldPat},
path::Path,
type_ref::Mutability,
FieldId,
};
use hir_expand::name::Name;
use test_utils::mark;
use super::{BindingMode, Expectation, InferenceContext};
use crate::{utils::variant_data, Substs, Ty, TypeCtor};
impl<'a> InferenceContext<'a> {
fn infer_tuple_struct_pat(
&mut self,
path: Option<&Path>,
subpats: &[PatId],
expected: &Ty,
default_bm: BindingMode,
id: PatId,
) -> Ty {
let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db.upcast(), it));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
self.unify(&ty, expected);
let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
for (i, &subpat) in subpats.iter().enumerate() {
let expected_ty = var_data
.as_ref()
.and_then(|d| d.field(&Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat, &expected_ty, default_bm);
}
ty
}
fn infer_record_pat(
&mut self,
path: Option<&Path>,
subpats: &[RecordFieldPat],
expected: &Ty,
default_bm: BindingMode,
id: PatId,
) -> Ty {
let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db.upcast(), it));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
self.unify(&ty, expected);
let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
for subpat in subpats {
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
if let Some(local_id) = matching_field {
let field_def = FieldId { parent: def.unwrap(), local_id };
self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
}
let expected_ty =
matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat.pat, &expected_ty, default_bm);
}
ty
}
pub(super) fn infer_pat(
&mut self,
pat: PatId,
mut expected: &Ty,
mut default_bm: BindingMode,
) -> Ty {
let body = Arc::clone(&self.body); // avoid borrow checker problem
if is_non_ref_pat(&body, pat) {
while let Some((inner, mutability)) = expected.as_reference() {
expected = inner;
default_bm = match default_bm {
BindingMode::Move => BindingMode::Ref(mutability),
BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared),
BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
}
}
} else if let Pat::Ref { .. } = &body[pat] {
mark::hit!(match_ergonomics_ref);
// When you encounter a `&pat` pattern, reset to Move.
// This is so that `w` is by value: `let (_, &w) = &(1, &2);`
default_bm = BindingMode::Move;
}
// Lose mutability.
let default_bm = default_bm;
let expected = expected;
let ty = match &body[pat] {
Pat::Tuple { ref args, .. } => {
let expectations = match expected.as_tuple() {
Some(parameters) => &*parameters.0,
_ => &[],
};
let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown));
let inner_tys = args
.iter()
.zip(expectations_iter)
.map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm))
.collect();
Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys))
}
Pat::Or(ref pats) => {
if let Some((first_pat, rest)) = pats.split_first() {
let ty = self.infer_pat(*first_pat, expected, default_bm);
for pat in rest {
self.infer_pat(*pat, expected, default_bm);
}
ty
} else {
Ty::Unknown
}
}
Pat::Ref { pat, mutability } => {
let expectation = match expected.as_reference() {
Some((inner_ty, exp_mut)) => {
if *mutability != exp_mut {
// FIXME: emit type error?
}
inner_ty
}
_ => &Ty::Unknown,
};
let subty = self.infer_pat(*pat, expectation, default_bm);
Ty::apply_one(TypeCtor::Ref(*mutability), subty)
}
Pat::TupleStruct { path: p, args: subpats, .. } => {
self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
}
Pat::Record { path: p, args: fields, ellipsis: _ } => {
self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat)
}
Pat::Path(path) => {
// FIXME use correct resolver for the surrounding expression
let resolver = self.resolver.clone();
self.infer_path(&resolver, &path, pat.into()).unwrap_or(Ty::Unknown)
}
Pat::Bind { mode, name: _, subpat } => {
let mode = if mode == &BindingAnnotation::Unannotated {
default_bm
} else {
BindingMode::convert(*mode)
};
let inner_ty = if let Some(subpat) = subpat {
self.infer_pat(*subpat, expected, default_bm)
} else {
expected.clone()
};
let inner_ty = self.insert_type_vars_shallow(inner_ty);
let bound_ty = match mode {
BindingMode::Ref(mutability) => {
Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone())
}
BindingMode::Move => inner_ty.clone(),
};
let bound_ty = self.resolve_ty_as_possible(bound_ty);
self.write_pat_ty(pat, bound_ty);
return inner_ty;
}
Pat::Slice { prefix, slice, suffix } => {
let (container_ty, elem_ty) = match &expected {
ty_app!(TypeCtor::Array, st) => (TypeCtor::Array, st.as_single().clone()),
ty_app!(TypeCtor::Slice, st) => (TypeCtor::Slice, st.as_single().clone()),
_ => (TypeCtor::Slice, Ty::Unknown),
};
for pat_id in prefix.iter().chain(suffix) {
self.infer_pat(*pat_id, &elem_ty, default_bm);
}
let pat_ty = Ty::apply_one(container_ty, elem_ty);
if let Some(slice_pat_id) = slice {
self.infer_pat(*slice_pat_id, &pat_ty, default_bm);
}
pat_ty
}
Pat::Wild => expected.clone(),
Pat::Range { start, end } => {
let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
let end_ty = self.infer_expr(*end, &Expectation::has_type(start_ty));
end_ty
}
Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
Pat::Missing => Ty::Unknown,
};
// use a new type variable if we got Ty::Unknown here
let ty = self.insert_type_vars_shallow(ty);
if !self.unify(&ty, expected) {
// FIXME: record the mismatch; we need to change the type of self.type_mismatches for that
}
let ty = self.resolve_ty_as_possible(ty);
self.write_pat_ty(pat, ty.clone());
ty
}
}
fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
match &body[pat] {
Pat::Tuple { .. }
| Pat::TupleStruct { .. }
| Pat::Record { .. }
| Pat::Range { .. }
| Pat::Slice { .. } => true,
Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
// FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented.
Pat::Path(..) => true,
Pat::Lit(expr) => match body[*expr] {
Expr::Literal(Literal::String(..)) => false,
_ => true,
},
Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Missing => false,
}
}
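As a standalone illustration of the match-ergonomics rules modelled by `infer_pat` and `is_non_ref_pat` above (plain Rust, not part of this commit), the following compiles and shows both the by-reference default binding mode and the reset caused by an explicit `&` pattern:
fn main() {
    let pair: &(i32, &i32) = &(1, &2);
    // A non-reference pattern matched against a reference: the default
    // binding mode becomes by-reference, so `x: &i32`.
    let (x, _) = pair;
    let _: &i32 = x;
    // An explicit `&` pattern resets the binding mode to by-move,
    // so `w` is an `i32` by value (the example from the comment above).
    let (_, &w) = pair;
    let _: i32 = w;
}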

View file

@ -0,0 +1,287 @@
//! Path expression resolution.
use std::iter;
use hir_def::{
path::{Path, PathSegment},
resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
AdtId, AssocContainerId, AssocItemId, EnumVariantId, Lookup,
};
use hir_expand::name::Name;
use crate::{method_resolution, Substs, Ty, ValueTyDefId};
use super::{ExprOrPatId, InferenceContext, TraitRef};
impl<'a> InferenceContext<'a> {
pub(super) fn infer_path(
&mut self,
resolver: &Resolver,
path: &Path,
id: ExprOrPatId,
) -> Option<Ty> {
let ty = self.resolve_value_path(resolver, path, id)?;
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
Some(ty)
}
fn resolve_value_path(
&mut self,
resolver: &Resolver,
path: &Path,
id: ExprOrPatId,
) -> Option<Ty> {
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
if path.segments().is_empty() {
// This can't actually happen syntax-wise
return None;
}
let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
let (ty, _) = Ty::from_type_relative_path(&ctx, ty, None, remaining_segments_for_ty);
self.resolve_ty_assoc_item(
ty,
&path.segments().last().expect("path had at least one segment").name,
id,
)?
} else {
let value_or_partial =
resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None),
ResolveValueResult::Partial(def, remaining_index) => {
self.resolve_assoc_item(def, path, remaining_index, id)?
}
}
};
let typable: ValueTyDefId = match value {
ValueNs::LocalBinding(pat) => {
let ty = self.result.type_of_pat.get(pat)?.clone();
let ty = self.resolve_ty_as_possible(ty);
return Some(ty);
}
ValueNs::FunctionId(it) => it.into(),
ValueNs::ConstId(it) => it.into(),
ValueNs::StaticId(it) => it.into(),
ValueNs::StructId(it) => {
self.write_variant_resolution(id, it.into());
it.into()
}
ValueNs::EnumVariantId(it) => {
self.write_variant_resolution(id, it.into());
it.into()
}
ValueNs::ImplSelf(impl_id) => {
let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
let substs = Substs::type_params_for_generics(&generics);
let ty = self.db.impl_self_ty(impl_id).subst(&substs);
if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
let ty = self.db.value_ty(struct_id.into()).subst(&substs);
return Some(ty);
} else {
// FIXME: diagnostic, invalid Self reference
return None;
}
}
};
let ty = self.db.value_ty(typable);
// self_subst is just for the parent
let parent_substs = self_subst.unwrap_or_else(Substs::empty);
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let substs = Ty::substs_from_path(&ctx, path, typable, true);
let full_substs = Substs::builder(substs.len())
.use_parent_substs(&parent_substs)
.fill(substs.0[parent_substs.len()..].iter().cloned())
.build();
let ty = ty.subst(&full_substs);
Some(ty)
}
fn resolve_assoc_item(
&mut self,
def: TypeNs,
path: &Path,
remaining_index: usize,
id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> {
assert!(remaining_index < path.segments().len());
// There may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from
// the segments before that, we need to get either a type or a trait ref.
let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
let remaining_segments = path.segments().skip(remaining_index);
let is_before_last = remaining_segments.len() == 1;
match (def, is_before_last) {
(TypeNs::TraitId(trait_), true) => {
let segment =
remaining_segments.last().expect("there should be at least one segment here");
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let trait_ref = TraitRef::from_resolved_path(&ctx, trait_, resolved_segment, None);
self.resolve_trait_assoc_item(trait_ref, segment, id)
}
(def, _) => {
// Either we already have a type (e.g. `Vec::new`), or we have a
// trait but it's not the last segment, so the next segment
// should resolve to an associated type of that trait (e.g. `<T
// as Iterator>::Item::default`)
let remaining_segments_for_ty =
remaining_segments.take(remaining_segments.len() - 1);
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let (ty, _) = Ty::from_partly_resolved_hir_path(
&ctx,
def,
resolved_segment,
remaining_segments_for_ty,
true,
);
if let Ty::Unknown = ty {
return None;
}
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
let segment =
remaining_segments.last().expect("there should be at least one segment here");
self.resolve_ty_assoc_item(ty, &segment.name, id)
}
}
}
fn resolve_trait_assoc_item(
&mut self,
trait_ref: TraitRef,
segment: PathSegment<'_>,
id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> {
let trait_ = trait_ref.trait_;
let item =
self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
match item {
AssocItemId::FunctionId(func) => {
if segment.name == &self.db.function_data(func).name {
Some(AssocItemId::FunctionId(func))
} else {
None
}
}
AssocItemId::ConstId(konst) => {
if self
.db
.const_data(konst)
.name
.as_ref()
.map_or(false, |n| n == segment.name)
{
Some(AssocItemId::ConstId(konst))
} else {
None
}
}
AssocItemId::TypeAliasId(_) => None,
}
})?;
let def = match item {
AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
AssocItemId::ConstId(c) => ValueNs::ConstId(c),
AssocItemId::TypeAliasId(_) => unreachable!(),
};
self.write_assoc_resolution(id, item);
Some((def, Some(trait_ref.substs)))
}
fn resolve_ty_assoc_item(
&mut self,
ty: Ty,
name: &Name,
id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> {
if let Ty::Unknown = ty {
return None;
}
if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
return Some(result);
}
let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone());
let krate = self.resolver.krate()?;
let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
method_resolution::iterate_method_candidates(
&canonical_ty.value,
self.db,
self.trait_env.clone(),
krate,
&traits_in_scope,
Some(name),
method_resolution::LookupMode::Path,
move |_ty, item| {
let (def, container) = match item {
AssocItemId::FunctionId(f) => {
(ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
}
AssocItemId::ConstId(c) => {
(ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
}
AssocItemId::TypeAliasId(_) => unreachable!(),
};
let substs = match container {
AssocContainerId::ImplId(impl_id) => {
let impl_substs = Substs::build_for_def(self.db, impl_id)
.fill(iter::repeat_with(|| self.table.new_type_var()))
.build();
let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs);
self.unify(&impl_self_ty, &ty);
Some(impl_substs)
}
AssocContainerId::TraitId(trait_) => {
// we're picking this method from the trait, so register an obligation
// that the receiver type implements the trait
let trait_substs = Substs::build_for_def(self.db, trait_)
.push(ty.clone())
.fill(std::iter::repeat_with(|| self.table.new_type_var()))
.build();
self.obligations.push(super::Obligation::Trait(TraitRef {
trait_,
substs: trait_substs.clone(),
}));
Some(trait_substs)
}
AssocContainerId::ContainerId(_) => None,
};
self.write_assoc_resolution(id, item);
Some((def, substs))
},
)
}
fn resolve_enum_variant_on_ty(
&mut self,
ty: &Ty,
name: &Name,
id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> {
let (enum_id, subst) = match ty.as_adt() {
Some((AdtId::EnumId(e), subst)) => (e, subst),
_ => return None,
};
let enum_data = self.db.enum_data(enum_id);
let local_id = enum_data.variant(name)?;
let variant = EnumVariantId { parent: enum_id, local_id };
self.write_variant_resolution(id, variant.into());
Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
}
}
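For reference, a plain-Rust sketch (not part of this commit) of the value-path shapes that `resolve_value_path` and `resolve_assoc_item` above distinguish: local bindings, associated items reached through a type or a type anchor, and enum variants used as values:
struct S;
impl S {
    const K: i32 = 1;
    fn new() -> S { S }
}
enum E { A, B(i32) }
fn main() {
    let local = 0;              // local binding, typed from its pattern
    let _ = local;
    let _ = S::new();           // associated function found through the type `S`
    let _ = S::K;               // associated const
    let _ = E::A;               // unit enum variant as a value
    let _ = E::B(2);            // tuple enum variant used as a constructor
    let _ = <Vec<i32>>::new();  // associated item behind a type anchor
}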

View file

@ -0,0 +1,474 @@
//! Unification and canonicalization logic.
use std::borrow::Cow;
use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use test_utils::mark;
use super::{InferenceContext, Obligation};
use crate::{
BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty,
TyKind, TypeCtor, TypeWalk,
};
impl<'a> InferenceContext<'a> {
pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b>
where
'a: 'b,
{
Canonicalizer { ctx: self, free_vars: Vec::new(), var_stack: Vec::new() }
}
}
pub(super) struct Canonicalizer<'a, 'b>
where
'a: 'b,
{
ctx: &'b mut InferenceContext<'a>,
free_vars: Vec<InferTy>,
/// A stack of type variables that is used to detect recursive types (which
/// are an error, but we need to protect against them to avoid stack
/// overflows).
var_stack: Vec<TypeVarId>,
}
#[derive(Debug)]
pub(super) struct Canonicalized<T> {
pub value: Canonical<T>,
free_vars: Vec<InferTy>,
}
impl<'a, 'b> Canonicalizer<'a, 'b>
where
'a: 'b,
{
fn add(&mut self, free_var: InferTy) -> usize {
self.free_vars.iter().position(|&v| v == free_var).unwrap_or_else(|| {
let next_index = self.free_vars.len();
self.free_vars.push(free_var);
next_index
})
}
fn do_canonicalize<T: TypeWalk>(&mut self, t: T, binders: DebruijnIndex) -> T {
t.fold_binders(
&mut |ty, binders| match ty {
Ty::Infer(tv) => {
let inner = tv.to_inner();
if self.var_stack.contains(&inner) {
// recursive type
return tv.fallback_value();
}
if let Some(known_ty) =
self.ctx.table.var_unification_table.inlined_probe_value(inner).known()
{
self.var_stack.push(inner);
let result = self.do_canonicalize(known_ty.clone(), binders);
self.var_stack.pop();
result
} else {
let root = self.ctx.table.var_unification_table.find(inner);
let free_var = match tv {
InferTy::TypeVar(_) => InferTy::TypeVar(root),
InferTy::IntVar(_) => InferTy::IntVar(root),
InferTy::FloatVar(_) => InferTy::FloatVar(root),
InferTy::MaybeNeverTypeVar(_) => InferTy::MaybeNeverTypeVar(root),
};
let position = self.add(free_var);
Ty::Bound(BoundVar::new(binders, position))
}
}
_ => ty,
},
binders,
)
}
fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> {
let kinds = self
.free_vars
.iter()
.map(|v| match v {
// mapping MaybeNeverTypeVar to the same kind as general ones
// should be fine, because as opposed to int or float type vars,
// they don't restrict what kind of type can go into them; they
// just affect fallback.
InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General,
InferTy::IntVar(_) => TyKind::Integer,
InferTy::FloatVar(_) => TyKind::Float,
})
.collect();
Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars }
}
pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> {
let result = self.do_canonicalize(ty, DebruijnIndex::INNERMOST);
self.into_canonicalized(result)
}
pub(crate) fn canonicalize_obligation(
mut self,
obligation: InEnvironment<Obligation>,
) -> Canonicalized<InEnvironment<Obligation>> {
let result = match obligation.value {
Obligation::Trait(tr) => {
Obligation::Trait(self.do_canonicalize(tr, DebruijnIndex::INNERMOST))
}
Obligation::Projection(pr) => {
Obligation::Projection(self.do_canonicalize(pr, DebruijnIndex::INNERMOST))
}
};
self.into_canonicalized(InEnvironment {
value: result,
environment: obligation.environment,
})
}
}
impl<T> Canonicalized<T> {
pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
ty.walk_mut_binders(
&mut |ty, binders| {
if let &mut Ty::Bound(bound) = ty {
if bound.debruijn >= binders {
*ty = Ty::Infer(self.free_vars[bound.index]);
}
}
},
DebruijnIndex::INNERMOST,
);
ty
}
pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) {
// the solution may contain new variables, which we need to convert to new inference vars
let new_vars = Substs(
solution
.kinds
.iter()
.map(|k| match k {
TyKind::General => ctx.table.new_type_var(),
TyKind::Integer => ctx.table.new_integer_var(),
TyKind::Float => ctx.table.new_float_var(),
})
.collect(),
);
for (i, ty) in solution.value.into_iter().enumerate() {
let var = self.free_vars[i];
// eagerly replace projections in the type; we may be getting types
// e.g. from where clauses where this hasn't happened yet
let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars));
ctx.table.unify(&Ty::Infer(var), &ty);
}
}
}
pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
let mut table = InferenceTable::new();
let vars = Substs(
tys.kinds
.iter()
// we always use type vars here because we want everything to
// fall back to Unknown in the end (kind of hacky, as below)
.map(|_| table.new_type_var())
.collect(),
);
let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars);
let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars);
if !table.unify(&ty1_with_vars, &ty2_with_vars) {
return None;
}
// default any type vars that weren't unified back to their original bound vars
// (kind of hacky)
for (i, var) in vars.iter().enumerate() {
if &*table.resolve_ty_shallow(var) == var {
table.unify(var, &Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i)));
}
}
Some(
Substs::builder(tys.kinds.len())
.fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone())))
.build(),
)
}
#[derive(Clone, Debug)]
pub(crate) struct InferenceTable {
pub(super) var_unification_table: InPlaceUnificationTable<TypeVarId>,
}
impl InferenceTable {
pub fn new() -> Self {
InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
}
pub fn new_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
pub fn new_integer_var(&mut self) -> Ty {
Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
pub fn new_float_var(&mut self) -> Ty {
Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
pub fn new_maybe_never_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::MaybeNeverTypeVar(
self.var_unification_table.new_key(TypeVarValue::Unknown),
))
}
pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
self.resolve_ty_completely_inner(&mut Vec::new(), ty)
}
pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
}
pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
self.unify_inner(ty1, ty2, 0)
}
pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
}
fn unify_inner(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
if depth > 1000 {
// prevent stack overflows
panic!("infinite recursion in unification");
}
if ty1 == ty2 {
return true;
}
// try to resolve type vars first
let ty1 = self.resolve_ty_shallow(ty1);
let ty2 = self.resolve_ty_shallow(ty2);
match (&*ty1, &*ty2) {
(Ty::Apply(a_ty1), Ty::Apply(a_ty2)) if a_ty1.ctor == a_ty2.ctor => {
self.unify_substs(&a_ty1.parameters, &a_ty2.parameters, depth + 1)
}
_ => self.unify_inner_trivial(&ty1, &ty2, depth),
}
}
pub(super) fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
match (ty1, ty2) {
(Ty::Unknown, _) | (_, Ty::Unknown) => true,
(Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
(Ty::Dyn(dyn1), Ty::Dyn(dyn2)) if dyn1.len() == dyn2.len() => {
for (pred1, pred2) in dyn1.iter().zip(dyn2.iter()) {
if !self.unify_preds(pred1, pred2, depth + 1) {
return false;
}
}
true
}
(Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
| (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
| (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
| (
Ty::Infer(InferTy::MaybeNeverTypeVar(tv1)),
Ty::Infer(InferTy::MaybeNeverTypeVar(tv2)),
) => {
// both type vars are unknown since we tried to resolve them
self.var_unification_table.union(*tv1, *tv2);
true
}
// The order of MaybeNeverTypeVar matters here.
// Unifying MaybeNeverTypeVar and TypeVar will let the latter become MaybeNeverTypeVar.
// Unifying MaybeNeverTypeVar and other concrete type will let the former become it.
(Ty::Infer(InferTy::TypeVar(tv)), other)
| (other, Ty::Infer(InferTy::TypeVar(tv)))
| (Ty::Infer(InferTy::MaybeNeverTypeVar(tv)), other)
| (other, Ty::Infer(InferTy::MaybeNeverTypeVar(tv)))
| (Ty::Infer(InferTy::IntVar(tv)), other @ ty_app!(TypeCtor::Int(_)))
| (other @ ty_app!(TypeCtor::Int(_)), Ty::Infer(InferTy::IntVar(tv)))
| (Ty::Infer(InferTy::FloatVar(tv)), other @ ty_app!(TypeCtor::Float(_)))
| (other @ ty_app!(TypeCtor::Float(_)), Ty::Infer(InferTy::FloatVar(tv))) => {
// the type var is unknown since we tried to resolve it
self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone()));
true
}
_ => false,
}
}
fn unify_preds(
&mut self,
pred1: &GenericPredicate,
pred2: &GenericPredicate,
depth: usize,
) -> bool {
match (pred1, pred2) {
(GenericPredicate::Implemented(tr1), GenericPredicate::Implemented(tr2))
if tr1.trait_ == tr2.trait_ =>
{
self.unify_substs(&tr1.substs, &tr2.substs, depth + 1)
}
(GenericPredicate::Projection(proj1), GenericPredicate::Projection(proj2))
if proj1.projection_ty.associated_ty == proj2.projection_ty.associated_ty =>
{
self.unify_substs(
&proj1.projection_ty.parameters,
&proj2.projection_ty.parameters,
depth + 1,
) && self.unify_inner(&proj1.ty, &proj2.ty, depth + 1)
}
_ => false,
}
}
/// If `ty` is a type variable with known type, returns that type;
/// otherwise, return ty.
pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
let mut ty = Cow::Borrowed(ty);
// The type variable could resolve to an int/float variable. Hence try
// resolving up to three times; each type of variable shouldn't occur
// more than once
for i in 0..3 {
if i > 0 {
mark::hit!(type_var_resolves_to_int_var);
}
match &*ty {
Ty::Infer(tv) => {
let inner = tv.to_inner();
match self.var_unification_table.inlined_probe_value(inner).known() {
Some(known_ty) => {
// The known_ty can't be a type var itself
ty = Cow::Owned(known_ty.clone());
}
_ => return ty,
}
}
_ => return ty,
}
}
log::error!("Inference variable still not resolved: {:?}", ty);
ty
}
/// Resolves the type as far as currently possible, replacing type variables
/// by their known types. All types returned by the infer_* functions should
/// be resolved as far as possible, i.e. contain no type variables with
/// known type.
fn resolve_ty_as_possible_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
ty.fold(&mut |ty| match ty {
Ty::Infer(tv) => {
let inner = tv.to_inner();
if tv_stack.contains(&inner) {
mark::hit!(type_var_cycles_resolve_as_possible);
// recursive type
return tv.fallback_value();
}
if let Some(known_ty) =
self.var_unification_table.inlined_probe_value(inner).known()
{
// known_ty may contain other variables that are known by now
tv_stack.push(inner);
let result = self.resolve_ty_as_possible_inner(tv_stack, known_ty.clone());
tv_stack.pop();
result
} else {
ty
}
}
_ => ty,
})
}
/// Resolves the type completely; type variables without known type are
/// replaced by Ty::Unknown.
fn resolve_ty_completely_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
ty.fold(&mut |ty| match ty {
Ty::Infer(tv) => {
let inner = tv.to_inner();
if tv_stack.contains(&inner) {
mark::hit!(type_var_cycles_resolve_completely);
// recursive type
return tv.fallback_value();
}
if let Some(known_ty) =
self.var_unification_table.inlined_probe_value(inner).known()
{
// known_ty may contain other variables that are known by now
tv_stack.push(inner);
let result = self.resolve_ty_completely_inner(tv_stack, known_ty.clone());
tv_stack.pop();
result
} else {
tv.fallback_value()
}
}
_ => ty,
})
}
}
/// The ID of a type variable.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct TypeVarId(pub(super) u32);
impl UnifyKey for TypeVarId {
type Value = TypeVarValue;
fn index(&self) -> u32 {
self.0
}
fn from_index(i: u32) -> Self {
TypeVarId(i)
}
fn tag() -> &'static str {
"TypeVarId"
}
}
/// The value of a type variable: either we already know the type, or we don't
/// know it yet.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum TypeVarValue {
Known(Ty),
Unknown,
}
impl TypeVarValue {
fn known(&self) -> Option<&Ty> {
match self {
TypeVarValue::Known(ty) => Some(ty),
TypeVarValue::Unknown => None,
}
}
}
impl UnifyValue for TypeVarValue {
type Error = NoError;
fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
match (value1, value2) {
// We should never equate two type variables, both of which have
// known types. Instead, we recursively equate those types.
(TypeVarValue::Known(t1), TypeVarValue::Known(t2)) => panic!(
"equating two type variables, both of which have known types: {:?} and {:?}",
t1, t2
),
// If one side is known, prefer that one.
(TypeVarValue::Known(..), TypeVarValue::Unknown) => Ok(value1.clone()),
(TypeVarValue::Unknown, TypeVarValue::Known(..)) => Ok(value2.clone()),
(TypeVarValue::Unknown, TypeVarValue::Unknown) => Ok(TypeVarValue::Unknown),
}
}
}
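At the language level, the type variables managed by `InferenceTable` correspond to inference holes like the ones in this standalone Rust sketch (not part of this commit):
fn main() {
    // `Vec::new` introduces a fresh type variable for the element type;
    // the later `push` unifies it with `u8`.
    let mut v = Vec::new();
    v.push(1u8);
    let _: &u8 = &v[0];
    // `None` carries a type variable too; unifying with the annotated
    // result of `unwrap_or` resolves it to `&str`.
    let s = None;
    let _: &str = s.unwrap_or("fallback");
}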

1078
crates/hir_ty/src/lib.rs Normal file

File diff suppressed because it is too large

1242
crates/hir_ty/src/lower.rs Normal file

File diff suppressed because it is too large

View file

@ -0,0 +1,769 @@
//! This module is concerned with finding methods that a given type provides.
//! For details about how this works in rustc, see the method lookup page in the
//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
use std::{iter, sync::Arc};
use arrayvec::ArrayVec;
use base_db::CrateId;
use hir_def::{
builtin_type::{IntBitness, Signedness},
lang_item::LangItemTarget,
type_ref::Mutability,
AssocContainerId, AssocItemId, FunctionId, HasModule, ImplId, Lookup, TraitId,
};
use hir_expand::name::Name;
use rustc_hash::{FxHashMap, FxHashSet};
use super::Substs;
use crate::{
autoderef,
db::HirDatabase,
primitive::{FloatBitness, FloatTy, IntTy},
utils::all_super_traits,
ApplicationTy, Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind,
TypeCtor, TypeWalk,
};
/// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TyFingerprint {
Apply(TypeCtor),
}
impl TyFingerprint {
/// Creates a TyFingerprint for looking up an impl. Only certain types can
/// have impls: if we have some `struct S`, we can have an `impl S`, but not
/// `impl &S`. Hence, this will return `None` for reference types and such.
pub(crate) fn for_impl(ty: &Ty) -> Option<TyFingerprint> {
match ty {
Ty::Apply(a_ty) => Some(TyFingerprint::Apply(a_ty.ctor)),
_ => None,
}
}
}
pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: IntBitness::X8,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: IntBitness::X16,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: IntBitness::X32,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: IntBitness::X64,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: IntBitness::X128,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: IntBitness::Xsize,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: IntBitness::X8,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: IntBitness::X16,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: IntBitness::X32,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: IntBitness::X64,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: IntBitness::X128,
})),
TyFingerprint::Apply(TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: IntBitness::Xsize,
})),
];
pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })),
TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })),
];
/// Trait impls defined or available in some crate.
#[derive(Debug, Eq, PartialEq)]
pub struct TraitImpls {
// If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
}
impl TraitImpls {
pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("trait_impls_in_crate_query");
let mut impls = Self { map: FxHashMap::default() };
let crate_def_map = db.crate_def_map(krate);
for (_module_id, module_data) in crate_def_map.modules.iter() {
for impl_id in module_data.scope.impls() {
let target_trait = match db.impl_trait(impl_id) {
Some(tr) => tr.value.trait_,
None => continue,
};
let self_ty = db.impl_self_ty(impl_id);
let self_ty_fp = TyFingerprint::for_impl(&self_ty.value);
impls
.map
.entry(target_trait)
.or_default()
.entry(self_ty_fp)
.or_default()
.push(impl_id);
}
}
Arc::new(impls)
}
pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("trait_impls_in_deps_query");
let crate_graph = db.crate_graph();
let mut res = Self { map: FxHashMap::default() };
for krate in crate_graph.transitive_deps(krate) {
res.merge(&db.trait_impls_in_crate(krate));
}
Arc::new(res)
}
fn merge(&mut self, other: &Self) {
for (trait_, other_map) in &other.map {
let map = self.map.entry(*trait_).or_default();
for (fp, impls) in other_map {
let vec = map.entry(*fp).or_default();
vec.extend(impls);
}
}
}
/// Queries all impls of the given trait.
pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
self.map
.get(&trait_)
.into_iter()
.flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
}
/// Queries all impls of `trait_` that may apply to `self_ty`.
pub fn for_trait_and_self_ty(
&self,
trait_: TraitId,
self_ty: TyFingerprint,
) -> impl Iterator<Item = ImplId> + '_ {
self.map
.get(&trait_)
.into_iter()
.flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty))))
.flat_map(|v| v.iter().copied())
}
pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
}
}
/// Inherent impls defined in some crate.
///
/// Inherent impls can only be defined in the crate that also defines the self type of the impl
/// (note that some primitives are considered to be defined by both libcore and liballoc).
///
/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
/// single crate.
#[derive(Debug, Eq, PartialEq)]
pub struct InherentImpls {
map: FxHashMap<TyFingerprint, Vec<ImplId>>,
}
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default();
let crate_def_map = db.crate_def_map(krate);
for (_module_id, module_data) in crate_def_map.modules.iter() {
for impl_id in module_data.scope.impls() {
let data = db.impl_data(impl_id);
if data.target_trait.is_some() {
continue;
}
let self_ty = db.impl_self_ty(impl_id);
if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) {
map.entry(fp).or_default().push(impl_id);
}
}
}
Arc::new(Self { map })
}
pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
match TyFingerprint::for_impl(self_ty) {
Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
None => &[],
}
}
pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
self.map.values().flat_map(|v| v.iter().copied())
}
}
impl Ty {
pub fn def_crates(
&self,
db: &dyn HirDatabase,
cur_crate: CrateId,
) -> Option<ArrayVec<[CrateId; 2]>> {
// Types like slice can have inherent impls in several crates (core and alloc).
// The corresponding impls are marked with lang items, so we can use them to find the required crates.
macro_rules! lang_item_crate {
($($name:expr),+ $(,)?) => {{
let mut v = ArrayVec::<[LangItemTarget; 2]>::new();
$(
v.extend(db.lang_item(cur_crate, $name.into()));
)+
v
}};
}
let lang_item_targets = match self {
Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Adt(def_id) => {
return Some(std::iter::once(def_id.module(db.upcast()).krate).collect())
}
TypeCtor::Bool => lang_item_crate!("bool"),
TypeCtor::Char => lang_item_crate!("char"),
TypeCtor::Float(f) => match f.bitness {
// There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"),
FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"),
},
TypeCtor::Int(i) => lang_item_crate!(i.ty_to_string()),
TypeCtor::Str => lang_item_crate!("str_alloc", "str"),
TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"),
TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"),
TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"),
_ => return None,
},
_ => return None,
};
let res = lang_item_targets
.into_iter()
.filter_map(|it| match it {
LangItemTarget::ImplDefId(it) => Some(it),
_ => None,
})
.map(|it| it.lookup(db.upcast()).container.module(db.upcast()).krate)
.collect();
Some(res)
}
}
/// Look up the method with the given name, returning the actual autoderefed
/// receiver type (but without autoref applied yet).
pub(crate) fn lookup_method(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: &Name,
) -> Option<(Ty, FunctionId)> {
iterate_method_candidates(
ty,
db,
env,
krate,
&traits_in_scope,
Some(name),
LookupMode::MethodCall,
|ty, f| match f {
AssocItemId::FunctionId(f) => Some((ty.clone(), f)),
_ => None,
},
)
}
/// Whether we're looking up a dotted method call (like `v.len()`) or a path
/// (like `Vec::new`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LookupMode {
/// Looking up a method call like `v.len()`: We only consider candidates
/// that have a `self` parameter, and do autoderef.
MethodCall,
/// Looking up a path like `Vec::new` or `Vec::default`: We consider all
/// candidates including associated constants, but don't do autoderef.
Path,
}
// This would be nicer if it just returned an iterator, but that runs into
// lifetime problems, because we need to borrow temp `CrateImplDefs`.
// FIXME add a context type here?
pub fn iterate_method_candidates<T>(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>,
mode: LookupMode,
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> {
let mut slot = None;
iterate_method_candidates_impl(
ty,
db,
env,
krate,
traits_in_scope,
name,
mode,
&mut |ty, item| {
assert!(slot.is_none());
slot = callback(ty, item);
slot.is_some()
},
);
slot
}
fn iterate_method_candidates_impl(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>,
mode: LookupMode,
callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
) -> bool {
match mode {
LookupMode::MethodCall => {
// For method calls, Rust first does any number of autoderef steps, and then one
// autoref (i.e. when the method takes &self or &mut self). We just ignore
// the autoref currently -- when we find a method matching the given name,
// we assume it fits.
// Also note that when we've got a receiver like &S, even if the method we
// find in the end takes &self, we still do the autoderef step (just as
// rustc does an autoderef and then autoref again).
let ty = InEnvironment { value: ty.clone(), environment: env.clone() };
// We have to be careful about the order we're looking at candidates
// in here. Consider the case where we're resolving `x.clone()`
// where `x: &Vec<_>`. This resolves to the clone method with self
// type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
// the receiver type exactly matches before cases where we have to
// do autoref. But in the autoderef steps, the `&_` self type comes
// up *before* the `Vec<_>` self type.
//
// On the other hand, we don't want to just pick any by-value method
// before any by-autoref method; it's just that we need to consider
// the methods by autoderef order of *receiver types*, not *self
// types*.
let deref_chain = autoderef_method_receiver(db, krate, ty);
for i in 0..deref_chain.len() {
if iterate_method_candidates_with_autoref(
&deref_chain[i..],
db,
env.clone(),
krate,
traits_in_scope,
name,
callback,
) {
return true;
}
}
false
}
LookupMode::Path => {
// No autoderef for path lookups
iterate_method_candidates_for_self_ty(
&ty,
db,
env,
krate,
traits_in_scope,
name,
callback,
)
}
}
}
fn iterate_method_candidates_with_autoref(
deref_chain: &[Canonical<Ty>],
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>,
mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
) -> bool {
if iterate_method_candidates_by_receiver(
&deref_chain[0],
&deref_chain[1..],
db,
env.clone(),
krate,
&traits_in_scope,
name,
&mut callback,
) {
return true;
}
let refed = Canonical {
kinds: deref_chain[0].kinds.clone(),
value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()),
};
if iterate_method_candidates_by_receiver(
&refed,
deref_chain,
db,
env.clone(),
krate,
&traits_in_scope,
name,
&mut callback,
) {
return true;
}
let ref_muted = Canonical {
kinds: deref_chain[0].kinds.clone(),
value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()),
};
if iterate_method_candidates_by_receiver(
&ref_muted,
deref_chain,
db,
env,
krate,
&traits_in_scope,
name,
&mut callback,
) {
return true;
}
false
}
fn iterate_method_candidates_by_receiver(
receiver_ty: &Canonical<Ty>,
rest_of_deref_chain: &[Canonical<Ty>],
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>,
mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
) -> bool {
// We're looking for methods with *receiver* type receiver_ty. These could
// be found in any of the derefs of receiver_ty, so we have to go through
// that.
for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
if iterate_inherent_methods(self_ty, db, name, Some(receiver_ty), krate, &mut callback) {
return true;
}
}
for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
if iterate_trait_method_candidates(
self_ty,
db,
env.clone(),
krate,
&traits_in_scope,
name,
Some(receiver_ty),
&mut callback,
) {
return true;
}
}
false
}
fn iterate_method_candidates_for_self_ty(
self_ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>,
mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
) -> bool {
if iterate_inherent_methods(self_ty, db, name, None, krate, &mut callback) {
return true;
}
iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback)
}
fn iterate_trait_method_candidates(
self_ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>,
receiver_ty: Option<&Canonical<Ty>>,
callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
) -> bool {
// if ty is `dyn Trait`, the trait doesn't need to be in scope
let inherent_trait =
self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
let env_traits = if let Ty::Placeholder(_) = self_ty.value {
// if we have `T: Trait` in the param env, the trait doesn't need to be in scope
env.trait_predicates_for_self_ty(&self_ty.value)
.map(|tr| tr.trait_)
.flat_map(|t| all_super_traits(db.upcast(), t))
.collect()
} else {
Vec::new()
};
let traits =
inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied());
'traits: for t in traits {
let data = db.trait_data(t);
// we'll be lazy about checking whether the type implements the
// trait, but if we find out it doesn't, we'll skip the rest of the
// iteration
let mut known_implemented = false;
for (_name, item) in data.items.iter() {
if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) {
continue;
}
if !known_implemented {
let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone());
if db.trait_solve(krate, goal).is_none() {
continue 'traits;
}
}
known_implemented = true;
if callback(&self_ty.value, *item) {
return true;
}
}
}
false
}
fn iterate_inherent_methods(
self_ty: &Canonical<Ty>,
db: &dyn HirDatabase,
name: Option<&Name>,
receiver_ty: Option<&Canonical<Ty>>,
krate: CrateId,
callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
) -> bool {
let def_crates = match self_ty.value.def_crates(db, krate) {
Some(k) => k,
None => return false,
};
for krate in def_crates {
let impls = db.inherent_impls_in_crate(krate);
for &impl_def in impls.for_self_ty(&self_ty.value) {
for &item in db.impl_data(impl_def).items.iter() {
if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
continue;
}
// we have to check whether the self type unifies with the type
// that the impl is for. If we have a receiver type, this
// already happens in `is_valid_candidate` above; if not, we
// check it here
if receiver_ty.is_none() && inherent_impl_substs(db, impl_def, self_ty).is_none() {
test_utils::mark::hit!(impl_self_type_match_without_receiver);
continue;
}
if callback(&self_ty.value, item) {
return true;
}
}
}
}
false
}
/// Returns the self type for the index trait call.
pub fn resolve_indexing_op(
db: &dyn HirDatabase,
ty: &Canonical<Ty>,
env: Arc<TraitEnvironment>,
krate: CrateId,
index_trait: TraitId,
) -> Option<Canonical<Ty>> {
let ty = InEnvironment { value: ty.clone(), environment: env.clone() };
let deref_chain = autoderef_method_receiver(db, krate, ty);
for ty in deref_chain {
let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone());
if db.trait_solve(krate, goal).is_some() {
return Some(ty);
}
}
None
}
fn is_valid_candidate(
db: &dyn HirDatabase,
name: Option<&Name>,
receiver_ty: Option<&Canonical<Ty>>,
item: AssocItemId,
self_ty: &Canonical<Ty>,
) -> bool {
match item {
AssocItemId::FunctionId(m) => {
let data = db.function_data(m);
if let Some(name) = name {
if &data.name != name {
return false;
}
}
if let Some(receiver_ty) = receiver_ty {
if !data.has_self_param {
return false;
}
let transformed_receiver_ty = match transform_receiver_ty(db, m, self_ty) {
Some(ty) => ty,
None => return false,
};
if transformed_receiver_ty != receiver_ty.value {
return false;
}
}
true
}
AssocItemId::ConstId(c) => {
let data = db.const_data(c);
name.map_or(true, |name| data.name.as_ref() == Some(name)) && receiver_ty.is_none()
}
_ => false,
}
}
pub(crate) fn inherent_impl_substs(
db: &dyn HirDatabase,
impl_id: ImplId,
self_ty: &Canonical<Ty>,
) -> Option<Substs> {
// we create a var for each type parameter of the impl; we need to keep in
// mind here that `self_ty` might have vars of its own
let vars = Substs::build_for_def(db, impl_id)
.fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len())
.build();
let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars);
let mut kinds = self_ty.kinds.to_vec();
kinds.extend(iter::repeat(TyKind::General).take(vars.len()));
let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) };
let substs = super::infer::unify(&tys);
// We only want the substs for the vars we added, not the ones from self_ty.
// Also, if any of the vars we added are still in there, we replace them by
// Unknown. I think this can only really happen if self_ty contained
// Unknown, and in that case we want the result to contain Unknown in those
// places again.
substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len()))
}
/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
/// num_vars_to_keep) by `Ty::Unknown`.
fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs {
s.fold_binders(
&mut |ty, binders| {
if let Ty::Bound(bound) = &ty {
if bound.index >= num_vars_to_keep && bound.debruijn >= binders {
Ty::Unknown
} else {
ty
}
} else {
ty
}
},
DebruijnIndex::INNERMOST,
)
}
fn transform_receiver_ty(
db: &dyn HirDatabase,
function_id: FunctionId,
self_ty: &Canonical<Ty>,
) -> Option<Ty> {
let substs = match function_id.lookup(db.upcast()).container {
AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id)
.push(self_ty.value.clone())
.fill_with_unknown()
.build(),
AssocContainerId::ImplId(impl_id) => inherent_impl_substs(db, impl_id, &self_ty)?,
AssocContainerId::ContainerId(_) => unreachable!(),
};
let sig = db.callable_item_signature(function_id.into());
Some(sig.value.params()[0].clone().subst_bound_vars(&substs))
}
pub fn implements_trait(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
krate: CrateId,
trait_: TraitId,
) -> bool {
let goal = generic_implements_goal(db, env, trait_, ty.clone());
let solution = db.trait_solve(krate, goal);
solution.is_some()
}
/// This creates Substs for a trait with the given Self type and type variables
/// for all other parameters, to query Chalk with it.
fn generic_implements_goal(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
trait_: TraitId,
self_ty: Canonical<Ty>,
) -> Canonical<InEnvironment<super::Obligation>> {
let mut kinds = self_ty.kinds.to_vec();
let substs = super::Substs::build_for_def(db, trait_)
.push(self_ty.value)
.fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
.build();
kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1));
let trait_ref = TraitRef { trait_, substs };
let obligation = super::Obligation::Trait(trait_ref);
Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) }
}
fn autoderef_method_receiver(
db: &dyn HirDatabase,
krate: CrateId,
ty: InEnvironment<Canonical<Ty>>,
) -> Vec<Canonical<Ty>> {
let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect();
// As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) =
deref_chain.last().map(|ty| &ty.value)
{
let kinds = deref_chain.last().unwrap().kinds.clone();
let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone());
deref_chain.push(Canonical { value: unsized_ty, kinds })
}
deref_chain
}
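The receiver-ordering concern described in `iterate_method_candidates_impl` shows up in plain Rust as well (standalone sketch, not part of this commit): on `x: &Vec<i32>`, `x.clone()` must resolve to `Vec`'s `Clone` impl, whose receiver type `&Vec<i32>` matches exactly, rather than to the `Clone` impl for `&Vec<i32>` itself:
fn main() {
    let v = vec![1, 2, 3];
    let x: &Vec<i32> = &v;
    // Resolves to `<Vec<i32> as Clone>::clone` with receiver `&Vec<i32>`,
    // so `y` is an owned `Vec<i32>`, not another `&Vec<i32>`.
    let y = x.clone();
    let _: Vec<i32> = y;
}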

58
crates/hir_ty/src/op.rs Normal file
View file

@ -0,0 +1,58 @@
//! Helper functions for binary operator type inference.
use hir_def::expr::{ArithOp, BinaryOp, CmpOp};
use super::{InferTy, Ty, TypeCtor};
use crate::ApplicationTy;
pub(super) fn binary_op_return_ty(op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Ty {
match op {
BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => Ty::simple(TypeCtor::Bool),
BinaryOp::Assignment { .. } => Ty::unit(),
BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => match lhs_ty {
Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
TypeCtor::Int(..) | TypeCtor::Float(..) => lhs_ty,
_ => Ty::Unknown,
},
Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
_ => Ty::Unknown,
},
BinaryOp::ArithOp(_) => match rhs_ty {
Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
TypeCtor::Int(..) | TypeCtor::Float(..) => rhs_ty,
_ => Ty::Unknown,
},
Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => rhs_ty,
_ => Ty::Unknown,
},
}
}
pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty {
match op {
BinaryOp::LogicOp(..) => Ty::simple(TypeCtor::Bool),
BinaryOp::Assignment { op: None } => lhs_ty,
BinaryOp::CmpOp(CmpOp::Eq { .. }) => match lhs_ty {
Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
TypeCtor::Int(..)
| TypeCtor::Float(..)
| TypeCtor::Str
| TypeCtor::Char
| TypeCtor::Bool => lhs_ty,
_ => Ty::Unknown,
},
Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
_ => Ty::Unknown,
},
BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => Ty::Unknown,
BinaryOp::CmpOp(CmpOp::Ord { .. })
| BinaryOp::Assignment { op: Some(_) }
| BinaryOp::ArithOp(_) => match lhs_ty {
Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
TypeCtor::Int(..) | TypeCtor::Float(..) => lhs_ty,
_ => Ty::Unknown,
},
Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
_ => Ty::Unknown,
},
}
}
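A quick plain-Rust illustration (not part of this commit) of the rules encoded above: shifts keep the left-hand side's type, comparisons always produce `bool`, and ordinary arithmetic takes its expectations and result from the operands:
fn main() {
    let a: u8 = 1;
    let shifted = a << 3u32;   // Shl/Shr: the result takes the lhs type (u8)
    let _: u8 = shifted;
    let ordered = 1i64 < 2;    // CmpOp: the rhs is expected to match the lhs; the result is bool
    let _: bool = ordered;
    let sum = 1.5f32 + 2.0;    // ArithOp: the rhs is expected from the lhs (so 2.0 is f32)
    let _: f32 = sum;
}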

View file

@ -0,0 +1,139 @@
//! Defines primitive types, which have a couple of peculiarities:
//!
//! * during type inference, they can be uncertain (i.e., `let x = 92;`)
//! * they don't belong to any particular crate.
use std::fmt;
pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, FloatBitness, IntBitness, Signedness};
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct IntTy {
pub signedness: Signedness,
pub bitness: IntBitness,
}
impl fmt::Debug for IntTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl fmt::Display for IntTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.ty_to_string())
}
}
impl IntTy {
pub fn isize() -> IntTy {
IntTy { signedness: Signedness::Signed, bitness: IntBitness::Xsize }
}
pub fn i8() -> IntTy {
IntTy { signedness: Signedness::Signed, bitness: IntBitness::X8 }
}
pub fn i16() -> IntTy {
IntTy { signedness: Signedness::Signed, bitness: IntBitness::X16 }
}
pub fn i32() -> IntTy {
IntTy { signedness: Signedness::Signed, bitness: IntBitness::X32 }
}
pub fn i64() -> IntTy {
IntTy { signedness: Signedness::Signed, bitness: IntBitness::X64 }
}
pub fn i128() -> IntTy {
IntTy { signedness: Signedness::Signed, bitness: IntBitness::X128 }
}
pub fn usize() -> IntTy {
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize }
}
pub fn u8() -> IntTy {
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X8 }
}
pub fn u16() -> IntTy {
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X16 }
}
pub fn u32() -> IntTy {
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X32 }
}
pub fn u64() -> IntTy {
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X64 }
}
pub fn u128() -> IntTy {
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }
}
pub fn ty_to_string(self) -> &'static str {
match (self.signedness, self.bitness) {
(Signedness::Signed, IntBitness::Xsize) => "isize",
(Signedness::Signed, IntBitness::X8) => "i8",
(Signedness::Signed, IntBitness::X16) => "i16",
(Signedness::Signed, IntBitness::X32) => "i32",
(Signedness::Signed, IntBitness::X64) => "i64",
(Signedness::Signed, IntBitness::X128) => "i128",
(Signedness::Unsigned, IntBitness::Xsize) => "usize",
(Signedness::Unsigned, IntBitness::X8) => "u8",
(Signedness::Unsigned, IntBitness::X16) => "u16",
(Signedness::Unsigned, IntBitness::X32) => "u32",
(Signedness::Unsigned, IntBitness::X64) => "u64",
(Signedness::Unsigned, IntBitness::X128) => "u128",
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct FloatTy {
pub bitness: FloatBitness,
}
impl fmt::Debug for FloatTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl fmt::Display for FloatTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.ty_to_string())
}
}
impl FloatTy {
pub fn f32() -> FloatTy {
FloatTy { bitness: FloatBitness::X32 }
}
pub fn f64() -> FloatTy {
FloatTy { bitness: FloatBitness::X64 }
}
pub fn ty_to_string(self) -> &'static str {
match self.bitness {
FloatBitness::X32 => "f32",
FloatBitness::X64 => "f64",
}
}
}
impl From<BuiltinInt> for IntTy {
fn from(t: BuiltinInt) -> Self {
IntTy { signedness: t.signedness, bitness: t.bitness }
}
}
impl From<BuiltinFloat> for FloatTy {
fn from(t: BuiltinFloat) -> Self {
FloatTy { bitness: t.bitness }
}
}
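As the module doc says, primitives can be uncertain during inference; a minimal standalone example (not part of this commit) of that uncertainty and its fallback:
fn main() {
    let x = 92;        // integer variable, falls back to i32 if unconstrained
    let y = 1.0;       // float variable, falls back to f64 if unconstrained
    let z: u16 = 92;   // the same literal, constrained to u16 instead
    println!("{} {} {}", x, y, z);
}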

View file

@ -0,0 +1,136 @@
//! Database used for testing `hir`.
use std::{
fmt, panic,
sync::{Arc, Mutex},
};
use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::AstDatabase;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::TextRange;
use test_utils::extract_annotations;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::AstDatabaseStorage,
hir_def::db::InternDatabaseStorage,
hir_def::db::DefDatabaseStorage,
crate::db::HirDatabaseStorage
)]
#[derive(Default)]
pub struct TestDB {
storage: salsa::Storage<TestDB>,
events: Mutex<Option<Vec<salsa::Event>>>,
}
impl fmt::Debug for TestDB {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("TestDB").finish()
}
}
impl Upcast<dyn AstDatabase> for TestDB {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
&*self
}
}
impl Upcast<dyn DefDatabase> for TestDB {
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self
}
}
impl salsa::Database for TestDB {
fn salsa_event(&self, event: salsa::Event) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event);
}
}
}
impl salsa::ParallelDatabase for TestDB {
fn snapshot(&self) -> salsa::Snapshot<TestDB> {
salsa::Snapshot::new(TestDB {
storage: self.storage.snapshot(),
events: Default::default(),
})
}
}
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn file_text(&self, file_id: FileId) -> Arc<String> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(anchor, path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}
impl TestDB {
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules.iter() {
if data.origin.file_id() == Some(file_id) {
return ModuleId { krate, local_id };
}
}
}
panic!("Can't find module for file")
}
pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate);
for (module_id, _) in crate_def_map.modules.iter() {
let file_id = crate_def_map[module_id].origin.file_id();
files.extend(file_id)
}
}
files
.into_iter()
.filter_map(|file_id| {
let text = self.file_text(file_id);
let annotations = extract_annotations(&text);
if annotations.is_empty() {
return None;
}
Some((file_id, annotations))
})
.collect()
}
}
impl TestDB {
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new());
f();
self.events.lock().unwrap().take().unwrap()
}
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
let events = self.log(f);
events
.into_iter()
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
salsa::EventKind::WillExecute { database_key } => {
Some(format!("{:?}", database_key.debug(self)))
}
_ => None,
})
.collect()
}
}

359
crates/hir_ty/src/tests.rs Normal file
View file

@ -0,0 +1,359 @@
mod never_type;
mod coercion;
mod regression;
mod simple;
mod patterns;
mod traits;
mod method_resolution;
mod macros;
mod display_source_code;
use std::sync::Arc;
use base_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt};
use expect::Expect;
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
child_by_source::ChildBySource,
db::DefDatabase,
item_scope::ItemScope,
keys,
nameres::CrateDefMap,
AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId,
};
use hir_expand::{db::AstDatabase, InFile};
use stdx::format_to;
use syntax::{
algo,
ast::{self, AstNode},
SyntaxNode,
};
use crate::{
db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty,
};
// These tests compare the inference results for all expressions in a file
// against snapshots of the expected results using expect. Use
// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
fn setup_tracing() -> tracing::subscriber::DefaultGuard {
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
use tracing_tree::HierarchicalLayer;
let filter = EnvFilter::from_env("CHALK_DEBUG");
let layer = HierarchicalLayer::default()
.with_indent_lines(true)
.with_ansi(false)
.with_indent_amount(2)
.with_writer(std::io::stderr);
let subscriber = Registry::default().with(filter).with(layer);
tracing::subscriber::set_default(subscriber)
}
fn check_types(ra_fixture: &str) {
check_types_impl(ra_fixture, false)
}
fn check_types_source_code(ra_fixture: &str) {
check_types_impl(ra_fixture, true)
}
fn check_types_impl(ra_fixture: &str, display_source: bool) {
let _tracing = setup_tracing();
let db = TestDB::with_files(ra_fixture);
let mut checked_one = false;
for (file_id, annotations) in db.extract_annotations() {
for (range, expected) in annotations {
let ty = type_at_range(&db, FileRange { file_id, range });
let actual = if display_source {
let module = db.module_for_file(file_id);
ty.display_source_code(&db, module).unwrap()
} else {
ty.display(&db).to_string()
};
assert_eq!(expected, actual);
checked_one = true;
}
}
assert!(checked_one, "no `//^` annotations found");
}
fn type_at_range(db: &TestDB, pos: FileRange) -> Ty {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_range::<ast::Expr>(file.syntax(), pos.range).unwrap();
let fn_def = expr.syntax().ancestors().find_map(ast::Fn::cast).unwrap();
let module = db.module_for_file(pos.file_id);
let func = *module.child_by_source(db)[keys::FUNCTION]
.get(&InFile::new(pos.file_id.into(), fn_def))
.unwrap();
let (_body, source_map) = db.body_with_source_map(func.into());
if let Some(expr_id) = source_map.node_expr(InFile::new(pos.file_id.into(), &expr)) {
let infer = db.infer(func.into());
return infer[expr_id].clone();
}
panic!("Can't find expression")
}
fn infer(ra_fixture: &str) -> String {
infer_with_mismatches(ra_fixture, false)
}
fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(content);
let mut buf = String::new();
let mut infer_def = |inference_result: Arc<InferenceResult>,
body_source_map: Arc<BodySourceMap>| {
let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
for (pat, ty) in inference_result.type_of_pat.iter() {
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
sp.map(|ptr| {
ptr.either(
|it| it.to_node(&root).syntax().clone(),
|it| it.to_node(&root).syntax().clone(),
)
})
}
Err(SyntheticSyntax) => continue,
};
types.push((syntax_ptr, ty));
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match body_source_map.expr_syntax(expr) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
mismatches.push((node, mismatch));
}
}
// sort ranges for consistency
types.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (node, ty) in &types {
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
(self_param.self_token().unwrap().text_range(), "self".to_string())
} else {
(node.value.text_range(), node.value.text().to_string().replace("\n", " "))
};
let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
format_to!(
buf,
"{}{:?} '{}': {}\n",
macro_prefix,
range,
ellipsize(text, 15),
ty.display(&db)
);
}
if include_mismatches {
mismatches.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (src_ptr, mismatch) in &mismatches {
let range = src_ptr.value.text_range();
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
format_to!(
buf,
"{}{:?}: expected {}, got {}\n",
macro_prefix,
range,
mismatch.expected.display(&db),
mismatch.actual.display(&db),
);
}
}
};
let module = db.module_for_file(file_id);
let crate_def_map = db.crate_def_map(module.krate);
let mut defs: Vec<DefWithBodyId> = Vec::new();
visit_module(&db, &crate_def_map, module.local_id, &mut |it| defs.push(it));
defs.sort_by_key(|def| match def {
DefWithBodyId::FunctionId(it) => {
let loc = it.lookup(&db);
let tree = db.item_tree(loc.id.file_id);
tree.source(&db, loc.id).syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
let loc = it.lookup(&db);
let tree = db.item_tree(loc.id.file_id);
tree.source(&db, loc.id).syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
let loc = it.lookup(&db);
let tree = db.item_tree(loc.id.file_id);
tree.source(&db, loc.id).syntax().text_range().start()
}
});
for def in defs {
let (_body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
infer_def(infer, source_map);
}
buf.truncate(buf.trim_end().len());
buf
}
fn visit_module(
db: &TestDB,
crate_def_map: &CrateDefMap,
module_id: LocalModuleId,
cb: &mut dyn FnMut(DefWithBodyId),
) {
visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
for impl_id in crate_def_map[module_id].scope.impls() {
let impl_data = db.impl_data(impl_id);
for &item in impl_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_scope(db, crate_def_map, &body.item_scope, cb);
}
AssocItemId::ConstId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_scope(db, crate_def_map, &body.item_scope, cb);
}
AssocItemId::TypeAliasId(_) => (),
}
}
}
fn visit_scope(
db: &TestDB,
crate_def_map: &CrateDefMap,
scope: &ItemScope,
cb: &mut dyn FnMut(DefWithBodyId),
) {
for decl in scope.declarations() {
match decl {
ModuleDefId::FunctionId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_scope(db, crate_def_map, &body.item_scope, cb);
}
ModuleDefId::ConstId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_scope(db, crate_def_map, &body.item_scope, cb);
}
ModuleDefId::StaticId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_scope(db, crate_def_map, &body.item_scope, cb);
}
ModuleDefId::TraitId(it) => {
let trait_data = db.trait_data(it);
for &(_, item) in trait_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => cb(it.into()),
AssocItemId::ConstId(it) => cb(it.into()),
AssocItemId::TypeAliasId(_) => (),
}
}
}
ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
_ => (),
}
}
}
}
fn ellipsize(mut text: String, max_len: usize) -> String {
if text.len() <= max_len {
return text;
}
let ellipsis = "...";
let e_len = ellipsis.len();
let mut prefix_len = (max_len - e_len) / 2;
while !text.is_char_boundary(prefix_len) {
prefix_len += 1;
}
let mut suffix_len = max_len - e_len - prefix_len;
while !text.is_char_boundary(text.len() - suffix_len) {
suffix_len += 1;
}
text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
text
}
#[test]
fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let (mut db, pos) = TestDB::with_position(
"
//- /lib.rs
fn foo() -> i32 {
<|>1 + 1
}
",
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let crate_def_map = db.crate_def_map(module.krate);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
});
});
assert!(format!("{:?}", events).contains("infer"))
}
let new_text = "
fn foo() -> i32 {
1
+
1
}
"
.to_string();
db.set_file_text(pos.file_id, Arc::new(new_text));
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let crate_def_map = db.crate_def_map(module.krate);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
});
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}
}
fn check_infer(ra_fixture: &str, expect: Expect) {
let mut actual = infer(ra_fixture);
actual.push('\n');
expect.assert_eq(&actual);
}
fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
let mut actual = infer_with_mismatches(ra_fixture, true);
actual.push('\n');
expect.assert_eq(&actual);
}


@ -0,0 +1,861 @@
use expect::expect;
use test_utils::mark;
use super::{check_infer, check_infer_with_mismatches};
#[test]
fn infer_block_expr_type_mismatch() {
check_infer(
r"
fn test() {
let a: i32 = { 1i64 };
}
",
expect![[r"
10..40 '{ ...4 }; }': ()
20..21 'a': i32
29..37 '{ 1i64 }': i64
31..35 '1i64': i64
"]],
);
}
#[test]
fn coerce_places() {
check_infer(
r#"
struct S<T> { a: T }
fn f<T>(_: &[T]) -> T { loop {} }
fn g<T>(_: S<&[T]>) -> T { loop {} }
fn gen<T>() -> *mut [T; 2] { loop {} }
fn test1<U>() -> *mut [U] {
gen()
}
fn test2() {
let arr: &[u8; 1] = &[1];
let a: &[_] = arr;
let b = f(arr);
let c: &[_] = { arr };
let d = g(S { a: arr });
let e: [&[_]; 1] = [arr];
let f: [&[_]; 2] = [arr; 2];
let g: (&[_], &[_]) = (arr, arr);
}
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
"#,
expect![[r"
30..31 '_': &[T]
44..55 '{ loop {} }': T
46..53 'loop {}': !
51..53 '{}': ()
64..65 '_': S<&[T]>
81..92 '{ loop {} }': T
83..90 'loop {}': !
88..90 '{}': ()
121..132 '{ loop {} }': *mut [T; _]
123..130 'loop {}': !
128..130 '{}': ()
159..172 '{ gen() }': *mut [U]
165..168 'gen': fn gen<U>() -> *mut [U; _]
165..170 'gen()': *mut [U; _]
185..419 '{ ...rr); }': ()
195..198 'arr': &[u8; _]
211..215 '&[1]': &[u8; _]
212..215 '[1]': [u8; _]
213..214 '1': u8
226..227 'a': &[u8]
236..239 'arr': &[u8; _]
249..250 'b': u8
253..254 'f': fn f<u8>(&[u8]) -> u8
253..259 'f(arr)': u8
255..258 'arr': &[u8; _]
269..270 'c': &[u8]
279..286 '{ arr }': &[u8]
281..284 'arr': &[u8; _]
296..297 'd': u8
300..301 'g': fn g<u8>(S<&[u8]>) -> u8
300..315 'g(S { a: arr })': u8
302..314 'S { a: arr }': S<&[u8]>
309..312 'arr': &[u8; _]
325..326 'e': [&[u8]; _]
340..345 '[arr]': [&[u8]; _]
341..344 'arr': &[u8; _]
355..356 'f': [&[u8]; _]
370..378 '[arr; 2]': [&[u8]; _]
371..374 'arr': &[u8; _]
376..377 '2': usize
388..389 'g': (&[u8], &[u8])
406..416 '(arr, arr)': (&[u8], &[u8])
407..410 'arr': &[u8; _]
412..415 'arr': &[u8; _]
"]],
);
}
#[test]
fn infer_let_stmt_coerce() {
check_infer(
r"
fn test() {
let x: &[isize] = &[1];
let x: *const [isize] = &[1];
}
",
expect![[r"
10..75 '{ ...[1]; }': ()
20..21 'x': &[isize]
34..38 '&[1]': &[isize; _]
35..38 '[1]': [isize; _]
36..37 '1': isize
48..49 'x': *const [isize]
68..72 '&[1]': &[isize; _]
69..72 '[1]': [isize; _]
70..71 '1': isize
"]],
);
}
#[test]
fn infer_custom_coerce_unsized() {
check_infer(
r#"
struct A<T: ?Sized>(*const T);
struct B<T: ?Sized>(*const T);
struct C<T: ?Sized> { inner: *const T }
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<C<U>> for C<T> {}
fn foo1<T>(x: A<[T]>) -> A<[T]> { x }
fn foo2<T>(x: B<[T]>) -> B<[T]> { x }
fn foo3<T>(x: C<[T]>) -> C<[T]> { x }
fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
let d = foo1(a);
let e = foo2(b);
let f = foo3(c);
}
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
"#,
expect![[r"
257..258 'x': A<[T]>
278..283 '{ x }': A<[T]>
280..281 'x': A<[T]>
295..296 'x': B<[T]>
316..321 '{ x }': B<[T]>
318..319 'x': B<[T]>
333..334 'x': C<[T]>
354..359 '{ x }': C<[T]>
356..357 'x': C<[T]>
369..370 'a': A<[u8; _]>
384..385 'b': B<[u8; _]>
399..400 'c': C<[u8; _]>
414..480 '{ ...(c); }': ()
424..425 'd': A<[{unknown}]>
428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
428..435 'foo1(a)': A<[{unknown}]>
433..434 'a': A<[u8; _]>
445..446 'e': B<[u8]>
449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
449..456 'foo2(b)': B<[u8]>
454..455 'b': B<[u8; _]>
466..467 'f': C<[u8]>
470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
470..477 'foo3(c)': C<[u8]>
475..476 'c': C<[u8; _]>
"]],
);
}
#[test]
fn infer_if_coerce() {
check_infer(
r#"
fn foo<T>(x: &[T]) -> &[T] { loop {} }
fn test() {
let x = if true {
foo(&[1])
} else {
&[1]
};
}
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
"#,
expect![[r"
10..11 'x': &[T]
27..38 '{ loop {} }': &[T]
29..36 'loop {}': !
34..36 '{}': ()
49..125 '{ ... }; }': ()
59..60 'x': &[i32]
63..122 'if tru... }': &[i32]
66..70 'true': bool
71..96 '{ ... }': &[i32]
81..84 'foo': fn foo<i32>(&[i32]) -> &[i32]
81..90 'foo(&[1])': &[i32]
85..89 '&[1]': &[i32; _]
86..89 '[1]': [i32; _]
87..88 '1': i32
102..122 '{ ... }': &[i32; _]
112..116 '&[1]': &[i32; _]
113..116 '[1]': [i32; _]
114..115 '1': i32
"]],
);
}
#[test]
fn infer_if_else_coerce() {
check_infer(
r#"
fn foo<T>(x: &[T]) -> &[T] { loop {} }
fn test() {
let x = if true {
&[1]
} else {
foo(&[1])
};
}
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
"#,
expect![[r"
10..11 'x': &[T]
27..38 '{ loop {} }': &[T]
29..36 'loop {}': !
34..36 '{}': ()
49..125 '{ ... }; }': ()
59..60 'x': &[i32]
63..122 'if tru... }': &[i32]
66..70 'true': bool
71..91 '{ ... }': &[i32; _]
81..85 '&[1]': &[i32; _]
82..85 '[1]': [i32; _]
83..84 '1': i32
97..122 '{ ... }': &[i32]
107..110 'foo': fn foo<i32>(&[i32]) -> &[i32]
107..116 'foo(&[1])': &[i32]
111..115 '&[1]': &[i32; _]
112..115 '[1]': [i32; _]
113..114 '1': i32
"]],
)
}
#[test]
fn infer_match_first_coerce() {
check_infer(
r#"
fn foo<T>(x: &[T]) -> &[T] { loop {} }
fn test(i: i32) {
let x = match i {
2 => foo(&[2]),
1 => &[1],
_ => &[3],
};
}
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
"#,
expect![[r"
10..11 'x': &[T]
27..38 '{ loop {} }': &[T]
29..36 'loop {}': !
34..36 '{}': ()
47..48 'i': i32
55..149 '{ ... }; }': ()
65..66 'x': &[i32]
69..146 'match ... }': &[i32]
75..76 'i': i32
87..88 '2': i32
87..88 '2': i32
92..95 'foo': fn foo<i32>(&[i32]) -> &[i32]
92..101 'foo(&[2])': &[i32]
96..100 '&[2]': &[i32; _]
97..100 '[2]': [i32; _]
98..99 '2': i32
111..112 '1': i32
111..112 '1': i32
116..120 '&[1]': &[i32; _]
117..120 '[1]': [i32; _]
118..119 '1': i32
130..131 '_': i32
135..139 '&[3]': &[i32; _]
136..139 '[3]': [i32; _]
137..138 '3': i32
"]],
);
}
#[test]
fn infer_match_second_coerce() {
check_infer(
r#"
fn foo<T>(x: &[T]) -> &[T] { loop {} }
fn test(i: i32) {
let x = match i {
1 => &[1],
2 => foo(&[2]),
_ => &[3],
};
}
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
"#,
expect![[r"
10..11 'x': &[T]
27..38 '{ loop {} }': &[T]
29..36 'loop {}': !
34..36 '{}': ()
47..48 'i': i32
55..149 '{ ... }; }': ()
65..66 'x': &[i32]
69..146 'match ... }': &[i32]
75..76 'i': i32
87..88 '1': i32
87..88 '1': i32
92..96 '&[1]': &[i32; _]
93..96 '[1]': [i32; _]
94..95 '1': i32
106..107 '2': i32
106..107 '2': i32
111..114 'foo': fn foo<i32>(&[i32]) -> &[i32]
111..120 'foo(&[2])': &[i32]
115..119 '&[2]': &[i32; _]
116..119 '[2]': [i32; _]
117..118 '2': i32
130..131 '_': i32
135..139 '&[3]': &[i32; _]
136..139 '[3]': [i32; _]
137..138 '3': i32
"]],
);
}
#[test]
fn coerce_merge_one_by_one1() {
mark::check!(coerce_merge_fail_fallback);
check_infer(
r"
fn test() {
let t = &mut 1;
let x = match 1 {
1 => t as *mut i32,
2 => t as &i32,
_ => t as *const i32,
};
}
",
expect![[r"
10..144 '{ ... }; }': ()
20..21 't': &mut i32
24..30 '&mut 1': &mut i32
29..30 '1': i32
40..41 'x': *const i32
44..141 'match ... }': *const i32
50..51 '1': i32
62..63 '1': i32
62..63 '1': i32
67..68 't': &mut i32
67..80 't as *mut i32': *mut i32
90..91 '2': i32
90..91 '2': i32
95..96 't': &mut i32
95..104 't as &i32': &i32
114..115 '_': i32
119..120 't': &mut i32
119..134 't as *const i32': *const i32
"]],
);
}
#[test]
fn return_coerce_unknown() {
check_infer_with_mismatches(
r"
fn foo() -> u32 {
return unknown;
}
",
expect![[r"
16..39 '{ ...own; }': u32
22..36 'return unknown': !
29..36 'unknown': u32
"]],
);
}
#[test]
fn coerce_autoderef() {
check_infer_with_mismatches(
r"
struct Foo;
fn takes_ref_foo(x: &Foo) {}
fn test() {
takes_ref_foo(&Foo);
takes_ref_foo(&&Foo);
takes_ref_foo(&&&Foo);
}
",
expect![[r"
29..30 'x': &Foo
38..40 '{}': ()
51..132 '{ ...oo); }': ()
57..70 'takes_ref_foo': fn takes_ref_foo(&Foo)
57..76 'takes_...(&Foo)': ()
71..75 '&Foo': &Foo
72..75 'Foo': Foo
82..95 'takes_ref_foo': fn takes_ref_foo(&Foo)
82..102 'takes_...&&Foo)': ()
96..101 '&&Foo': &&Foo
97..101 '&Foo': &Foo
98..101 'Foo': Foo
108..121 'takes_ref_foo': fn takes_ref_foo(&Foo)
108..129 'takes_...&&Foo)': ()
122..128 '&&&Foo': &&&Foo
123..128 '&&Foo': &&Foo
124..128 '&Foo': &Foo
125..128 'Foo': Foo
"]],
);
}
#[test]
fn coerce_autoderef_generic() {
check_infer_with_mismatches(
r"
struct Foo;
fn takes_ref<T>(x: &T) -> T { *x }
fn test() {
takes_ref(&Foo);
takes_ref(&&Foo);
takes_ref(&&&Foo);
}
",
expect![[r"
28..29 'x': &T
40..46 '{ *x }': T
42..44 '*x': T
43..44 'x': &T
57..126 '{ ...oo); }': ()
63..72 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo
63..78 'takes_ref(&Foo)': Foo
73..77 '&Foo': &Foo
74..77 'Foo': Foo
84..93 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo
84..100 'takes_...&&Foo)': &Foo
94..99 '&&Foo': &&Foo
95..99 '&Foo': &Foo
96..99 'Foo': Foo
106..115 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo
106..123 'takes_...&&Foo)': &&Foo
116..122 '&&&Foo': &&&Foo
117..122 '&&Foo': &&Foo
118..122 '&Foo': &Foo
119..122 'Foo': Foo
"]],
);
}
#[test]
fn coerce_autoderef_block() {
check_infer_with_mismatches(
r#"
struct String {}
#[lang = "deref"]
trait Deref { type Target; }
impl Deref for String { type Target = str; }
fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
}
"#,
expect![[r"
126..127 'x': &str
135..137 '{}': ()
168..179 '{ loop {} }': String
170..177 'loop {}': !
175..177 '{}': ()
190..235 '{ ... }); }': ()
196..209 'takes_ref_str': fn takes_ref_str(&str)
196..232 'takes_...g() })': ()
210..231 '&{ ret...ng() }': &String
211..231 '{ retu...ng() }': String
213..227 'returns_string': fn returns_string() -> String
213..229 'return...ring()': String
"]],
);
}
#[test]
fn closure_return_coerce() {
check_infer_with_mismatches(
r"
fn foo() {
let x = || {
if true {
return &1u32;
}
&&1u32
};
}
",
expect![[r"
9..105 '{ ... }; }': ()
19..20 'x': || -> &u32
23..102 '|| { ... }': || -> &u32
26..102 '{ ... }': &u32
36..81 'if tru... }': ()
39..43 'true': bool
44..81 '{ ... }': ()
58..70 'return &1u32': !
65..70 '&1u32': &u32
66..70 '1u32': u32
90..96 '&&1u32': &&u32
91..96 '&1u32': &u32
92..96 '1u32': u32
"]],
);
}
#[test]
fn coerce_fn_item_to_fn_ptr() {
check_infer_with_mismatches(
r"
fn foo(x: u32) -> isize { 1 }
fn test() {
let f: fn(u32) -> isize = foo;
}
",
expect![[r"
7..8 'x': u32
24..29 '{ 1 }': isize
26..27 '1': isize
40..78 '{ ...foo; }': ()
50..51 'f': fn(u32) -> isize
72..75 'foo': fn foo(u32) -> isize
"]],
);
}
#[test]
fn coerce_fn_items_in_match_arms() {
mark::check!(coerce_fn_reification);
check_infer_with_mismatches(
r"
fn foo1(x: u32) -> isize { 1 }
fn foo2(x: u32) -> isize { 2 }
fn foo3(x: u32) -> isize { 3 }
fn test() {
let x = match 1 {
1 => foo1,
2 => foo2,
_ => foo3,
};
}
",
expect![[r"
8..9 'x': u32
25..30 '{ 1 }': isize
27..28 '1': isize
39..40 'x': u32
56..61 '{ 2 }': isize
58..59 '2': isize
70..71 'x': u32
87..92 '{ 3 }': isize
89..90 '3': isize
103..192 '{ ... }; }': ()
113..114 'x': fn(u32) -> isize
117..189 'match ... }': fn(u32) -> isize
123..124 '1': i32
135..136 '1': i32
135..136 '1': i32
140..144 'foo1': fn foo1(u32) -> isize
154..155 '2': i32
154..155 '2': i32
159..163 'foo2': fn foo2(u32) -> isize
173..174 '_': i32
178..182 'foo3': fn foo3(u32) -> isize
"]],
);
}
#[test]
fn coerce_closure_to_fn_ptr() {
check_infer_with_mismatches(
r"
fn test() {
let f: fn(u32) -> isize = |x| { 1 };
}
",
expect![[r"
10..54 '{ ...1 }; }': ()
20..21 'f': fn(u32) -> isize
42..51 '|x| { 1 }': |u32| -> isize
43..44 'x': u32
46..51 '{ 1 }': isize
48..49 '1': isize
"]],
);
}
#[test]
fn coerce_placeholder_ref() {
// placeholders should unify, even behind references
check_infer_with_mismatches(
r"
struct S<T> { t: T }
impl<TT> S<TT> {
fn get(&self) -> &TT {
&self.t
}
}
",
expect![[r"
50..54 'self': &S<TT>
63..86 '{ ... }': &TT
73..80 '&self.t': &TT
74..78 'self': &S<TT>
74..80 'self.t': TT
"]],
);
}
#[test]
fn coerce_unsize_array() {
check_infer_with_mismatches(
r#"
#[lang = "unsize"]
pub trait Unsize<T> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
fn test() {
let f: &[usize] = &[1, 2, 3];
}
"#,
expect![[r"
161..198 '{ ... 3]; }': ()
171..172 'f': &[usize]
185..195 '&[1, 2, 3]': &[usize; _]
186..195 '[1, 2, 3]': [usize; _]
187..188 '1': usize
190..191 '2': usize
193..194 '3': usize
"]],
);
}
#[test]
fn coerce_unsize_trait_object_simple() {
check_infer_with_mismatches(
r#"
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
trait Foo<T, U> {}
trait Bar<U, T, X>: Foo<T, U> {}
trait Baz<T, X>: Bar<usize, T, X> {}
struct S<T, X>;
impl<T, X> Foo<T, usize> for S<T, X> {}
impl<T, X> Bar<usize, T, X> for S<T, X> {}
impl<T, X> Baz<T, X> for S<T, X> {}
fn test() {
let obj: &dyn Baz<i8, i16> = &S;
let obj: &dyn Bar<_, i8, i16> = &S;
let obj: &dyn Foo<i8, _> = &S;
}
"#,
expect![[r"
424..539 '{ ... &S; }': ()
434..437 'obj': &dyn Baz<i8, i16>
459..461 '&S': &S<i8, i16>
460..461 'S': S<i8, i16>
471..474 'obj': &dyn Bar<usize, i8, i16>
499..501 '&S': &S<i8, i16>
500..501 'S': S<i8, i16>
511..514 'obj': &dyn Foo<i8, usize>
534..536 '&S': &S<i8, {unknown}>
535..536 'S': S<i8, {unknown}>
"]],
);
}
#[test]
// The Rust reference says this should be possible, but rustc doesn't implement
// it. We used to support it, but Chalk doesn't.
#[ignore]
fn coerce_unsize_trait_object_to_trait_object() {
check_infer_with_mismatches(
r#"
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
trait Foo<T, U> {}
trait Bar<U, T, X>: Foo<T, U> {}
trait Baz<T, X>: Bar<usize, T, X> {}
struct S<T, X>;
impl<T, X> Foo<T, usize> for S<T, X> {}
impl<T, X> Bar<usize, T, X> for S<T, X> {}
impl<T, X> Baz<T, X> for S<T, X> {}
fn test() {
let obj: &dyn Baz<i8, i16> = &S;
let obj: &dyn Bar<_, _, _> = obj;
let obj: &dyn Foo<_, _> = obj;
let obj2: &dyn Baz<i8, i16> = &S;
let _: &dyn Foo<_, _> = obj2;
}
"#,
expect![[r"
424..609 '{ ...bj2; }': ()
434..437 'obj': &dyn Baz<i8, i16>
459..461 '&S': &S<i8, i16>
460..461 'S': S<i8, i16>
471..474 'obj': &dyn Bar<usize, i8, i16>
496..499 'obj': &dyn Baz<i8, i16>
509..512 'obj': &dyn Foo<i8, usize>
531..534 'obj': &dyn Bar<usize, i8, i16>
544..548 'obj2': &dyn Baz<i8, i16>
570..572 '&S': &S<i8, i16>
571..572 'S': S<i8, i16>
582..583 '_': &dyn Foo<i8, usize>
602..606 'obj2': &dyn Baz<i8, i16>
"]],
);
}
#[test]
fn coerce_unsize_super_trait_cycle() {
check_infer_with_mismatches(
r#"
#[lang = "sized"]
pub trait Sized {}
#[lang = "unsize"]
pub trait Unsize<T> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
trait A {}
trait B: C + A {}
trait C: B {}
trait D: C
struct S;
impl A for S {}
impl B for S {}
impl C for S {}
impl D for S {}
fn test() {
let obj: &dyn D = &S;
let obj: &dyn A = &S;
}
"#,
expect![[r"
328..383 '{ ... &S; }': ()
338..341 'obj': &dyn D
352..354 '&S': &S
353..354 'S': S
364..367 'obj': &dyn A
378..380 '&S': &S
379..380 'S': S
"]],
);
}
#[ignore]
#[test]
fn coerce_unsize_generic() {
// FIXME: Implement this
// https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
check_infer_with_mismatches(
r#"
#[lang = "unsize"]
pub trait Unsize<T> {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
struct Foo<T> { t: T };
struct Bar<T>(Foo<T>);
fn test() {
let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
}
"#,
expect![[r"
"]],
);
}


@ -0,0 +1,41 @@
use super::check_types_source_code;
#[test]
fn qualify_path_to_submodule() {
check_types_source_code(
r#"
mod foo {
pub struct Foo;
}
fn bar() {
let foo: foo::Foo = foo::Foo;
foo
} //^ foo::Foo
"#,
);
}
#[test]
fn omit_default_type_parameters() {
check_types_source_code(
r#"
struct Foo<T = u8> { t: T }
fn main() {
let foo = Foo { t: 5u8 };
foo;
} //^ Foo
"#,
);
check_types_source_code(
r#"
struct Foo<K, T = u8> { k: K, t: T }
fn main() {
let foo = Foo { k: 400, t: 5u8 };
foo;
} //^ Foo<i32>
"#,
);
}


@ -0,0 +1,787 @@
use std::fs;
use expect::expect;
use test_utils::project_dir;
use super::{check_infer, check_types};
#[test]
fn cfg_impl_def() {
check_types(
r#"
//- /main.rs crate:main deps:foo cfg:test
use foo::S as T;
struct S;
#[cfg(test)]
impl S {
fn foo1(&self) -> i32 { 0 }
}
#[cfg(not(test))]
impl S {
fn foo2(&self) -> i32 { 0 }
}
fn test() {
let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
t;
} //^ (i32, {unknown}, i32, {unknown})
//- /foo.rs crate:foo
struct S;
#[cfg(not(test))]
impl S {
fn foo3(&self) -> i32 { 0 }
}
#[cfg(test)]
impl S {
fn foo4(&self) -> i32 { 0 }
}
"#,
);
}
#[test]
fn infer_macros_expanded() {
check_infer(
r#"
struct Foo(Vec<i32>);
macro_rules! foo {
($($item:expr),*) => {
{
Foo(vec![$($item,)*])
}
};
}
fn main() {
let x = foo!(1,2);
}
"#,
expect![[r#"
!0..17 '{Foo(v...,2,])}': Foo
!1..4 'Foo': Foo({unknown}) -> Foo
!1..16 'Foo(vec![1,2,])': Foo
!5..15 'vec![1,2,]': {unknown}
155..181 '{ ...,2); }': ()
165..166 'x': Foo
"#]],
);
}
#[test]
fn infer_legacy_textual_scoped_macros_expanded() {
check_infer(
r#"
struct Foo(Vec<i32>);
#[macro_use]
mod m {
macro_rules! foo {
($($item:expr),*) => {
{
Foo(vec![$($item,)*])
}
};
}
}
fn main() {
let x = foo!(1,2);
let y = crate::foo!(1,2);
}
"#,
expect![[r#"
!0..17 '{Foo(v...,2,])}': Foo
!1..4 'Foo': Foo({unknown}) -> Foo
!1..16 'Foo(vec![1,2,])': Foo
!5..15 'vec![1,2,]': {unknown}
194..250 '{ ...,2); }': ()
204..205 'x': Foo
227..228 'y': {unknown}
231..247 'crate:...!(1,2)': {unknown}
"#]],
);
}
#[test]
fn infer_path_qualified_macros_expanded() {
check_infer(
r#"
#[macro_export]
macro_rules! foo {
() => { 42i32 }
}
mod m {
pub use super::foo as bar;
}
fn main() {
let x = crate::foo!();
let y = m::bar!();
}
"#,
expect![[r#"
!0..5 '42i32': i32
!0..5 '42i32': i32
110..163 '{ ...!(); }': ()
120..121 'x': i32
147..148 'y': i32
"#]],
);
}
#[test]
fn expr_macro_expanded_in_various_places() {
check_infer(
r#"
macro_rules! spam {
() => (1isize);
}
fn spam() {
spam!();
(spam!());
spam!().spam(spam!());
for _ in spam!() {}
|| spam!();
while spam!() {}
break spam!();
return spam!();
match spam!() {
_ if spam!() => spam!(),
}
spam!()(spam!());
Spam { spam: spam!() };
spam!()[spam!()];
await spam!();
spam!() as usize;
&spam!();
-spam!();
spam!()..spam!();
spam!() + spam!();
}
"#,
expect![[r#"
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
53..456 '{ ...!(); }': ()
87..108 'spam!(...am!())': {unknown}
114..133 'for _ ...!() {}': ()
118..119 '_': {unknown}
131..133 '{}': ()
138..148 '|| spam!()': || -> isize
154..170 'while ...!() {}': ()
168..170 '{}': ()
175..188 'break spam!()': !
194..208 'return spam!()': !
214..268 'match ... }': isize
238..239 '_': isize
273..289 'spam!(...am!())': {unknown}
295..317 'Spam {...m!() }': {unknown}
323..339 'spam!(...am!()]': {unknown}
364..380 'spam!(... usize': usize
386..394 '&spam!()': &isize
400..408 '-spam!()': isize
414..430 'spam!(...pam!()': {unknown}
436..453 'spam!(...pam!()': isize
"#]],
);
}
#[test]
fn infer_type_value_macro_having_same_name() {
check_infer(
r#"
#[macro_export]
macro_rules! foo {
() => {
mod foo {
pub use super::foo;
}
};
($x:tt) => {
$x
};
}
foo!();
fn foo() {
let foo = foo::foo!(42i32);
}
"#,
expect![[r#"
!0..5 '42i32': i32
170..205 '{ ...32); }': ()
180..183 'foo': i32
"#]],
);
}
#[test]
fn processes_impls_generated_by_macros() {
check_types(
r#"
macro_rules! m {
($ident:ident) => (impl Trait for $ident {})
}
trait Trait { fn foo(self) -> u128 {} }
struct S;
m!(S);
fn test() { S.foo(); }
//^ u128
"#,
);
}
#[test]
fn infer_assoc_items_generated_by_macros() {
check_types(
r#"
macro_rules! m {
() => (fn foo(&self) -> u128 {0})
}
struct S;
impl S {
m!();
}
fn test() { S.foo(); }
//^ u128
"#,
);
}
#[test]
fn infer_assoc_items_generated_by_macros_chain() {
check_types(
r#"
macro_rules! m_inner {
() => {fn foo(&self) -> u128 {0}}
}
macro_rules! m {
() => {m_inner!();}
}
struct S;
impl S {
m!();
}
fn test() { S.foo(); }
//^ u128
"#,
);
}
#[test]
fn infer_macro_with_dollar_crate_is_correct_in_expr() {
check_types(
r#"
//- /main.rs crate:main deps:foo
fn test() {
let x = (foo::foo!(1), foo::foo!(2));
x;
} //^ (i32, usize)
//- /lib.rs crate:foo
#[macro_export]
macro_rules! foo {
(1) => { $crate::bar!() };
(2) => { 1 + $crate::baz() };
}
#[macro_export]
macro_rules! bar {
() => { 42 }
}
pub fn baz() -> usize { 31usize }
"#,
);
}
#[test]
fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
check_types(
r#"
//- /main.rs crate:main deps:foo
use foo::Trait;
fn test() {
let msg = foo::Message(foo::MessageRef);
let r = msg.deref();
r;
//^ &MessageRef
}
//- /lib.rs crate:foo
pub struct MessageRef;
pub struct Message(MessageRef);
pub trait Trait {
type Target;
fn deref(&self) -> &Self::Target;
}
#[macro_export]
macro_rules! expand {
() => {
impl Trait for Message {
type Target = $crate::MessageRef;
fn deref(&self) -> &Self::Target {
&self.0
}
}
}
}
expand!();
"#,
);
}
#[test]
fn infer_type_value_non_legacy_macro_use_as() {
check_infer(
r#"
mod m {
macro_rules! _foo {
($x:ident) => { type $x = u64; }
}
pub(crate) use _foo as foo;
}
m::foo!(foo);
use foo as bar;
fn f() -> bar { 0 }
fn main() {
let _a = f();
}
"#,
expect![[r#"
158..163 '{ 0 }': u64
160..161 '0': u64
174..196 '{ ...f(); }': ()
184..186 '_a': u64
190..191 'f': fn f() -> u64
190..193 'f()': u64
"#]],
);
}
#[test]
fn infer_local_macro() {
check_infer(
r#"
fn main() {
macro_rules! foo {
() => { 1usize }
}
let _a = foo!();
}
"#,
expect![[r#"
!0..6 '1usize': usize
10..89 '{ ...!(); }': ()
16..65 'macro_... }': {unknown}
74..76 '_a': usize
"#]],
);
}
#[test]
fn infer_local_inner_macros() {
check_types(
r#"
//- /main.rs crate:main deps:foo
fn test() {
let x = foo::foo!(1);
x;
} //^ i32
//- /lib.rs crate:foo
#[macro_export(local_inner_macros)]
macro_rules! foo {
(1) => { bar!() };
}
#[macro_export]
macro_rules! bar {
() => { 42 }
}
"#,
);
}
#[test]
fn infer_builtin_macros_line() {
check_infer(
r#"
#[rustc_builtin_macro]
macro_rules! line {() => {}}
fn main() {
let x = line!();
}
"#,
expect![[r#"
!0..1 '0': i32
63..87 '{ ...!(); }': ()
73..74 'x': i32
"#]],
);
}
#[test]
fn infer_builtin_macros_file() {
check_infer(
r#"
#[rustc_builtin_macro]
macro_rules! file {() => {}}
fn main() {
let x = file!();
}
"#,
expect![[r#"
!0..2 '""': &str
63..87 '{ ...!(); }': ()
73..74 'x': &str
"#]],
);
}
#[test]
fn infer_builtin_macros_column() {
check_infer(
r#"
#[rustc_builtin_macro]
macro_rules! column {() => {}}
fn main() {
let x = column!();
}
"#,
expect![[r#"
!0..1 '0': i32
65..91 '{ ...!(); }': ()
75..76 'x': i32
"#]],
);
}
#[test]
fn infer_builtin_macros_concat() {
check_infer(
r#"
#[rustc_builtin_macro]
macro_rules! concat {() => {}}
fn main() {
let x = concat!("hello", concat!("world", "!"));
}
"#,
expect![[r#"
!0..13 '"helloworld!"': &str
65..121 '{ ...")); }': ()
75..76 'x': &str
"#]],
);
}
#[test]
fn infer_builtin_macros_include() {
check_types(
r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
include!("foo.rs");
fn main() {
bar();
} //^ u32
//- /foo.rs
fn bar() -> u32 {0}
"#,
);
}
#[test]
#[ignore]
fn include_accidentally_quadratic() {
let file = project_dir().join("crates/syntax/test_data/accidentally_quadratic");
let big_file = fs::read_to_string(file).unwrap();
let big_file = vec![big_file; 10].join("\n");
let fixture = r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
include!("foo.rs");
fn main() {
RegisterBlock { };
//^ RegisterBlock
}
"#;
let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file);
check_types(&fixture);
}
#[test]
fn infer_builtin_macros_include_concat() {
check_types(
r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
#[rustc_builtin_macro]
macro_rules! concat {() => {}}
include!(concat!("f", "oo.rs"));
fn main() {
bar();
} //^ u32
//- /foo.rs
fn bar() -> u32 {0}
"#,
);
}
#[test]
fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
check_types(
r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
#[rustc_builtin_macro]
macro_rules! concat {() => {}}
#[rustc_builtin_macro]
macro_rules! env {() => {}}
include!(concat!(env!("OUT_DIR"), "/foo.rs"));
fn main() {
bar();
} //^ {unknown}
//- /foo.rs
fn bar() -> u32 {0}
"#,
);
}
#[test]
fn infer_builtin_macros_include_itself_should_failed() {
check_types(
r#"
#[rustc_builtin_macro]
macro_rules! include {() => {}}
include!("main.rs");
fn main() {
0
} //^ i32
"#,
);
}
#[test]
fn infer_builtin_macros_concat_with_lazy() {
check_infer(
r#"
macro_rules! hello {() => {"hello"}}
#[rustc_builtin_macro]
macro_rules! concat {() => {}}
fn main() {
let x = concat!(hello!(), concat!("world", "!"));
}
"#,
expect![[r#"
!0..13 '"helloworld!"': &str
103..160 '{ ...")); }': ()
113..114 'x': &str
"#]],
);
}
#[test]
fn infer_builtin_macros_env() {
check_infer(
r#"
//- /main.rs env:foo=bar
#[rustc_builtin_macro]
macro_rules! env {() => {}}
fn main() {
let x = env!("foo");
}
"#,
expect![[r#"
!0..22 '"__RA_...TED__"': &str
62..90 '{ ...o"); }': ()
72..73 'x': &str
"#]],
);
}
#[test]
fn infer_derive_clone_simple() {
check_types(
r#"
//- /main.rs crate:main deps:core
#[derive(Clone)]
struct S;
fn test() {
S.clone();
} //^ S
//- /lib.rs crate:core
#[prelude_import]
use clone::*;
mod clone {
trait Clone {
fn clone(&self) -> Self;
}
}
"#,
);
}
#[test]
fn infer_derive_clone_in_core() {
check_types(
r#"
//- /lib.rs crate:core
#[prelude_import]
use clone::*;
mod clone {
trait Clone {
fn clone(&self) -> Self;
}
}
#[derive(Clone)]
pub struct S;
//- /main.rs crate:main deps:core
use core::S;
fn test() {
S.clone();
} //^ S
"#,
);
}
#[test]
fn infer_derive_clone_with_params() {
check_types(
r#"
//- /main.rs crate:main deps:core
#[derive(Clone)]
struct S;
#[derive(Clone)]
struct Wrapper<T>(T);
struct NonClone;
fn test() {
(Wrapper(S).clone(), Wrapper(NonClone).clone());
//^ (Wrapper<S>, {unknown})
}
//- /lib.rs crate:core
#[prelude_import]
use clone::*;
mod clone {
trait Clone {
fn clone(&self) -> Self;
}
}
"#,
);
}
#[test]
fn infer_custom_derive_simple() {
// FIXME: this test currently does nothing
check_types(
r#"
//- /main.rs crate:main
use foo::Foo;
#[derive(Foo)]
struct S{}
fn test() {
S{};
} //^ S
"#,
);
}
#[test]
fn macro_in_arm() {
check_infer(
r#"
macro_rules! unit {
() => { () };
}
fn main() {
let x = match () {
unit!() => 92u32,
};
}
"#,
expect![[r#"
51..110 '{ ... }; }': ()
61..62 'x': u32
65..107 'match ... }': u32
71..73 '()': ()
84..91 'unit!()': ()
95..100 '92u32': u32
"#]],
);
}

File diff suppressed because it is too large.


@ -0,0 +1,409 @@
use expect::expect;
use super::{check_infer_with_mismatches, check_types};
#[test]
fn infer_never1() {
check_types(
r#"
fn test() {
let t = return;
t;
} //^ !
"#,
);
}
#[test]
fn infer_never2() {
check_types(
r#"
fn gen<T>() -> T { loop {} }
fn test() {
let a = gen();
if false { a } else { loop {} };
a;
} //^ !
"#,
);
}
#[test]
fn infer_never3() {
check_types(
r#"
fn gen<T>() -> T { loop {} }
fn test() {
let a = gen();
if false { loop {} } else { a };
a;
//^ !
}
"#,
);
}
#[test]
fn never_type_in_generic_args() {
check_types(
r#"
enum Option<T> { None, Some(T) }
fn test() {
let a = if true { Option::None } else { Option::Some(return) };
a;
} //^ Option<!>
"#,
);
}
#[test]
fn never_type_can_be_reinferred1() {
check_types(
r#"
fn gen<T>() -> T { loop {} }
fn test() {
let a = gen();
if false { loop {} } else { a };
a;
//^ ()
if false { a };
}
"#,
);
}
#[test]
fn never_type_can_be_reinferred2() {
check_types(
r#"
enum Option<T> { None, Some(T) }
fn test() {
let a = if true { Option::None } else { Option::Some(return) };
a;
//^ Option<i32>
match 42 {
42 => a,
_ => Option::Some(42),
};
}
"#,
);
}
#[test]
fn never_type_can_be_reinferred3() {
check_types(
r#"
enum Option<T> { None, Some(T) }
fn test() {
let a = if true { Option::None } else { Option::Some(return) };
a;
//^ Option<&str>
match 42 {
42 => a,
_ => Option::Some("str"),
};
}
"#,
);
}
#[test]
fn match_no_arm() {
check_types(
r#"
enum Void {}
fn test(a: Void) {
let t = match a {};
t;
} //^ !
"#,
);
}
#[test]
fn match_unknown_arm() {
check_types(
r#"
fn test(a: Option) {
let t = match 0 {
_ => unknown,
};
t;
} //^ {unknown}
"#,
);
}
#[test]
fn if_never() {
check_types(
r#"
fn test() {
let i = if true {
loop {}
} else {
3.0
};
i;
} //^ f64
"#,
);
}
#[test]
fn if_else_never() {
check_types(
r#"
fn test(input: bool) {
let i = if input {
2.0
} else {
return
};
i;
} //^ f64
"#,
);
}
#[test]
fn match_first_arm_never() {
check_types(
r#"
fn test(a: i32) {
let i = match a {
1 => return,
2 => 2.0,
3 => loop {},
_ => 3.0,
};
i;
} //^ f64
"#,
);
}
#[test]
fn match_second_arm_never() {
check_types(
r#"
fn test(a: i32) {
let i = match a {
1 => 3.0,
2 => loop {},
3 => 3.0,
_ => return,
};
i;
} //^ f64
"#,
);
}
#[test]
fn match_all_arms_never() {
check_types(
r#"
fn test(a: i32) {
let i = match a {
2 => return,
_ => loop {},
};
i;
} //^ !
"#,
);
}
#[test]
fn match_no_never_arms() {
check_types(
r#"
fn test(a: i32) {
let i = match a {
2 => 2.0,
_ => 3.0,
};
i;
} //^ f64
"#,
);
}
#[test]
fn diverging_expression_1() {
check_infer_with_mismatches(
r"
//- /main.rs
fn test1() {
let x: u32 = return;
}
fn test2() {
let x: u32 = { return; };
}
fn test3() {
let x: u32 = loop {};
}
fn test4() {
let x: u32 = { loop {} };
}
fn test5() {
let x: u32 = { if true { loop {}; } else { loop {}; } };
}
fn test6() {
let x: u32 = { let y: u32 = { loop {}; }; };
}
",
expect![[r"
11..39 '{ ...urn; }': ()
21..22 'x': u32
30..36 'return': !
51..84 '{ ...; }; }': ()
61..62 'x': u32
70..81 '{ return; }': u32
72..78 'return': !
96..125 '{ ... {}; }': ()
106..107 'x': u32
115..122 'loop {}': !
120..122 '{}': ()
137..170 '{ ...} }; }': ()
147..148 'x': u32
156..167 '{ loop {} }': u32
158..165 'loop {}': !
163..165 '{}': ()
182..246 '{ ...} }; }': ()
192..193 'x': u32
201..243 '{ if t...}; } }': u32
203..241 'if tru... {}; }': u32
206..210 'true': bool
211..223 '{ loop {}; }': u32
213..220 'loop {}': !
218..220 '{}': ()
229..241 '{ loop {}; }': u32
231..238 'loop {}': !
236..238 '{}': ()
258..310 '{ ...; }; }': ()
268..269 'x': u32
277..307 '{ let ...; }; }': u32
283..284 'y': u32
292..304 '{ loop {}; }': u32
294..301 'loop {}': !
299..301 '{}': ()
"]],
);
}
#[test]
fn diverging_expression_2() {
check_infer_with_mismatches(
r#"
//- /main.rs
fn test1() {
// should give type mismatch
let x: u32 = { loop {}; "foo" };
}
"#,
expect![[r#"
11..84 '{ ..." }; }': ()
54..55 'x': u32
63..81 '{ loop...foo" }': &str
65..72 'loop {}': !
70..72 '{}': ()
74..79 '"foo"': &str
63..81: expected u32, got &str
74..79: expected u32, got &str
"#]],
);
}
#[test]
fn diverging_expression_3_break() {
check_infer_with_mismatches(
r"
//- /main.rs
fn test1() {
// should give type mismatch
let x: u32 = { loop { break; } };
}
fn test2() {
// should give type mismatch
let x: u32 = { for a in b { break; }; };
// should give type mismatch as well
let x: u32 = { for a in b {}; };
// should give type mismatch as well
let x: u32 = { for a in b { return; }; };
}
fn test3() {
// should give type mismatch
let x: u32 = { while true { break; }; };
// should give type mismatch as well -- there's an implicit break, even if it's never hit
let x: u32 = { while true {}; };
// should give type mismatch as well
let x: u32 = { while true { return; }; };
}
",
expect![[r"
11..85 '{ ...} }; }': ()
54..55 'x': u32
63..82 '{ loop...k; } }': ()
65..80 'loop { break; }': ()
70..80 '{ break; }': ()
72..77 'break': !
63..82: expected u32, got ()
65..80: expected u32, got ()
97..343 '{ ...; }; }': ()
140..141 'x': u32
149..175 '{ for ...; }; }': ()
151..172 'for a ...eak; }': ()
155..156 'a': {unknown}
160..161 'b': {unknown}
162..172 '{ break; }': ()
164..169 'break': !
226..227 'x': u32
235..253 '{ for ... {}; }': ()
237..250 'for a in b {}': ()
241..242 'a': {unknown}
246..247 'b': {unknown}
248..250 '{}': ()
304..305 'x': u32
313..340 '{ for ...; }; }': ()
315..337 'for a ...urn; }': ()
319..320 'a': {unknown}
324..325 'b': {unknown}
326..337 '{ return; }': ()
328..334 'return': !
149..175: expected u32, got ()
235..253: expected u32, got ()
313..340: expected u32, got ()
355..654 '{ ...; }; }': ()
398..399 'x': u32
407..433 '{ whil...; }; }': ()
409..430 'while ...eak; }': ()
415..419 'true': bool
420..430 '{ break; }': ()
422..427 'break': !
537..538 'x': u32
546..564 '{ whil... {}; }': ()
548..561 'while true {}': ()
554..558 'true': bool
559..561 '{}': ()
615..616 'x': u32
624..651 '{ whil...; }; }': ()
626..648 'while ...urn; }': ()
632..636 'true': bool
637..648 '{ return; }': ()
639..645 'return': !
407..433: expected u32, got ()
546..564: expected u32, got ()
624..651: expected u32, got ()
"]],
);
}


@ -0,0 +1,656 @@
use expect::expect;
use test_utils::mark;
use super::{check_infer, check_infer_with_mismatches};
#[test]
fn infer_pattern() {
check_infer(
r#"
fn test(x: &i32) {
let y = x;
let &z = x;
let a = z;
let (c, d) = (1, "hello");
for (e, f) in some_iter {
let g = e;
}
if let [val] = opt {
let h = val;
}
let lambda = |a: u64, b, c: i32| { a + b; c };
let ref ref_to_x = x;
let mut mut_x = x;
let ref mut mut_ref_to_x = x;
let k = mut_ref_to_x;
}
"#,
expect![[r#"
8..9 'x': &i32
17..368 '{ ...o_x; }': ()
27..28 'y': &i32
31..32 'x': &i32
42..44 '&z': &i32
43..44 'z': i32
47..48 'x': &i32
58..59 'a': i32
62..63 'z': i32
73..79 '(c, d)': (i32, &str)
74..75 'c': i32
77..78 'd': &str
82..94 '(1, "hello")': (i32, &str)
83..84 '1': i32
86..93 '"hello"': &str
101..151 'for (e... }': ()
105..111 '(e, f)': ({unknown}, {unknown})
106..107 'e': {unknown}
109..110 'f': {unknown}
115..124 'some_iter': {unknown}
125..151 '{ ... }': ()
139..140 'g': {unknown}
143..144 'e': {unknown}
157..204 'if let... }': ()
164..169 '[val]': [{unknown}]
165..168 'val': {unknown}
172..175 'opt': [{unknown}]
176..204 '{ ... }': ()
190..191 'h': {unknown}
194..197 'val': {unknown}
214..220 'lambda': |u64, u64, i32| -> i32
223..255 '|a: u6...b; c }': |u64, u64, i32| -> i32
224..225 'a': u64
232..233 'b': u64
235..236 'c': i32
243..255 '{ a + b; c }': i32
245..246 'a': u64
245..250 'a + b': u64
249..250 'b': u64
252..253 'c': i32
266..278 'ref ref_to_x': &&i32
281..282 'x': &i32
292..301 'mut mut_x': &i32
304..305 'x': &i32
315..335 'ref mu...f_to_x': &mut &i32
338..339 'x': &i32
349..350 'k': &mut &i32
353..365 'mut_ref_to_x': &mut &i32
"#]],
);
}
#[test]
fn infer_literal_pattern() {
check_infer_with_mismatches(
r#"
fn any<T>() -> T { loop {} }
fn test(x: &i32) {
if let "foo" = any() {}
if let 1 = any() {}
if let 1u32 = any() {}
if let 1f32 = any() {}
if let 1.0 = any() {}
if let true = any() {}
}
"#,
expect![[r#"
17..28 '{ loop {} }': T
19..26 'loop {}': !
24..26 '{}': ()
37..38 'x': &i32
46..208 '{ ...) {} }': ()
52..75 'if let...y() {}': ()
59..64 '"foo"': &str
59..64 '"foo"': &str
67..70 'any': fn any<&str>() -> &str
67..72 'any()': &str
73..75 '{}': ()
80..99 'if let...y() {}': ()
87..88 '1': i32
87..88 '1': i32
91..94 'any': fn any<i32>() -> i32
91..96 'any()': i32
97..99 '{}': ()
104..126 'if let...y() {}': ()
111..115 '1u32': u32
111..115 '1u32': u32
118..121 'any': fn any<u32>() -> u32
118..123 'any()': u32
124..126 '{}': ()
131..153 'if let...y() {}': ()
138..142 '1f32': f32
138..142 '1f32': f32
145..148 'any': fn any<f32>() -> f32
145..150 'any()': f32
151..153 '{}': ()
158..179 'if let...y() {}': ()
165..168 '1.0': f64
165..168 '1.0': f64
171..174 'any': fn any<f64>() -> f64
171..176 'any()': f64
177..179 '{}': ()
184..206 'if let...y() {}': ()
191..195 'true': bool
191..195 'true': bool
198..201 'any': fn any<bool>() -> bool
198..203 'any()': bool
204..206 '{}': ()
"#]],
);
}
#[test]
fn infer_range_pattern() {
check_infer_with_mismatches(
r#"
fn test(x: &i32) {
if let 1..76 = 2u32 {}
if let 1..=76 = 2u32 {}
}
"#,
expect![[r#"
8..9 'x': &i32
17..75 '{ ...2 {} }': ()
23..45 'if let...u32 {}': ()
30..35 '1..76': u32
38..42 '2u32': u32
43..45 '{}': ()
50..73 'if let...u32 {}': ()
57..63 '1..=76': u32
66..70 '2u32': u32
71..73 '{}': ()
"#]],
);
}
#[test]
fn infer_pattern_match_ergonomics() {
check_infer(
r#"
struct A<T>(T);
fn test() {
let A(n) = &A(1);
let A(n) = &mut A(1);
}
"#,
expect![[r#"
27..78 '{ ...(1); }': ()
37..41 'A(n)': A<i32>
39..40 'n': &i32
44..49 '&A(1)': &A<i32>
45..46 'A': A<i32>(i32) -> A<i32>
45..49 'A(1)': A<i32>
47..48 '1': i32
59..63 'A(n)': A<i32>
61..62 'n': &mut i32
66..75 '&mut A(1)': &mut A<i32>
71..72 'A': A<i32>(i32) -> A<i32>
71..75 'A(1)': A<i32>
73..74 '1': i32
"#]],
);
}
#[test]
fn infer_pattern_match_ergonomics_ref() {
mark::check!(match_ergonomics_ref);
check_infer(
r#"
fn test() {
let v = &(1, &2);
let (_, &w) = v;
}
"#,
expect![[r#"
10..56 '{ ...= v; }': ()
20..21 'v': &(i32, &i32)
24..32 '&(1, &2)': &(i32, &i32)
25..32 '(1, &2)': (i32, &i32)
26..27 '1': i32
29..31 '&2': &i32
30..31 '2': i32
42..49 '(_, &w)': (i32, &i32)
43..44 '_': i32
46..48 '&w': &i32
47..48 'w': i32
52..53 'v': &(i32, &i32)
"#]],
);
}
#[test]
fn infer_pattern_match_slice() {
check_infer(
r#"
fn test() {
let slice: &[f64] = &[0.0];
match slice {
&[] => {},
&[a] => {
a;
},
&[b, c] => {
b;
c;
}
_ => {}
}
}
"#,
expect![[r#"
10..209 '{ ... } }': ()
20..25 'slice': &[f64]
36..42 '&[0.0]': &[f64; _]
37..42 '[0.0]': [f64; _]
38..41 '0.0': f64
48..207 'match ... }': ()
54..59 'slice': &[f64]
70..73 '&[]': &[f64]
71..73 '[]': [f64]
77..79 '{}': ()
89..93 '&[a]': &[f64]
90..93 '[a]': [f64]
91..92 'a': f64
97..123 '{ ... }': ()
111..112 'a': f64
133..140 '&[b, c]': &[f64]
134..140 '[b, c]': [f64]
135..136 'b': f64
138..139 'c': f64
144..185 '{ ... }': ()
158..159 'b': f64
173..174 'c': f64
194..195 '_': &[f64]
199..201 '{}': ()
"#]],
);
}
#[test]
fn infer_pattern_match_string_literal() {
check_infer_with_mismatches(
r#"
fn test() {
let s: &str = "hello";
match s {
"hello" => {}
_ => {}
}
}
"#,
expect![[r#"
10..98 '{ ... } }': ()
20..21 's': &str
30..37 '"hello"': &str
43..96 'match ... }': ()
49..50 's': &str
61..68 '"hello"': &str
61..68 '"hello"': &str
72..74 '{}': ()
83..84 '_': &str
88..90 '{}': ()
"#]],
);
}
#[test]
fn infer_pattern_match_or() {
check_infer_with_mismatches(
r#"
fn test() {
let s: &str = "hello";
match s {
"hello" | "world" => {}
_ => {}
}
}
"#,
expect![[r#"
10..108 '{ ... } }': ()
20..21 's': &str
30..37 '"hello"': &str
43..106 'match ... }': ()
49..50 's': &str
61..68 '"hello"': &str
61..68 '"hello"': &str
61..78 '"hello...world"': &str
71..78 '"world"': &str
71..78 '"world"': &str
82..84 '{}': ()
93..94 '_': &str
98..100 '{}': ()
"#]],
);
}
#[test]
fn infer_pattern_match_arr() {
check_infer(
r#"
fn test() {
let arr: [f64; 2] = [0.0, 1.0];
match arr {
[1.0, a] => {
a;
},
[b, c] => {
b;
c;
}
}
}
"#,
expect![[r#"
10..179 '{ ... } }': ()
20..23 'arr': [f64; _]
36..46 '[0.0, 1.0]': [f64; _]
37..40 '0.0': f64
42..45 '1.0': f64
52..177 'match ... }': ()
58..61 'arr': [f64; _]
72..80 '[1.0, a]': [f64; _]
73..76 '1.0': f64
73..76 '1.0': f64
78..79 'a': f64
84..110 '{ ... }': ()
98..99 'a': f64
120..126 '[b, c]': [f64; _]
121..122 'b': f64
124..125 'c': f64
130..171 '{ ... }': ()
144..145 'b': f64
159..160 'c': f64
"#]],
);
}
#[test]
fn infer_adt_pattern() {
check_infer(
r#"
enum E {
A { x: usize },
B
}
struct S(u32, E);
fn test() {
let e = E::A { x: 3 };
let S(y, z) = foo;
let E::A { x: new_var } = e;
match e {
E::A { x } => x,
E::B if foo => 1,
E::B => 10,
};
let ref d @ E::A { .. } = e;
d;
}
"#,
expect![[r#"
67..288 '{ ... d; }': ()
77..78 'e': E
81..94 'E::A { x: 3 }': E
91..92 '3': usize
105..112 'S(y, z)': S
107..108 'y': u32
110..111 'z': E
115..118 'foo': S
128..147 'E::A {..._var }': E
138..145 'new_var': usize
150..151 'e': E
158..244 'match ... }': usize
164..165 'e': E
176..186 'E::A { x }': E
183..184 'x': usize
190..191 'x': usize
201..205 'E::B': E
209..212 'foo': bool
216..217 '1': usize
227..231 'E::B': E
235..237 '10': usize
255..274 'ref d ...{ .. }': &E
263..274 'E::A { .. }': E
277..278 'e': E
284..285 'd': &E
"#]],
);
}
#[test]
fn enum_variant_through_self_in_pattern() {
check_infer(
r#"
enum E {
A { x: usize },
B(usize),
C
}
impl E {
fn test() {
match (loop {}) {
Self::A { x } => { x; },
Self::B(x) => { x; },
Self::C => {},
};
}
}
"#,
expect![[r#"
75..217 '{ ... }': ()
85..210 'match ... }': ()
92..99 'loop {}': !
97..99 '{}': ()
115..128 'Self::A { x }': E
125..126 'x': usize
132..138 '{ x; }': ()
134..135 'x': usize
152..162 'Self::B(x)': E
160..161 'x': usize
166..172 '{ x; }': ()
168..169 'x': usize
186..193 'Self::C': E
197..199 '{}': ()
"#]],
);
}
#[test]
fn infer_generics_in_patterns() {
check_infer(
r#"
struct A<T> {
x: T,
}
enum Option<T> {
Some(T),
None,
}
fn test(a1: A<u32>, o: Option<u64>) {
let A { x: x2 } = a1;
let A::<i64> { x: x3 } = A { x: 1 };
match o {
Option::Some(t) => t,
_ => 1,
};
}
"#,
expect![[r#"
78..80 'a1': A<u32>
90..91 'o': Option<u64>
106..243 '{ ... }; }': ()
116..127 'A { x: x2 }': A<u32>
123..125 'x2': u32
130..132 'a1': A<u32>
142..160 'A::<i6...: x3 }': A<i64>
156..158 'x3': i64
163..173 'A { x: 1 }': A<i64>
170..171 '1': i64
179..240 'match ... }': u64
185..186 'o': Option<u64>
197..212 'Option::Some(t)': Option<u64>
210..211 't': u64
216..217 't': u64
227..228 '_': Option<u64>
232..233 '1': u64
"#]],
);
}
#[test]
fn infer_const_pattern() {
check_infer_with_mismatches(
r#"
enum Option<T> { None }
use Option::None;
struct Foo;
const Bar: usize = 1;
fn test() {
let a: Option<u32> = None;
let b: Option<i64> = match a {
None => None,
};
let _: () = match () { Foo => Foo }; // Expected mismatch
let _: () = match () { Bar => Bar }; // Expected mismatch
}
"#,
expect![[r#"
73..74 '1': usize
87..309 '{ ...atch }': ()
97..98 'a': Option<u32>
114..118 'None': Option<u32>
128..129 'b': Option<i64>
145..182 'match ... }': Option<i64>
151..152 'a': Option<u32>
163..167 'None': Option<u32>
171..175 'None': Option<i64>
192..193 '_': ()
200..223 'match ... Foo }': Foo
206..208 '()': ()
211..214 'Foo': Foo
218..221 'Foo': Foo
254..255 '_': ()
262..285 'match ... Bar }': usize
268..270 '()': ()
273..276 'Bar': usize
280..283 'Bar': usize
200..223: expected (), got Foo
262..285: expected (), got usize
"#]],
);
}
#[test]
fn infer_guard() {
check_infer(
r#"
struct S;
impl S { fn foo(&self) -> bool { false } }
fn main() {
match S {
s if s.foo() => (),
}
}
"#,
expect![[r#"
27..31 'self': &S
41..50 '{ false }': bool
43..48 'false': bool
64..115 '{ ... } }': ()
70..113 'match ... }': ()
76..77 'S': S
88..89 's': S
93..94 's': S
93..100 's.foo()': bool
104..106 '()': ()
"#]],
)
}
#[test]
fn match_ergonomics_in_closure_params() {
check_infer(
r#"
#[lang = "fn_once"]
trait FnOnce<Args> {
type Output;
}
fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
fn test() {
foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
foo(&(1, "a"), |(x, y)| x);
}
"#,
expect![[r#"
93..94 't': T
99..100 'f': F
110..121 '{ loop {} }': U
112..119 'loop {}': !
117..119 '{}': ()
133..232 '{ ... x); }': ()
139..142 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
139..166 'foo(&(...y)| x)': i32
143..152 '&(1, "a")': &(i32, &str)
144..152 '(1, "a")': (i32, &str)
145..146 '1': i32
148..151 '"a"': &str
154..165 '|&(x, y)| x': |&(i32, &str)| -> i32
155..162 '&(x, y)': &(i32, &str)
156..162 '(x, y)': (i32, &str)
157..158 'x': i32
160..161 'y': &str
164..165 'x': i32
203..206 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
203..229 'foo(&(...y)| x)': &i32
207..216 '&(1, "a")': &(i32, &str)
208..216 '(1, "a")': (i32, &str)
209..210 '1': i32
212..215 '"a"': &str
218..228 '|(x, y)| x': |&(i32, &str)| -> &i32
219..225 '(x, y)': (i32, &str)
220..221 'x': &i32
223..224 'y': &&str
227..228 'x': &i32
"#]],
);
}
#[test]
fn slice_tail_pattern() {
check_infer(
r#"
fn foo(params: &[i32]) {
match params {
[head, tail @ ..] => {
}
}
}
"#,
expect![[r#"
7..13 'params': &[i32]
23..92 '{ ... } }': ()
29..90 'match ... }': ()
35..41 'params': &[i32]
52..69 '[head,... @ ..]': [i32]
53..57 'head': &i32
59..68 'tail @ ..': &[i32]
66..68 '..': [i32]
73..84 '{ }': ()
"#]],
);
}


@ -0,0 +1,842 @@
use expect::expect;
use test_utils::mark;
use super::{check_infer, check_types};
#[test]
fn bug_484() {
check_infer(
r#"
fn test() {
let x = if true {};
}
"#,
expect![[r#"
10..37 '{ ... {}; }': ()
20..21 'x': ()
24..34 'if true {}': ()
27..31 'true': bool
32..34 '{}': ()
"#]],
);
}
#[test]
fn no_panic_on_field_of_enum() {
check_infer(
r#"
enum X {}
fn test(x: X) {
x.some_field;
}
"#,
expect![[r#"
19..20 'x': X
25..46 '{ ...eld; }': ()
31..32 'x': X
31..43 'x.some_field': {unknown}
"#]],
);
}
#[test]
fn bug_585() {
check_infer(
r#"
fn test() {
X {};
match x {
A::B {} => (),
A::Y() => (),
}
}
"#,
expect![[r#"
10..88 '{ ... } }': ()
16..20 'X {}': {unknown}
26..86 'match ... }': ()
32..33 'x': {unknown}
44..51 'A::B {}': {unknown}
55..57 '()': ()
67..73 'A::Y()': {unknown}
77..79 '()': ()
"#]],
);
}
#[test]
fn bug_651() {
check_infer(
r#"
fn quux() {
let y = 92;
1 + y;
}
"#,
expect![[r#"
10..40 '{ ...+ y; }': ()
20..21 'y': i32
24..26 '92': i32
32..33 '1': i32
32..37 '1 + y': i32
36..37 'y': i32
"#]],
);
}
#[test]
fn recursive_vars() {
mark::check!(type_var_cycles_resolve_completely);
mark::check!(type_var_cycles_resolve_as_possible);
check_infer(
r#"
fn test() {
let y = unknown;
[y, &y];
}
"#,
expect![[r#"
10..47 '{ ...&y]; }': ()
20..21 'y': &{unknown}
24..31 'unknown': &{unknown}
37..44 '[y, &y]': [&&{unknown}; _]
38..39 'y': &{unknown}
41..43 '&y': &&{unknown}
42..43 'y': &{unknown}
"#]],
);
}
#[test]
fn recursive_vars_2() {
check_infer(
r#"
fn test() {
let x = unknown;
let y = unknown;
[(x, y), (&y, &x)];
}
"#,
expect![[r#"
10..79 '{ ...x)]; }': ()
20..21 'x': &&{unknown}
24..31 'unknown': &&{unknown}
41..42 'y': &&{unknown}
45..52 'unknown': &&{unknown}
58..76 '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _]
59..65 '(x, y)': (&&&{unknown}, &&&{unknown})
60..61 'x': &&{unknown}
63..64 'y': &&{unknown}
67..75 '(&y, &x)': (&&&{unknown}, &&&{unknown})
68..70 '&y': &&&{unknown}
69..70 'y': &&{unknown}
72..74 '&x': &&&{unknown}
73..74 'x': &&{unknown}
"#]],
);
}
#[test]
fn infer_std_crash_1() {
// caused stack overflow, taken from std
check_infer(
r#"
enum Maybe<T> {
Real(T),
Fake,
}
fn write() {
match something_unknown {
Maybe::Real(ref mut something) => (),
}
}
"#,
expect![[r#"
53..138 '{ ... } }': ()
59..136 'match ... }': ()
65..82 'someth...nknown': Maybe<{unknown}>
93..123 'Maybe:...thing)': Maybe<{unknown}>
105..122 'ref mu...ething': &mut {unknown}
127..129 '()': ()
"#]],
);
}
#[test]
fn infer_std_crash_2() {
mark::check!(type_var_resolves_to_int_var);
// caused "equating two type variables, ...", taken from std
check_infer(
r#"
fn test_line_buffer() {
&[0, b'\n', 1, b'\n'];
}
"#,
expect![[r#"
22..52 '{ ...n']; }': ()
28..49 '&[0, b...b'\n']': &[u8; _]
29..49 '[0, b'...b'\n']': [u8; _]
30..31 '0': u8
33..38 'b'\n'': u8
40..41 '1': u8
43..48 'b'\n'': u8
"#]],
);
}
#[test]
fn infer_std_crash_3() {
// taken from rustc
check_infer(
r#"
pub fn compute() {
match nope!() {
SizeSkeleton::Pointer { non_zero: true, tail } => {}
}
}
"#,
expect![[r#"
17..107 '{ ... } }': ()
23..105 'match ... }': ()
29..36 'nope!()': {unknown}
47..93 'SizeSk...tail }': {unknown}
81..85 'true': bool
81..85 'true': bool
87..91 'tail': {unknown}
97..99 '{}': ()
"#]],
);
}
#[test]
fn infer_std_crash_4() {
// taken from rustc
check_infer(
r#"
pub fn primitive_type() {
match *self {
BorrowedRef { type_: Primitive(p), ..} => {},
}
}
"#,
expect![[r#"
24..105 '{ ... } }': ()
30..103 'match ... }': ()
36..41 '*self': {unknown}
37..41 'self': {unknown}
52..90 'Borrow...), ..}': {unknown}
73..85 'Primitive(p)': {unknown}
83..84 'p': {unknown}
94..96 '{}': ()
"#]],
);
}
#[test]
fn infer_std_crash_5() {
// taken from rustc
check_infer(
r#"
fn extra_compiler_flags() {
for content in doesnt_matter {
let name = if doesnt_matter {
first
} else {
&content
};
let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
name
} else {
content
};
}
}
"#,
expect![[r#"
26..322 '{ ... } }': ()
32..320 'for co... }': ()
36..43 'content': &{unknown}
47..60 'doesnt_matter': {unknown}
61..320 '{ ... }': ()
75..79 'name': &&{unknown}
82..166 'if doe... }': &&{unknown}
85..98 'doesnt_matter': bool
99..128 '{ ... }': &&{unknown}
113..118 'first': &&{unknown}
134..166 '{ ... }': &&{unknown}
148..156 '&content': &&{unknown}
149..156 'content': &{unknown}
181..188 'content': &{unknown}
191..313 'if ICE... }': &{unknown}
194..231 'ICE_RE..._VALUE': {unknown}
194..247 'ICE_RE...&name)': bool
241..246 '&name': &&&{unknown}
242..246 'name': &&{unknown}
248..276 '{ ... }': &&{unknown}
262..266 'name': &&{unknown}
282..313 '{ ... }': &{unknown}
296..303 'content': &{unknown}
"#]],
);
}
#[test]
fn infer_nested_generics_crash() {
// another crash found typechecking rustc
check_infer(
r#"
struct Canonical<V> {
value: V,
}
struct QueryResponse<V> {
value: V,
}
fn test<R>(query_response: Canonical<QueryResponse<R>>) {
&query_response.value;
}
"#,
expect![[r#"
91..105 'query_response': Canonical<QueryResponse<R>>
136..166 '{ ...lue; }': ()
142..163 '&query....value': &QueryResponse<R>
143..157 'query_response': Canonical<QueryResponse<R>>
143..163 'query_....value': QueryResponse<R>
"#]],
);
}
#[test]
fn infer_paren_macro_call() {
check_infer(
r#"
macro_rules! bar { () => {0u32} }
fn test() {
let a = (bar!());
}
"#,
expect![[r#"
!0..4 '0u32': u32
44..69 '{ ...()); }': ()
54..55 'a': u32
"#]],
);
}
#[test]
fn bug_1030() {
check_infer(
r#"
struct HashSet<T, H>;
struct FxHasher;
type FxHashSet<T> = HashSet<T, FxHasher>;
impl<T, H> HashSet<T, H> {
fn default() -> HashSet<T, H> {}
}
pub fn main_loop() {
FxHashSet::default();
}
"#,
expect![[r#"
143..145 '{}': ()
168..197 '{ ...t(); }': ()
174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
"#]],
);
}
#[test]
fn issue_2669() {
check_infer(
r#"
trait A {}
trait Write {}
struct Response<T> {}
trait D {
fn foo();
}
impl<T:A> D for Response<T> {
fn foo() {
end();
fn end<W: Write>() {
let _x: T = loop {};
}
}
}
"#,
expect![[r#"
119..214 '{ ... }': ()
129..132 'end': fn end<{unknown}>()
129..134 'end()': ()
163..208 '{ ... }': ()
181..183 '_x': !
190..197 'loop {}': !
195..197 '{}': ()
"#]],
)
}
#[test]
fn issue_2705() {
check_infer(
r#"
trait Trait {}
fn test() {
<Trait<u32>>::foo()
}
"#,
expect![[r#"
25..52 '{ ...oo() }': ()
31..48 '<Trait...>::foo': {unknown}
31..50 '<Trait...:foo()': ()
"#]],
);
}
#[test]
fn issue_2683_chars_impl() {
check_types(
r#"
//- /main.rs crate:main deps:std
fn test() {
let chars: std::str::Chars<'_>;
(chars.next(), chars.nth(1));
} //^ (Option<char>, Option<char>)
//- /std.rs crate:std
#[prelude_import]
use prelude::*;
pub mod prelude {
pub use crate::iter::Iterator;
pub use crate::option::Option;
}
pub mod iter {
pub use self::traits::Iterator;
pub mod traits {
pub use self::iterator::Iterator;
pub mod iterator {
pub trait Iterator {
type Item;
fn next(&mut self) -> Option<Self::Item>;
fn nth(&mut self, n: usize) -> Option<Self::Item> {}
}
}
}
}
pub mod option {
pub enum Option<T> {}
}
pub mod str {
pub struct Chars<'a> {}
impl<'a> Iterator for Chars<'a> {
type Item = char;
fn next(&mut self) -> Option<char> {}
}
}
"#,
);
}
#[test]
fn issue_3642_bad_macro_stackover() {
check_types(
r#"
#[macro_export]
macro_rules! match_ast {
(match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
(match ($node:expr) {
$( ast::$ast:ident($it:ident) => $res:expr, )*
_ => $catch_all:expr $(,)?
}) => {{
$( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
{ $catch_all }
}};
}
fn main() {
let anchor = match_ast! {
//^ ()
match parent {
as => {},
_ => return None
}
};
}"#,
);
}
#[test]
fn issue_3999_slice() {
check_infer(
r#"
fn foo(params: &[usize]) {
match params {
[ps @ .., _] => {}
}
}
"#,
expect![[r#"
7..13 'params': &[usize]
25..80 '{ ... } }': ()
31..78 'match ... }': ()
37..43 'params': &[usize]
54..66 '[ps @ .., _]': [usize]
55..62 'ps @ ..': &[usize]
60..62 '..': [usize]
64..65 '_': usize
70..72 '{}': ()
"#]],
);
}
#[test]
fn issue_3999_struct() {
// rust-analyzer should not panic on seeing this malformed
// record pattern.
check_infer(
r#"
struct Bar {
a: bool,
}
fn foo(b: Bar) {
match b {
Bar { a: .. } => {},
}
}
"#,
expect![[r#"
35..36 'b': Bar
43..95 '{ ... } }': ()
49..93 'match ... }': ()
55..56 'b': Bar
67..80 'Bar { a: .. }': Bar
76..78 '..': bool
84..86 '{}': ()
"#]],
);
}
#[test]
fn issue_4235_name_conflicts() {
check_infer(
r#"
struct FOO {}
static FOO:FOO = FOO {};
impl FOO {
fn foo(&self) {}
}
fn main() {
let a = &FOO;
a.foo();
}
"#,
expect![[r#"
31..37 'FOO {}': FOO
63..67 'self': &FOO
69..71 '{}': ()
85..119 '{ ...o(); }': ()
95..96 'a': &FOO
99..103 '&FOO': &FOO
100..103 'FOO': FOO
109..110 'a': &FOO
109..116 'a.foo()': ()
"#]],
);
}
#[test]
fn issue_4465_dollar_crate_at_type() {
check_infer(
r#"
pub struct Foo {}
pub fn anything<T>() -> T {
loop {}
}
macro_rules! foo {
() => {{
let r: $crate::Foo = anything();
r
}};
}
fn main() {
let _a = foo!();
}
"#,
expect![[r#"
44..59 '{ loop {} }': T
50..57 'loop {}': !
55..57 '{}': ()
!0..31 '{letr:...g();r}': Foo
!4..5 'r': Foo
!18..26 'anything': fn anything<Foo>() -> Foo
!18..28 'anything()': Foo
!29..30 'r': Foo
163..187 '{ ...!(); }': ()
173..175 '_a': Foo
"#]],
);
}
#[test]
fn issue_4053_diesel_where_clauses() {
check_infer(
r#"
trait BoxedDsl<DB> {
type Output;
fn internal_into_boxed(self) -> Self::Output;
}
struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
order: Order,
}
trait QueryFragment<DB: Backend> {}
trait Into<T> { fn into(self) -> T; }
impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
for SelectStatement<F, S, D, W, O, LOf, G>
where
O: Into<dyn QueryFragment<DB>>,
{
type Output = XXX;
fn internal_into_boxed(self) -> Self::Output {
self.order.into();
}
}
"#,
expect![[r#"
65..69 'self': Self
267..271 'self': Self
466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
488..522 '{ ... }': ()
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
498..515 'self.o...into()': dyn QueryFragment<DB>
"#]],
);
}
#[test]
fn issue_4953() {
check_infer(
r#"
pub struct Foo(pub i64);
impl Foo {
fn test() -> Self { Self(0i64) }
}
"#,
expect![[r#"
58..72 '{ Self(0i64) }': Foo
60..64 'Self': Foo(i64) -> Foo
60..70 'Self(0i64)': Foo
65..69 '0i64': i64
"#]],
);
check_infer(
r#"
pub struct Foo<T>(pub T);
impl Foo<i64> {
fn test() -> Self { Self(0i64) }
}
"#,
expect![[r#"
64..78 '{ Self(0i64) }': Foo<i64>
66..70 'Self': Foo<i64>(i64) -> Foo<i64>
66..76 'Self(0i64)': Foo<i64>
71..75 '0i64': i64
"#]],
);
}
#[test]
fn issue_4931() {
check_infer(
r#"
trait Div<T> {
type Output;
}
trait CheckedDiv: Div<()> {}
trait PrimInt: CheckedDiv<Output = ()> {
fn pow(self);
}
fn check<T: PrimInt>(i: T) {
i.pow();
}
"#,
expect![[r#"
117..121 'self': Self
148..149 'i': T
154..170 '{ ...w(); }': ()
160..161 'i': T
160..167 'i.pow()': ()
"#]],
);
}
#[test]
fn issue_4885() {
check_infer(
r#"
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
trait Future {
type Output;
}
trait Foo<R> {
type Bar;
}
fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
where
K: Foo<R>,
{
bar(key)
}
fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
where
K: Foo<R>,
{
}
"#,
expect![[r#"
136..139 'key': &K
198..214 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
204..207 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
204..212 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
208..211 'key': &K
228..231 'key': &K
290..293 '{ }': ()
"#]],
);
}
#[test]
fn issue_4800() {
check_infer(
r#"
trait Debug {}
struct Foo<T>;
type E1<T> = (T, T, T);
type E2<T> = E1<E1<E1<(T, T, T)>>>;
impl Debug for Foo<E2<()>> {}
struct Request;
pub trait Future {
type Output;
}
pub struct PeerSet<D>;
impl<D> Service<Request> for PeerSet<D>
where
D: Discover,
D::Key: Debug,
{
type Error = ();
type Future = dyn Future<Output = Self::Error>;
fn call(&mut self) -> Self::Future {
loop {}
}
}
pub trait Discover {
type Key;
}
pub trait Service<Request> {
type Error;
type Future: Future<Output = Self::Error>;
fn call(&mut self) -> Self::Future;
}
"#,
expect![[r#"
379..383 'self': &mut PeerSet<D>
401..424 '{ ... }': dyn Future<Output = ()>
411..418 'loop {}': !
416..418 '{}': ()
575..579 'self': &mut Self
"#]],
);
}
#[test]
fn issue_4966() {
check_infer(
r#"
pub trait IntoIterator {
type Item;
}
struct Repeat<A> { element: A }
struct Map<F> { f: F }
struct Vec<T> {}
#[lang = "deref"]
pub trait Deref {
type Target;
}
impl<T> Deref for Vec<T> {
type Target = [T];
}
fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
fn main() {
let inner = Map { f: |_: &f64| 0.0 };
let repeat = Repeat { element: inner };
let vec = from_iter(repeat);
vec.foo_bar();
}
"#,
expect![[r#"
270..274 'iter': T
289..291 '{}': ()
303..447 '{ ...r(); }': ()
313..318 'inner': Map<|&f64| -> f64>
321..345 'Map { ... 0.0 }': Map<|&f64| -> f64>
330..343 '|_: &f64| 0.0': |&f64| -> f64
331..332 '_': &f64
340..343 '0.0': f64
356..362 'repeat': Repeat<Map<|&f64| -> f64>>
365..390 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
383..388 'inner': Map<|&f64| -> f64>
401..404 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
407..416 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
407..424 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
417..423 'repeat': Repeat<Map<|&f64| -> f64>>
431..434 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
431..444 'vec.foo_bar()': {unknown}
"#]],
);
}

File diff suppressed because it is too large

File diff suppressed because it is too large

crates/hir_ty/src/traits.rs

@ -0,0 +1,272 @@
//! Trait solving using Chalk.
use std::sync::Arc;
use base_db::CrateId;
use chalk_ir::cast::Cast;
use chalk_solve::Solver;
use hir_def::{lang_item::LangItemTarget, TraitId};
use crate::{db::HirDatabase, DebruijnIndex, Substs};
use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
use self::chalk::{from_chalk, Interner, ToChalk};
pub(crate) mod chalk;
// This controls the maximum size of types Chalk considers. If we set this too
// high, we can run into slow edge cases; if we set it too low, Chalk won't
// find some solutions.
// FIXME this is currently hardcoded in the recursive solver
// const CHALK_SOLVER_MAX_SIZE: usize = 10;
/// This controls how much 'time' we give the Chalk solver before giving up.
const CHALK_SOLVER_FUEL: i32 = 100;
#[derive(Debug, Copy, Clone)]
struct ChalkContext<'a> {
db: &'a dyn HirDatabase,
krate: CrateId,
}
fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
let overflow_depth = 100;
let caching_enabled = true;
chalk_recursive::RecursiveSolver::new(overflow_depth, caching_enabled)
}
/// A set of clauses that we assume to be true. E.g. if we are inside this function:
/// ```rust
/// fn foo<T: Default>(t: T) {}
/// ```
/// we assume that `T: Default`.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct TraitEnvironment {
pub predicates: Vec<GenericPredicate>,
}
impl TraitEnvironment {
/// Returns trait refs with the given self type which are supposed to hold
/// in this trait env. E.g. if we are in `foo<T: SomeTrait>()`, this will
/// find that `T: SomeTrait` if we call it for `T`.
pub(crate) fn trait_predicates_for_self_ty<'a>(
&'a self,
ty: &'a Ty,
) -> impl Iterator<Item = &'a TraitRef> + 'a {
self.predicates.iter().filter_map(move |pred| match pred {
GenericPredicate::Implemented(tr) if tr.self_ty() == ty => Some(tr),
_ => None,
})
}
}
/// Something (usually a goal), along with an environment.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct InEnvironment<T> {
pub environment: Arc<TraitEnvironment>,
pub value: T,
}
impl<T> InEnvironment<T> {
pub fn new(environment: Arc<TraitEnvironment>, value: T) -> InEnvironment<T> {
InEnvironment { environment, value }
}
}
/// Something that needs to be proven (by Chalk) during type checking, e.g. that
/// a certain type implements a certain trait. Proving the Obligation might
/// result in additional information about inference variables.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Obligation {
/// Prove that a certain type implements a trait (the type is the `Self` type
/// parameter to the `TraitRef`).
Trait(TraitRef),
Projection(ProjectionPredicate),
}
impl Obligation {
pub fn from_predicate(predicate: GenericPredicate) -> Option<Obligation> {
match predicate {
GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)),
GenericPredicate::Projection(projection_pred) => {
Some(Obligation::Projection(projection_pred))
}
GenericPredicate::Error => None,
}
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ProjectionPredicate {
pub projection_ty: ProjectionTy,
pub ty: Ty,
}
impl TypeWalk for ProjectionPredicate {
fn walk(&self, f: &mut impl FnMut(&Ty)) {
self.projection_ty.walk(f);
self.ty.walk(f);
}
fn walk_mut_binders(
&mut self,
f: &mut impl FnMut(&mut Ty, DebruijnIndex),
binders: DebruijnIndex,
) {
self.projection_ty.walk_mut_binders(f, binders);
self.ty.walk_mut_binders(f, binders);
}
}
/// Solve a trait goal using Chalk.
pub(crate) fn trait_solve_query(
db: &dyn HirDatabase,
krate: CrateId,
goal: Canonical<InEnvironment<Obligation>>,
) -> Option<Solution> {
let _p = profile::span("trait_solve_query").detail(|| match &goal.value.value {
Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(),
Obligation::Projection(_) => "projection".to_string(),
});
log::info!("trait_solve_query({})", goal.value.value.display(db));
if let Obligation::Projection(pred) = &goal.value.value {
if let Ty::Bound(_) = &pred.projection_ty.parameters[0] {
// Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
return Some(Solution::Ambig(Guidance::Unknown));
}
}
let canonical = goal.to_chalk(db).cast(&Interner);
// We currently don't deal with universes (I think / hope they're not yet
// relevant for our use cases?)
let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 };
let solution = solve(db, krate, &u_canonical);
solution.map(|solution| solution_from_chalk(db, solution))
}
fn solve(
db: &dyn HirDatabase,
krate: CrateId,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
) -> Option<chalk_solve::Solution<Interner>> {
let context = ChalkContext { db, krate };
log::debug!("solve goal: {:?}", goal);
let mut solver = create_chalk_solver();
let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
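// `should_continue` is polled by Chalk between solving steps: each call re-checks
// for cancellation and burns one unit of fuel, so a goal that keeps the solver busy
// for too long is given up on instead of blocking indefinitely.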
let should_continue = || {
context.db.check_canceled();
let remaining = fuel.get();
fuel.set(remaining - 1);
if remaining == 0 {
log::debug!("fuel exhausted");
}
remaining > 0
};
let mut solve = || {
let solution = solver.solve_limited(&context, goal, should_continue);
log::debug!("solve({:?}) => {:?}", goal, solution);
solution
};
// don't set the TLS for Chalk unless Chalk debugging is active, to make
// extra sure we only use it for debugging
let solution =
if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() };
solution
}
fn is_chalk_debug() -> bool {
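// only the presence of the variable matters; its value is ignored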
std::env::var("CHALK_DEBUG").is_ok()
}
fn solution_from_chalk(
db: &dyn HirDatabase,
solution: chalk_solve::Solution<Interner>,
) -> Solution {
let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| {
let result = from_chalk(db, subst);
SolutionVariables(result)
};
match solution {
chalk_solve::Solution::Unique(constr_subst) => {
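// Chalk's unique solution also carries constraints (e.g. lifetime constraints);
// since we don't model those, only the substitution part is converted back.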
let subst = chalk_ir::Canonical {
value: constr_subst.value.subst,
binders: constr_subst.binders,
};
Solution::Unique(convert_subst(subst))
}
chalk_solve::Solution::Ambig(chalk_solve::Guidance::Definite(subst)) => {
Solution::Ambig(Guidance::Definite(convert_subst(subst)))
}
chalk_solve::Solution::Ambig(chalk_solve::Guidance::Suggested(subst)) => {
Solution::Ambig(Guidance::Suggested(convert_subst(subst)))
}
chalk_solve::Solution::Ambig(chalk_solve::Guidance::Unknown) => {
Solution::Ambig(Guidance::Unknown)
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SolutionVariables(pub Canonical<Substs>);
#[derive(Clone, Debug, PartialEq, Eq)]
/// A (possible) solution for a proposed goal.
pub enum Solution {
/// The goal indeed holds, and there is a unique value for all existential
/// variables.
Unique(SolutionVariables),
/// The goal may be provable in multiple ways, but regardless we may have some guidance
/// for type inference. In this case, we don't return any lifetime
/// constraints, since we have not "committed" to any particular solution
/// yet.
Ambig(Guidance),
}
#[derive(Clone, Debug, PartialEq, Eq)]
/// When a goal holds ambiguously (e.g., because there are multiple possible
/// solutions), we issue a set of *guidance* back to type inference.
pub enum Guidance {
/// The existential variables *must* have the given values if the goal is
/// ever to hold, but that alone isn't enough to guarantee the goal will
/// actually hold.
Definite(SolutionVariables),
/// There are multiple plausible values for the existentials, but the ones
/// here are suggested as the preferred choice heuristically. These should
/// be used for inference fallback only.
Suggested(SolutionVariables),
/// There's no useful information to feed back to type inference
Unknown,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum FnTrait {
FnOnce,
FnMut,
Fn,
}
impl FnTrait {
fn lang_item_name(self) -> &'static str {
match self {
FnTrait::FnOnce => "fn_once",
FnTrait::FnMut => "fn_mut",
FnTrait::Fn => "fn",
}
}
pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
let target = db.lang_item(krate, self.lang_item_name().into())?;
match target {
LangItemTarget::TraitId(t) => Some(t),
_ => None,
}
}
}


@ -0,0 +1,586 @@
//! Conversion code from/to Chalk.
use std::sync::Arc;
use log::debug;
use chalk_ir::{fold::shift::Shift, CanonicalVarKinds, GenericArg, TypeName};
use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
use base_db::{salsa::InternKey, CrateId};
use hir_def::{
lang_item::{lang_attr, LangItemTarget},
AssocContainerId, AssocItemId, HasModule, Lookup, TypeAliasId,
};
use super::ChalkContext;
use crate::{
db::HirDatabase,
display::HirDisplay,
method_resolution::{TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
utils::generics,
CallableDefId, DebruijnIndex, FnSig, GenericPredicate, Substs, Ty, TypeCtor,
};
use mapping::{
convert_where_clauses, generic_predicate_to_inline_bound, make_binders, TypeAliasAsValue,
};
pub use self::interner::*;
pub(super) mod tls;
mod interner;
mod mapping;
pub(super) trait ToChalk {
type Chalk;
fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
}
pub(super) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
where
T: ToChalk<Chalk = ChalkT>,
{
T::from_chalk(db, chalk)
}
impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
self.db.associated_ty_data(id)
}
fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
self.db.trait_datum(self.krate, trait_id)
}
fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
self.db.struct_datum(self.krate, struct_id)
}
fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr {
rust_ir::AdtRepr { repr_c: false, repr_packed: false }
}
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
self.db.impl_datum(self.krate, impl_id)
}
fn fn_def_datum(
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
) -> Arc<rust_ir::FnDefDatum<Interner>> {
self.db.fn_def_datum(self.krate, fn_def_id)
}
fn impls_for_trait(
&self,
trait_id: TraitId,
parameters: &[GenericArg<Interner>],
binders: &CanonicalVarKinds<Interner>,
) -> Vec<ImplId> {
debug!("impls_for_trait {:?}", trait_id);
let trait_: hir_def::TraitId = from_chalk(self.db, trait_id);
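// the first parameter of the trait ref is the `Self` type we are looking up impls for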
let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone());
fn binder_kind(ty: &Ty, binders: &CanonicalVarKinds<Interner>) -> Option<chalk_ir::TyKind> {
if let Ty::Bound(bv) = ty {
let binders = binders.as_slice(&Interner);
if bv.debruijn == DebruijnIndex::INNERMOST {
if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind {
return Some(tk);
}
}
}
None
}
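// If the self type is still an unresolved integer or float variable, any of the
// built-in numeric fingerprints could apply, so search them all; otherwise restrict
// the search to the concrete self type's fingerprint (an empty slice means the
// search below is unrestricted).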
let self_ty_fp = TyFingerprint::for_impl(&ty);
let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
Some(chalk_ir::TyKind::Integer) => &ALL_INT_FPS,
Some(chalk_ir::TyKind::Float) => &ALL_FLOAT_FPS,
_ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
};
// Note: Since we're using impls_for_trait, only impls where the trait
// can be resolved should ever reach Chalk. `impl_datum` relies on that
// and will panic if the trait can't be resolved.
let in_deps = self.db.trait_impls_in_deps(self.krate);
let in_self = self.db.trait_impls_in_crate(self.krate);
let impl_maps = [in_deps, in_self];
let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
let result: Vec<_> = if fps.is_empty() {
debug!("Unrestricted search for {:?} impls...", trait_);
impl_maps
.iter()
.flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk))
.collect()
} else {
impl_maps
.iter()
.flat_map(|crate_impl_defs| {
fps.iter().flat_map(move |fp| {
crate_impl_defs.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
})
})
.collect()
};
debug!("impls_for_trait returned {} impls", result.len());
result
}
fn impl_provided_for(&self, auto_trait_id: TraitId, struct_id: AdtId) -> bool {
debug!("impl_provided_for {:?}, {:?}", auto_trait_id, struct_id);
false // FIXME
}
fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
self.db.associated_ty_value(self.krate, id)
}
fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
vec![]
}
fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
// We don't do coherence checking (yet)
unimplemented!()
}
fn interner(&self) -> &Interner {
&Interner
}
fn well_known_trait_id(
&self,
well_known_trait: rust_ir::WellKnownTrait,
) -> Option<chalk_ir::TraitId<Interner>> {
let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
Some(LangItemTarget::TraitId(trait_)) => trait_,
_ => return None,
};
Some(trait_.to_chalk(self.db))
}
fn program_clauses_for_env(
&self,
environment: &chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner> {
self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
}
fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
let interned_id = crate::db::InternedOpaqueTyId::from(id);
let full_id = self.db.lookup_intern_impl_trait_id(interned_id);
let (func, idx) = match full_id {
crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => (func, idx),
};
let datas =
self.db.return_type_impl_traits(func).expect("impl trait id without impl traits");
let data = &datas.value.impl_traits[idx as usize];
let bound = OpaqueTyDatumBound {
bounds: make_binders(
data.bounds
.value
.iter()
.cloned()
.filter(|b| !b.is_error())
.map(|b| b.to_chalk(self.db))
.collect(),
1,
),
where_clauses: make_binders(vec![], 0),
};
let num_vars = datas.num_binders;
Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound: make_binders(bound, num_vars) })
}
fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId<Interner>) -> chalk_ir::Ty<Interner> {
// FIXME: actually provide the hidden type; it is relevant for auto traits
Ty::Unknown.to_chalk(self.db)
}
fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
// FIXME: implement actual object safety
true
}
fn closure_kind(
&self,
_closure_id: chalk_ir::ClosureId<Interner>,
_substs: &chalk_ir::Substitution<Interner>,
) -> rust_ir::ClosureKind {
// Fn is the closure kind that implements all three traits
rust_ir::ClosureKind::Fn
}
fn closure_inputs_and_output(
&self,
_closure_id: chalk_ir::ClosureId<Interner>,
substs: &chalk_ir::Substitution<Interner>,
) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
let sig_ty: Ty =
from_chalk(self.db, substs.at(&Interner, 0).assert_ty_ref(&Interner).clone());
let sig = FnSig::from_fn_ptr_substs(
&sig_ty.substs().expect("first closure param should be fn ptr"),
false,
);
let io = rust_ir::FnDefInputsAndOutputDatum {
argument_types: sig.params().iter().map(|ty| ty.clone().to_chalk(self.db)).collect(),
return_type: sig.ret().clone().to_chalk(self.db),
};
make_binders(io.shifted_in(&Interner), 0)
}
fn closure_upvars(
&self,
_closure_id: chalk_ir::ClosureId<Interner>,
_substs: &chalk_ir::Substitution<Interner>,
) -> chalk_ir::Binders<chalk_ir::Ty<Interner>> {
let ty = Ty::unit().to_chalk(self.db);
make_binders(ty, 0)
}
fn closure_fn_substitution(
&self,
_closure_id: chalk_ir::ClosureId<Interner>,
_substs: &chalk_ir::Substitution<Interner>,
) -> chalk_ir::Substitution<Interner> {
Substs::empty().to_chalk(self.db)
}
fn trait_name(&self, _trait_id: chalk_ir::TraitId<Interner>) -> String {
unimplemented!()
}
fn adt_name(&self, _struct_id: chalk_ir::AdtId<Interner>) -> String {
unimplemented!()
}
fn assoc_type_name(&self, _assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
unimplemented!()
}
fn opaque_type_name(&self, _opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
unimplemented!()
}
fn fn_def_name(&self, _fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
unimplemented!()
}
}
pub(crate) fn program_clauses_for_chalk_env_query(
db: &dyn HirDatabase,
krate: CrateId,
environment: chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner> {
chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
}
pub(crate) fn associated_ty_data_query(
db: &dyn HirDatabase,
id: AssocTypeId,
) -> Arc<AssociatedTyDatum> {
debug!("associated_ty_data {:?}", id);
let type_alias: TypeAliasId = from_chalk(db, id);
let trait_ = match type_alias.lookup(db.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
// Lower bounds -- we could/should maybe move this to a separate query in `lower`
let type_alias_data = db.type_alias_data(type_alias);
let generic_params = generics(db.upcast(), type_alias.into());
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
let ctx = crate::TyLoweringContext::new(db, &resolver)
.with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable);
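// bound variable 0 is used as the trait's `Self` type when lowering the bounds below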
let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0));
let bounds = type_alias_data
.bounds
.iter()
.flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone()))
.filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
.map(|bound| make_binders(bound.shifted_in(&Interner), 0))
.collect();
let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses };
let datum = AssociatedTyDatum {
trait_id: trait_.to_chalk(db),
id,
name: type_alias,
binders: make_binders(bound_data, generic_params.len()),
};
Arc::new(datum)
}
pub(crate) fn trait_datum_query(
db: &dyn HirDatabase,
krate: CrateId,
trait_id: TraitId,
) -> Arc<TraitDatum> {
debug!("trait_datum {:?}", trait_id);
let trait_: hir_def::TraitId = from_chalk(db, trait_id);
let trait_data = db.trait_data(trait_);
debug!("trait {:?} = {:?}", trait_id, trait_data.name);
let generic_params = generics(db.upcast(), trait_.into());
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let flags = rust_ir::TraitFlags {
auto: trait_data.auto,
upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate,
non_enumerable: true,
coinductive: false, // only relevant for Chalk testing
// FIXME: set these flags correctly
marker: false,
fundamental: false,
};
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids =
trait_data.associated_types().map(|type_alias| type_alias.to_chalk(db)).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
let well_known =
lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
let trait_datum = TraitDatum {
id: trait_id,
binders: make_binders(trait_datum_bound, bound_vars.len()),
flags,
associated_ty_ids,
well_known,
};
Arc::new(trait_datum)
}
fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
Some(match name {
"sized" => WellKnownTrait::Sized,
"copy" => WellKnownTrait::Copy,
"clone" => WellKnownTrait::Clone,
"drop" => WellKnownTrait::Drop,
"fn_once" => WellKnownTrait::FnOnce,
"fn_mut" => WellKnownTrait::FnMut,
"fn" => WellKnownTrait::Fn,
"unsize" => WellKnownTrait::Unsize,
_ => return None,
})
}
fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
match attr {
WellKnownTrait::Sized => "sized",
WellKnownTrait::Copy => "copy",
WellKnownTrait::Clone => "clone",
WellKnownTrait::Drop => "drop",
WellKnownTrait::FnOnce => "fn_once",
WellKnownTrait::FnMut => "fn_mut",
WellKnownTrait::Fn => "fn",
WellKnownTrait::Unsize => "unsize",
}
}
pub(crate) fn struct_datum_query(
db: &dyn HirDatabase,
krate: CrateId,
struct_id: AdtId,
) -> Arc<StructDatum> {
debug!("struct_datum {:?}", struct_id);
let type_ctor: TypeCtor = from_chalk(db, TypeName::Adt(struct_id));
debug!("struct {:?} = {:?}", struct_id, type_ctor);
let num_params = type_ctor.num_ty_params(db);
let upstream = type_ctor.krate(db) != Some(krate);
let where_clauses = type_ctor
.as_generic_def()
.map(|generic_def| {
let generic_params = generics(db.upcast(), generic_def);
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
convert_where_clauses(db, generic_def, &bound_vars)
})
.unwrap_or_else(Vec::new);
let flags = rust_ir::AdtFlags {
upstream,
// FIXME set fundamental and phantom_data flags correctly
fundamental: false,
phantom_data: false,
};
// FIXME provide enum variants properly (for auto traits)
let variant = rust_ir::AdtVariantDatum {
fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
};
let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
let struct_datum = StructDatum {
// FIXME set ADT kind
kind: rust_ir::AdtKind::Struct,
id: struct_id,
binders: make_binders(struct_datum_bound, num_params),
flags,
};
Arc::new(struct_datum)
}
pub(crate) fn impl_datum_query(
db: &dyn HirDatabase,
krate: CrateId,
impl_id: ImplId,
) -> Arc<ImplDatum> {
let _p = profile::span("impl_datum");
debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_)
}
fn impl_def_datum(
db: &dyn HirDatabase,
krate: CrateId,
chalk_id: ImplId,
impl_id: hir_def::ImplId,
) -> Arc<ImplDatum> {
let trait_ref = db
.impl_trait(impl_id)
// ImplIds for impls where the trait ref can't be resolved should never reach Chalk
.expect("invalid impl passed to Chalk")
.value;
let impl_data = db.impl_data(impl_id);
let generic_params = generics(db.upcast(), impl_id.into());
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let trait_ = trait_ref.trait_;
let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate {
rust_ir::ImplType::Local
} else {
rust_ir::ImplType::External
};
let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
let negative = impl_data.is_negative;
debug!(
"impl {:?}: {}{} where {:?}",
chalk_id,
if negative { "!" } else { "" },
trait_ref.display(db),
where_clauses
);
let trait_ref = trait_ref.to_chalk(db);
let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
let trait_data = db.trait_data(trait_);
let associated_ty_value_ids = impl_data
.items
.iter()
.filter_map(|item| match item {
AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
_ => None,
})
.filter(|&type_alias| {
// don't include associated types that don't exist in the trait
let name = &db.type_alias_data(type_alias).name;
trait_data.associated_type_by_name(name).is_some()
})
.map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
.collect();
debug!("impl_datum: {:?}", impl_datum_bound);
let impl_datum = ImplDatum {
binders: make_binders(impl_datum_bound, bound_vars.len()),
impl_type,
polarity,
associated_ty_value_ids,
};
Arc::new(impl_datum)
}
pub(crate) fn associated_ty_value_query(
db: &dyn HirDatabase,
krate: CrateId,
id: AssociatedTyValueId,
) -> Arc<AssociatedTyValue> {
let type_alias: TypeAliasAsValue = from_chalk(db, id);
type_alias_associated_ty_value(db, krate, type_alias.0)
}
fn type_alias_associated_ty_value(
db: &dyn HirDatabase,
_krate: CrateId,
type_alias: TypeAliasId,
) -> Arc<AssociatedTyValue> {
let type_alias_data = db.type_alias_data(type_alias);
let impl_id = match type_alias.lookup(db.upcast()).container {
AssocContainerId::ImplId(it) => it,
_ => panic!("assoc ty value should be in impl"),
};
let trait_ref = db
.impl_trait(impl_id)
// we don't return any assoc ty values if the impl'd trait can't be resolved
.expect("assoc ty value should not exist")
.value;
let assoc_ty = db
.trait_data(trait_ref.trait_)
.associated_type_by_name(&type_alias_data.name)
.expect("assoc ty value should not exist"); // validated when building the impl data as well
let ty = db.ty(type_alias.into());
let value_bound = rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) };
let value = rust_ir::AssociatedTyValue {
impl_id: impl_id.to_chalk(db),
associated_ty_id: assoc_ty.to_chalk(db),
value: make_binders(value_bound, ty.num_binders),
};
Arc::new(value)
}
pub(crate) fn fn_def_datum_query(
db: &dyn HirDatabase,
_krate: CrateId,
fn_def_id: FnDefId,
) -> Arc<FnDefDatum> {
let callable_def: CallableDefId = from_chalk(db, fn_def_id);
let generic_params = generics(db.upcast(), callable_def.into());
let sig = db.callable_item_signature(callable_def);
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
let bound = rust_ir::FnDefDatumBound {
// Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
inputs_and_output: make_binders(
rust_ir::FnDefInputsAndOutputDatum {
argument_types: sig
.value
.params()
.iter()
.map(|ty| ty.clone().to_chalk(db))
.collect(),
return_type: sig.value.ret().clone().to_chalk(db),
}
.shifted_in(&Interner),
0,
),
where_clauses,
};
let datum = FnDefDatum {
id: fn_def_id,
abi: (),
safety: chalk_ir::Safety::Safe,
variadic: sig.value.is_varargs,
binders: make_binders(bound, sig.num_binders),
};
Arc::new(datum)
}
impl From<FnDefId> for crate::db::InternedCallableDefId {
fn from(fn_def_id: FnDefId) -> Self {
InternKey::from_intern_id(fn_def_id.0)
}
}
impl From<crate::db::InternedCallableDefId> for FnDefId {
fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
chalk_ir::FnDefId(callable_def_id.as_intern_id())
}
}
impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
fn from(id: OpaqueTyId) -> Self {
InternKey::from_intern_id(id.0)
}
}
impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
fn from(id: crate::db::InternedOpaqueTyId) -> Self {
chalk_ir::OpaqueTyId(id.as_intern_id())
}
}
impl From<chalk_ir::ClosureId<Interner>> for crate::db::ClosureId {
fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
Self::from_intern_id(id.0)
}
}
impl From<crate::db::ClosureId> for chalk_ir::ClosureId<Interner> {
fn from(id: crate::db::ClosureId) -> Self {
chalk_ir::ClosureId(id.as_intern_id())
}
}


@ -0,0 +1,383 @@
//! Implementation of the Chalk `Interner` trait, which allows customizing the
//! representation of the various objects Chalk deals with (types, goals etc.).
use super::tls;
use base_db::salsa::InternId;
use chalk_ir::{GenericArg, Goal, GoalData};
use hir_def::TypeAliasId;
use std::{fmt, sync::Arc};
#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub struct Interner;
pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
pub type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
pub type TraitId = chalk_ir::TraitId<Interner>;
pub type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
pub type AdtId = chalk_ir::AdtId<Interner>;
pub type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
pub type ImplId = chalk_ir::ImplId<Interner>;
pub type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
pub type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
pub type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
pub type FnDefId = chalk_ir::FnDefId<Interner>;
pub type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
pub type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
impl chalk_ir::interner::Interner for Interner {
type InternedType = Box<chalk_ir::TyData<Self>>; // FIXME use Arc?
type InternedLifetime = chalk_ir::LifetimeData<Self>;
type InternedConst = Arc<chalk_ir::ConstData<Self>>;
type InternedConcreteConst = ();
type InternedGenericArg = chalk_ir::GenericArgData<Self>;
type InternedGoal = Arc<GoalData<Self>>;
type InternedGoals = Vec<Goal<Self>>;
type InternedSubstitution = Vec<GenericArg<Self>>;
type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
type InternedProgramClauses = Arc<[chalk_ir::ProgramClause<Self>]>;
type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>;
type InternedVariableKinds = Vec<chalk_ir::VariableKind<Self>>;
type InternedCanonicalVarKinds = Vec<chalk_ir::CanonicalVarKind<Self>>;
type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
type DefId = InternId;
type InternedAdtId = hir_def::AdtId;
type Identifier = TypeAliasId;
type FnAbi = ();
fn debug_adt_id(type_kind_id: AdtId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
}
fn debug_trait_id(type_kind_id: TraitId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
}
fn debug_assoc_type_id(id: AssocTypeId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
}
fn debug_alias(
alias: &chalk_ir::AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt)))
}
fn debug_projection_ty(
proj: &chalk_ir::ProjectionTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
}
fn debug_opaque_ty(
opaque_ty: &chalk_ir::OpaqueTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt)))
}
fn debug_opaque_ty_id(
opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt)))
}
fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt)))
}
fn debug_lifetime(
lifetime: &chalk_ir::Lifetime<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_lifetime(lifetime, fmt)))
}
fn debug_generic_arg(
parameter: &GenericArg<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_generic_arg(parameter, fmt)))
}
fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_goal(goal, fmt)))
}
fn debug_goals(
goals: &chalk_ir::Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_goals(goals, fmt)))
}
fn debug_program_clause_implication(
pci: &chalk_ir::ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_program_clause_implication(pci, fmt)))
}
fn debug_application_ty(
application_ty: &chalk_ir::ApplicationTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_application_ty(application_ty, fmt)))
}
fn debug_substitution(
substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_substitution(substitution, fmt)))
}
fn debug_separator_trait_ref(
separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| {
Some(prog?.debug_separator_trait_ref(separator_trait_ref, fmt))
})
}
fn debug_fn_def_id(
fn_def_id: chalk_ir::FnDefId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
}
fn debug_const(
constant: &chalk_ir::Const<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_const(constant, fmt)))
}
fn debug_variable_kinds(
variable_kinds: &chalk_ir::VariableKinds<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_variable_kinds(variable_kinds, fmt)))
}
fn debug_variable_kinds_with_angles(
variable_kinds: &chalk_ir::VariableKinds<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| {
Some(prog?.debug_variable_kinds_with_angles(variable_kinds, fmt))
})
}
fn debug_canonical_var_kinds(
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| {
Some(prog?.debug_canonical_var_kinds(canonical_var_kinds, fmt))
})
}
fn debug_program_clause(
clause: &chalk_ir::ProgramClause<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_program_clause(clause, fmt)))
}
fn debug_program_clauses(
clauses: &chalk_ir::ProgramClauses<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_program_clauses(clauses, fmt)))
}
fn debug_quantified_where_clauses(
clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_quantified_where_clauses(clauses, fmt)))
}
fn intern_ty(&self, ty: chalk_ir::TyData<Self>) -> Box<chalk_ir::TyData<Self>> {
Box::new(ty)
}
fn ty_data<'a>(&self, ty: &'a Box<chalk_ir::TyData<Self>>) -> &'a chalk_ir::TyData<Self> {
ty
}
fn intern_lifetime(
&self,
lifetime: chalk_ir::LifetimeData<Self>,
) -> chalk_ir::LifetimeData<Self> {
lifetime
}
fn lifetime_data<'a>(
&self,
lifetime: &'a chalk_ir::LifetimeData<Self>,
) -> &'a chalk_ir::LifetimeData<Self> {
lifetime
}
fn intern_const(&self, constant: chalk_ir::ConstData<Self>) -> Arc<chalk_ir::ConstData<Self>> {
Arc::new(constant)
}
fn const_data<'a>(
&self,
constant: &'a Arc<chalk_ir::ConstData<Self>>,
) -> &'a chalk_ir::ConstData<Self> {
constant
}
fn const_eq(&self, _ty: &Box<chalk_ir::TyData<Self>>, _c1: &(), _c2: &()) -> bool {
true
}
fn intern_generic_arg(
&self,
parameter: chalk_ir::GenericArgData<Self>,
) -> chalk_ir::GenericArgData<Self> {
parameter
}
fn generic_arg_data<'a>(
&self,
parameter: &'a chalk_ir::GenericArgData<Self>,
) -> &'a chalk_ir::GenericArgData<Self> {
parameter
}
fn intern_goal(&self, goal: GoalData<Self>) -> Arc<GoalData<Self>> {
Arc::new(goal)
}
fn intern_goals<E>(
&self,
data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
) -> Result<Self::InternedGoals, E> {
data.into_iter().collect()
}
fn goal_data<'a>(&self, goal: &'a Arc<GoalData<Self>>) -> &'a GoalData<Self> {
goal
}
fn goals_data<'a>(&self, goals: &'a Vec<Goal<Interner>>) -> &'a [Goal<Interner>] {
goals
}
fn intern_substitution<E>(
&self,
data: impl IntoIterator<Item = Result<GenericArg<Self>, E>>,
) -> Result<Vec<GenericArg<Self>>, E> {
data.into_iter().collect()
}
fn substitution_data<'a>(
&self,
substitution: &'a Vec<GenericArg<Self>>,
) -> &'a [GenericArg<Self>] {
substitution
}
fn intern_program_clause(
&self,
data: chalk_ir::ProgramClauseData<Self>,
) -> chalk_ir::ProgramClauseData<Self> {
data
}
fn program_clause_data<'a>(
&self,
clause: &'a chalk_ir::ProgramClauseData<Self>,
) -> &'a chalk_ir::ProgramClauseData<Self> {
clause
}
fn intern_program_clauses<E>(
&self,
data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>,
) -> Result<Arc<[chalk_ir::ProgramClause<Self>]>, E> {
data.into_iter().collect()
}
fn program_clauses_data<'a>(
&self,
clauses: &'a Arc<[chalk_ir::ProgramClause<Self>]>,
) -> &'a [chalk_ir::ProgramClause<Self>] {
&clauses
}
fn intern_quantified_where_clauses<E>(
&self,
data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>,
) -> Result<Self::InternedQuantifiedWhereClauses, E> {
data.into_iter().collect()
}
fn quantified_where_clauses_data<'a>(
&self,
clauses: &'a Self::InternedQuantifiedWhereClauses,
) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
clauses
}
fn intern_generic_arg_kinds<E>(
&self,
data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>,
) -> Result<Self::InternedVariableKinds, E> {
data.into_iter().collect()
}
fn variable_kinds_data<'a>(
&self,
parameter_kinds: &'a Self::InternedVariableKinds,
) -> &'a [chalk_ir::VariableKind<Self>] {
&parameter_kinds
}
fn intern_canonical_var_kinds<E>(
&self,
data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>,
) -> Result<Self::InternedCanonicalVarKinds, E> {
data.into_iter().collect()
}
fn canonical_var_kinds_data<'a>(
&self,
canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
&canonical_var_kinds
}
fn intern_constraints<E>(
&self,
data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
) -> Result<Self::InternedConstraints, E> {
data.into_iter().collect()
}
fn constraints_data<'a>(
&self,
constraints: &'a Self::InternedConstraints,
) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
constraints
}
fn debug_closure_id(
_fn_def_id: chalk_ir::ClosureId<Self>,
_fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
None
}
fn debug_constraints(
_clauses: &chalk_ir::Constraints<Self>,
_fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
None
}
}
impl chalk_ir::interner::HasInterner for Interner {
type Interner = Self;
}


@ -0,0 +1,787 @@
//! This module contains the implementations of the `ToChalk` trait, which
//! handles conversion between our data types and their corresponding types in
//! Chalk (in both directions); plus some helper functions for more specialized
//! conversions.
use chalk_ir::{
cast::Cast, fold::shift::Shift, interner::HasInterner, PlaceholderIndex, Scalar, TypeName,
UniverseIndex,
};
use chalk_solve::rust_ir;
use base_db::salsa::InternKey;
use hir_def::{type_ref::Mutability, AssocContainerId, GenericDefId, Lookup, TypeAliasId};
use crate::{
db::HirDatabase,
primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness},
traits::{Canonical, Obligation},
ApplicationTy, CallableDefId, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId,
ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor,
};
use super::interner::*;
use super::*;
impl ToChalk for Ty {
type Chalk = chalk_ir::Ty<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Ty<Interner> {
match self {
Ty::Apply(apply_ty) => match apply_ty.ctor {
TypeCtor::Ref(m) => ref_to_chalk(db, m, apply_ty.parameters),
TypeCtor::Array => array_to_chalk(db, apply_ty.parameters),
TypeCtor::FnPtr { num_args: _, is_varargs } => {
let substitution = apply_ty.parameters.to_chalk(db).shifted_in(&Interner);
chalk_ir::TyData::Function(chalk_ir::FnPointer {
num_binders: 0,
abi: (),
safety: chalk_ir::Safety::Safe,
variadic: is_varargs,
substitution,
})
.intern(&Interner)
}
_ => {
let name = apply_ty.ctor.to_chalk(db);
let substitution = apply_ty.parameters.to_chalk(db);
chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner)
}
},
Ty::Projection(proj_ty) => {
let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
let substitution = proj_ty.parameters.to_chalk(db);
chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy {
associated_ty_id,
substitution,
})
.cast(&Interner)
.intern(&Interner)
}
Ty::Placeholder(id) => {
let interned_id = db.intern_type_param_id(id);
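// the interned type parameter id doubles as the placeholder index; `from_chalk`
// below reverses this mapping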
PlaceholderIndex {
ui: UniverseIndex::ROOT,
idx: interned_id.as_intern_id().as_usize(),
}
.to_ty::<Interner>(&Interner)
}
Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner),
Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
Ty::Dyn(predicates) => {
let where_clauses = chalk_ir::QuantifiedWhereClauses::from_iter(
&Interner,
predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)),
);
let bounded_ty = chalk_ir::DynTy {
bounds: make_binders(where_clauses, 1),
lifetime: FAKE_PLACEHOLDER.to_lifetime(&Interner),
};
chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner)
}
Ty::Opaque(opaque_ty) => {
let opaque_ty_id = opaque_ty.opaque_ty_id.to_chalk(db);
let substitution = opaque_ty.parameters.to_chalk(db);
chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy {
opaque_ty_id,
substitution,
}))
.intern(&Interner)
}
Ty::Unknown => {
let substitution = chalk_ir::Substitution::empty(&Interner);
let name = TypeName::Error;
chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner)
}
}
}
fn from_chalk(db: &dyn HirDatabase, chalk: chalk_ir::Ty<Interner>) -> Self {
match chalk.data(&Interner).clone() {
chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name {
TypeName::Error => Ty::Unknown,
TypeName::Ref(m) => ref_from_chalk(db, m, apply_ty.substitution),
TypeName::Array => array_from_chalk(db, apply_ty.substitution),
_ => {
let ctor = from_chalk(db, apply_ty.name);
let parameters = from_chalk(db, apply_ty.substitution);
Ty::Apply(ApplicationTy { ctor, parameters })
}
},
chalk_ir::TyData::Placeholder(idx) => {
assert_eq!(idx.ui, UniverseIndex::ROOT);
let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
crate::salsa::InternId::from(idx.idx),
);
Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
}
chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => {
let associated_ty = from_chalk(db, proj.associated_ty_id);
let parameters = from_chalk(db, proj.substitution);
Ty::Projection(ProjectionTy { associated_ty, parameters })
}
chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(opaque_ty)) => {
let impl_trait_id = from_chalk(db, opaque_ty.opaque_ty_id);
let parameters = from_chalk(db, opaque_ty.substitution);
Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters })
}
chalk_ir::TyData::Function(chalk_ir::FnPointer {
num_binders,
variadic,
substitution,
..
}) => {
assert_eq!(num_binders, 0);
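// `to_chalk` shifts the fn-pointer substitution in by one binder level, so shift it
// back out here; a plain fn pointer carries no bound variables of its own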
let parameters: Substs = from_chalk(
db,
substitution.shifted_out(&Interner).expect("fn ptr should have no binders"),
);
Ty::Apply(ApplicationTy {
ctor: TypeCtor::FnPtr {
num_args: (parameters.len() - 1) as u16,
is_varargs: variadic,
},
parameters,
})
}
chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx),
chalk_ir::TyData::InferenceVar(_iv, _kind) => Ty::Unknown,
chalk_ir::TyData::Dyn(where_clauses) => {
assert_eq!(where_clauses.bounds.binders.len(&Interner), 1);
let predicates = where_clauses
.bounds
.skip_binders()
.iter(&Interner)
.map(|c| from_chalk(db, c.clone()))
.collect();
Ty::Dyn(predicates)
}
}
}
}
const FAKE_PLACEHOLDER: PlaceholderIndex =
PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::MAX };
/// We currently don't model lifetimes, but Chalk does. So, we have to insert a
/// fake lifetime here, because Chalk's built-in logic may expect it to be there.
fn ref_to_chalk(
db: &dyn HirDatabase,
mutability: Mutability,
subst: Substs,
) -> chalk_ir::Ty<Interner> {
let arg = subst[0].clone().to_chalk(db);
let lifetime = FAKE_PLACEHOLDER.to_lifetime(&Interner);
chalk_ir::ApplicationTy {
name: TypeName::Ref(mutability.to_chalk(db)),
substitution: chalk_ir::Substitution::from_iter(
&Interner,
vec![lifetime.cast(&Interner), arg.cast(&Interner)],
),
}
.intern(&Interner)
}
/// Here we remove the lifetime from the type we got from Chalk.
fn ref_from_chalk(
db: &dyn HirDatabase,
mutability: chalk_ir::Mutability,
subst: chalk_ir::Substitution<Interner>,
) -> Ty {
let tys = subst
.iter(&Interner)
.filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone())))
.collect();
Ty::apply(TypeCtor::Ref(from_chalk(db, mutability)), Substs(tys))
}
/// We currently don't model constants, but Chalk does. So, we have to insert a
/// fake constant here, because Chalk's built-in logic may expect it to be there.
fn array_to_chalk(db: &dyn HirDatabase, subst: Substs) -> chalk_ir::Ty<Interner> {
let arg = subst[0].clone().to_chalk(db);
let usize_ty = chalk_ir::ApplicationTy {
name: TypeName::Scalar(Scalar::Uint(chalk_ir::UintTy::Usize)),
substitution: chalk_ir::Substitution::empty(&Interner),
}
.intern(&Interner);
let const_ = FAKE_PLACEHOLDER.to_const(&Interner, usize_ty);
chalk_ir::ApplicationTy {
name: TypeName::Array,
substitution: chalk_ir::Substitution::from_iter(
&Interner,
vec![arg.cast(&Interner), const_.cast(&Interner)],
),
}
.intern(&Interner)
}
/// Here we remove the const from the type we got from Chalk.
fn array_from_chalk(db: &dyn HirDatabase, subst: chalk_ir::Substitution<Interner>) -> Ty {
let tys = subst
.iter(&Interner)
.filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone())))
.collect();
Ty::apply(TypeCtor::Array, Substs(tys))
}
impl ToChalk for Substs {
type Chalk = chalk_ir::Substitution<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Substitution<Interner> {
chalk_ir::Substitution::from_iter(&Interner, self.iter().map(|ty| ty.clone().to_chalk(db)))
}
fn from_chalk(db: &dyn HirDatabase, parameters: chalk_ir::Substitution<Interner>) -> Substs {
let tys = parameters
.iter(&Interner)
.map(|p| match p.ty(&Interner) {
Some(ty) => from_chalk(db, ty.clone()),
None => unimplemented!(),
})
.collect();
Substs(tys)
}
}
impl ToChalk for TraitRef {
type Chalk = chalk_ir::TraitRef<Interner>;
fn to_chalk(self: TraitRef, db: &dyn HirDatabase) -> chalk_ir::TraitRef<Interner> {
let trait_id = self.trait_.to_chalk(db);
let substitution = self.substs.to_chalk(db);
chalk_ir::TraitRef { trait_id, substitution }
}
fn from_chalk(db: &dyn HirDatabase, trait_ref: chalk_ir::TraitRef<Interner>) -> Self {
let trait_ = from_chalk(db, trait_ref.trait_id);
let substs = from_chalk(db, trait_ref.substitution);
TraitRef { trait_, substs }
}
}
impl ToChalk for hir_def::TraitId {
type Chalk = TraitId;
fn to_chalk(self, _db: &dyn HirDatabase) -> TraitId {
chalk_ir::TraitId(self.as_intern_id())
}
fn from_chalk(_db: &dyn HirDatabase, trait_id: TraitId) -> hir_def::TraitId {
InternKey::from_intern_id(trait_id.0)
}
}
impl ToChalk for OpaqueTyId {
type Chalk = chalk_ir::OpaqueTyId<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::OpaqueTyId<Interner> {
db.intern_impl_trait_id(self).into()
}
fn from_chalk(
db: &dyn HirDatabase,
opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
) -> OpaqueTyId {
db.lookup_intern_impl_trait_id(opaque_ty_id.into())
}
}
impl ToChalk for TypeCtor {
type Chalk = TypeName<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> TypeName<Interner> {
match self {
TypeCtor::AssociatedType(type_alias) => {
let type_id = type_alias.to_chalk(db);
TypeName::AssociatedType(type_id)
}
TypeCtor::OpaqueType(impl_trait_id) => {
let id = impl_trait_id.to_chalk(db);
TypeName::OpaqueType(id)
}
TypeCtor::Bool => TypeName::Scalar(Scalar::Bool),
TypeCtor::Char => TypeName::Scalar(Scalar::Char),
TypeCtor::Int(int_ty) => TypeName::Scalar(int_ty_to_chalk(int_ty)),
TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) => {
TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32))
}
TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) => {
TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64))
}
TypeCtor::Tuple { cardinality } => TypeName::Tuple(cardinality.into()),
TypeCtor::RawPtr(mutability) => TypeName::Raw(mutability.to_chalk(db)),
TypeCtor::Slice => TypeName::Slice,
TypeCtor::Array => TypeName::Array,
TypeCtor::Ref(mutability) => TypeName::Ref(mutability.to_chalk(db)),
TypeCtor::Str => TypeName::Str,
TypeCtor::FnDef(callable_def) => {
let id = callable_def.to_chalk(db);
TypeName::FnDef(id)
}
TypeCtor::Never => TypeName::Never,
TypeCtor::Closure { def, expr } => {
let closure_id = db.intern_closure((def, expr));
TypeName::Closure(closure_id.into())
}
TypeCtor::Adt(adt_id) => TypeName::Adt(chalk_ir::AdtId(adt_id)),
TypeCtor::FnPtr { .. } => {
// This should not be reached, since Chalk doesn't represent
// function pointers with TypeName
unreachable!()
}
}
}
fn from_chalk(db: &dyn HirDatabase, type_name: TypeName<Interner>) -> TypeCtor {
match type_name {
TypeName::Adt(struct_id) => TypeCtor::Adt(struct_id.0),
TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
TypeName::OpaqueType(opaque_type_id) => {
TypeCtor::OpaqueType(from_chalk(db, opaque_type_id))
}
TypeName::Scalar(Scalar::Bool) => TypeCtor::Bool,
TypeName::Scalar(Scalar::Char) => TypeCtor::Char,
TypeName::Scalar(Scalar::Int(int_ty)) => TypeCtor::Int(IntTy {
signedness: Signedness::Signed,
bitness: bitness_from_chalk_int(int_ty),
}),
TypeName::Scalar(Scalar::Uint(uint_ty)) => TypeCtor::Int(IntTy {
signedness: Signedness::Unsigned,
bitness: bitness_from_chalk_uint(uint_ty),
}),
TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) => {
TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })
}
TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) => {
TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })
}
TypeName::Tuple(cardinality) => TypeCtor::Tuple { cardinality: cardinality as u16 },
TypeName::Raw(mutability) => TypeCtor::RawPtr(from_chalk(db, mutability)),
TypeName::Slice => TypeCtor::Slice,
TypeName::Ref(mutability) => TypeCtor::Ref(from_chalk(db, mutability)),
TypeName::Str => TypeCtor::Str,
TypeName::Never => TypeCtor::Never,
TypeName::FnDef(fn_def_id) => {
let callable_def = from_chalk(db, fn_def_id);
TypeCtor::FnDef(callable_def)
}
TypeName::Array => TypeCtor::Array,
TypeName::Closure(id) => {
let id: crate::db::ClosureId = id.into();
let (def, expr) = db.lookup_intern_closure(id);
TypeCtor::Closure { def, expr }
}
TypeName::Error => {
// this should not be reached, since we don't represent TypeName::Error with TypeCtor
unreachable!()
}
}
}
}
fn bitness_from_chalk_uint(uint_ty: chalk_ir::UintTy) -> IntBitness {
use chalk_ir::UintTy;
match uint_ty {
UintTy::Usize => IntBitness::Xsize,
UintTy::U8 => IntBitness::X8,
UintTy::U16 => IntBitness::X16,
UintTy::U32 => IntBitness::X32,
UintTy::U64 => IntBitness::X64,
UintTy::U128 => IntBitness::X128,
}
}
fn bitness_from_chalk_int(int_ty: chalk_ir::IntTy) -> IntBitness {
use chalk_ir::IntTy;
match int_ty {
IntTy::Isize => IntBitness::Xsize,
IntTy::I8 => IntBitness::X8,
IntTy::I16 => IntBitness::X16,
IntTy::I32 => IntBitness::X32,
IntTy::I64 => IntBitness::X64,
IntTy::I128 => IntBitness::X128,
}
}
fn int_ty_to_chalk(int_ty: IntTy) -> Scalar {
use chalk_ir::{IntTy, UintTy};
match int_ty.signedness {
Signedness::Signed => Scalar::Int(match int_ty.bitness {
IntBitness::Xsize => IntTy::Isize,
IntBitness::X8 => IntTy::I8,
IntBitness::X16 => IntTy::I16,
IntBitness::X32 => IntTy::I32,
IntBitness::X64 => IntTy::I64,
IntBitness::X128 => IntTy::I128,
}),
Signedness::Unsigned => Scalar::Uint(match int_ty.bitness {
IntBitness::Xsize => UintTy::Usize,
IntBitness::X8 => UintTy::U8,
IntBitness::X16 => UintTy::U16,
IntBitness::X32 => UintTy::U32,
IntBitness::X64 => UintTy::U64,
IntBitness::X128 => UintTy::U128,
}),
}
}
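// An illustrative sanity check (not part of the file above; it assumes the
// `IntTy`/`IntBitness`/`Signedness` types from the crate are constructible as
// shown): the signed arm of `int_ty_to_chalk` and `bitness_from_chalk_int`
// should round-trip.
#[test]
fn signed_int_scalar_round_trips() {
    let ty = IntTy { signedness: Signedness::Signed, bitness: IntBitness::X32 };
    match int_ty_to_chalk(ty) {
        Scalar::Int(chalk_int) => {
            assert!(matches!(bitness_from_chalk_int(chalk_int), IntBitness::X32))
        }
        _ => panic!("a signed int should map to Scalar::Int"),
    }
}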
impl ToChalk for Mutability {
type Chalk = chalk_ir::Mutability;
fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk {
match self {
Mutability::Shared => chalk_ir::Mutability::Not,
Mutability::Mut => chalk_ir::Mutability::Mut,
}
}
fn from_chalk(_db: &dyn HirDatabase, chalk: Self::Chalk) -> Self {
match chalk {
chalk_ir::Mutability::Mut => Mutability::Mut,
chalk_ir::Mutability::Not => Mutability::Shared,
}
}
}
impl ToChalk for hir_def::ImplId {
type Chalk = ImplId;
fn to_chalk(self, _db: &dyn HirDatabase) -> ImplId {
chalk_ir::ImplId(self.as_intern_id())
}
fn from_chalk(_db: &dyn HirDatabase, impl_id: ImplId) -> hir_def::ImplId {
InternKey::from_intern_id(impl_id.0)
}
}
impl ToChalk for CallableDefId {
type Chalk = FnDefId;
fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
db.intern_callable_def(self).into()
}
fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
db.lookup_intern_callable_def(fn_def_id.into())
}
}
impl ToChalk for TypeAliasId {
type Chalk = AssocTypeId;
fn to_chalk(self, _db: &dyn HirDatabase) -> AssocTypeId {
chalk_ir::AssocTypeId(self.as_intern_id())
}
fn from_chalk(_db: &dyn HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId {
InternKey::from_intern_id(type_alias_id.0)
}
}
pub struct TypeAliasAsValue(pub TypeAliasId);
impl ToChalk for TypeAliasAsValue {
type Chalk = AssociatedTyValueId;
fn to_chalk(self, _db: &dyn HirDatabase) -> AssociatedTyValueId {
rust_ir::AssociatedTyValueId(self.0.as_intern_id())
}
fn from_chalk(
_db: &dyn HirDatabase,
assoc_ty_value_id: AssociatedTyValueId,
) -> TypeAliasAsValue {
TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
}
}
impl ToChalk for GenericPredicate {
type Chalk = chalk_ir::QuantifiedWhereClause<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::QuantifiedWhereClause<Interner> {
match self {
GenericPredicate::Implemented(trait_ref) => {
let chalk_trait_ref = trait_ref.to_chalk(db);
let chalk_trait_ref = chalk_trait_ref.shifted_in(&Interner);
make_binders(chalk_ir::WhereClause::Implemented(chalk_trait_ref), 0)
}
GenericPredicate::Projection(projection_pred) => {
let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner);
let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner);
let alias = chalk_ir::AliasTy::Projection(projection);
make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0)
}
GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"),
}
}
fn from_chalk(
db: &dyn HirDatabase,
where_clause: chalk_ir::QuantifiedWhereClause<Interner>,
) -> GenericPredicate {
// we don't produce any where clauses with binders and can't currently deal with them
match where_clause
.skip_binders()
.shifted_out(&Interner)
.expect("unexpected bound vars in where clause")
{
chalk_ir::WhereClause::Implemented(tr) => {
GenericPredicate::Implemented(from_chalk(db, tr))
}
chalk_ir::WhereClause::AliasEq(projection_eq) => {
let projection_ty = from_chalk(
db,
match projection_eq.alias {
chalk_ir::AliasTy::Projection(p) => p,
_ => unimplemented!(),
},
);
let ty = from_chalk(db, projection_eq.ty);
GenericPredicate::Projection(ProjectionPredicate { projection_ty, ty })
}
chalk_ir::WhereClause::LifetimeOutlives(_) => {
// we shouldn't get these from Chalk
panic!("encountered LifetimeOutlives from Chalk")
}
chalk_ir::WhereClause::TypeOutlives(_) => {
// we shouldn't get these from Chalk
panic!("encountered TypeOutlives from Chalk")
}
}
}
}
impl ToChalk for ProjectionTy {
type Chalk = chalk_ir::ProjectionTy<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy<Interner> {
chalk_ir::ProjectionTy {
associated_ty_id: self.associated_ty.to_chalk(db),
substitution: self.parameters.to_chalk(db),
}
}
fn from_chalk(
db: &dyn HirDatabase,
projection_ty: chalk_ir::ProjectionTy<Interner>,
) -> ProjectionTy {
ProjectionTy {
associated_ty: from_chalk(db, projection_ty.associated_ty_id),
parameters: from_chalk(db, projection_ty.substitution),
}
}
}
impl ToChalk for ProjectionPredicate {
type Chalk = chalk_ir::AliasEq<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> {
chalk_ir::AliasEq {
alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)),
ty: self.ty.to_chalk(db),
}
}
fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self {
unimplemented!()
}
}
impl ToChalk for Obligation {
type Chalk = chalk_ir::DomainGoal<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::DomainGoal<Interner> {
match self {
Obligation::Trait(tr) => tr.to_chalk(db).cast(&Interner),
Obligation::Projection(pr) => pr.to_chalk(db).cast(&Interner),
}
}
fn from_chalk(_db: &dyn HirDatabase, _goal: chalk_ir::DomainGoal<Interner>) -> Self {
unimplemented!()
}
}
impl<T> ToChalk for Canonical<T>
where
T: ToChalk,
T::Chalk: HasInterner<Interner = Interner>,
{
type Chalk = chalk_ir::Canonical<T::Chalk>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
let kinds = self
.kinds
.iter()
.map(|k| match k {
TyKind::General => chalk_ir::TyKind::General,
TyKind::Integer => chalk_ir::TyKind::Integer,
TyKind::Float => chalk_ir::TyKind::Float,
})
.map(|tk| {
chalk_ir::CanonicalVarKind::new(
chalk_ir::VariableKind::Ty(tk),
chalk_ir::UniverseIndex::ROOT,
)
});
let value = self.value.to_chalk(db);
chalk_ir::Canonical {
value,
binders: chalk_ir::CanonicalVarKinds::from_iter(&Interner, kinds),
}
}
fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
let kinds = canonical
.binders
.iter(&Interner)
.map(|k| match k.kind {
chalk_ir::VariableKind::Ty(tk) => match tk {
chalk_ir::TyKind::General => TyKind::General,
chalk_ir::TyKind::Integer => TyKind::Integer,
chalk_ir::TyKind::Float => TyKind::Float,
},
chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"),
chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"),
})
.collect();
Canonical { kinds, value: from_chalk(db, canonical.value) }
}
}
impl ToChalk for Arc<TraitEnvironment> {
type Chalk = chalk_ir::Environment<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Environment<Interner> {
let mut clauses = Vec::new();
for pred in &self.predicates {
if pred.is_error() {
// for env, we just ignore errors
continue;
}
let program_clause: chalk_ir::ProgramClause<Interner> =
pred.clone().to_chalk(db).cast(&Interner);
clauses.push(program_clause.into_from_env_clause(&Interner));
}
chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses)
}
fn from_chalk(
_db: &dyn HirDatabase,
_env: chalk_ir::Environment<Interner>,
) -> Arc<TraitEnvironment> {
unimplemented!()
}
}
impl<T: ToChalk> ToChalk for InEnvironment<T>
where
T::Chalk: chalk_ir::interner::HasInterner<Interner = Interner>,
{
type Chalk = chalk_ir::InEnvironment<T::Chalk>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> {
chalk_ir::InEnvironment {
environment: self.environment.to_chalk(db),
goal: self.value.to_chalk(db),
}
}
fn from_chalk(
db: &dyn HirDatabase,
in_env: chalk_ir::InEnvironment<T::Chalk>,
) -> InEnvironment<T> {
InEnvironment {
environment: from_chalk(db, in_env.environment),
value: from_chalk(db, in_env.goal),
}
}
}
pub(super) fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T>
where
T: HasInterner<Interner = Interner>,
{
chalk_ir::Binders::new(
chalk_ir::VariableKinds::from_iter(
&Interner,
std::iter::repeat(chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General)).take(num_vars),
),
value,
)
}
pub(super) fn convert_where_clauses(
db: &dyn HirDatabase,
def: GenericDefId,
substs: &Substs,
) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
let generic_predicates = db.generic_predicates(def);
let mut result = Vec::with_capacity(generic_predicates.len());
for pred in generic_predicates.iter() {
if pred.value.is_error() {
// skip errored predicates completely
continue;
}
result.push(pred.clone().subst(substs).to_chalk(db));
}
result
}
pub(super) fn generic_predicate_to_inline_bound(
db: &dyn HirDatabase,
pred: &GenericPredicate,
self_ty: &Ty,
) -> Option<rust_ir::InlineBound<Interner>> {
// An InlineBound is like a GenericPredicate, except the self type is left out.
// We don't have a special type for this, but Chalk does.
match pred {
GenericPredicate::Implemented(trait_ref) => {
if &trait_ref.substs[0] != self_ty {
// we can only convert predicates back to type bounds if they
// have the expected self type
return None;
}
let args_no_self = trait_ref.substs[1..]
.iter()
.map(|ty| ty.clone().to_chalk(db).cast(&Interner))
.collect();
let trait_bound =
rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self };
Some(rust_ir::InlineBound::TraitBound(trait_bound))
}
GenericPredicate::Projection(proj) => {
if &proj.projection_ty.parameters[0] != self_ty {
return None;
}
let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let args_no_self = proj.projection_ty.parameters[1..]
.iter()
.map(|ty| ty.clone().to_chalk(db).cast(&Interner))
.collect();
let alias_eq_bound = rust_ir::AliasEqBound {
value: proj.ty.clone().to_chalk(db),
trait_bound: rust_ir::TraitBound { trait_id: trait_.to_chalk(db), args_no_self },
associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db),
parameters: Vec::new(), // FIXME we don't support generic associated types yet
};
Some(rust_ir::InlineBound::AliasEqBound(alias_eq_bound))
}
GenericPredicate::Error => None,
}
}
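// A toy version of the same transformation with plain strings standing in for
// types (hypothetical helper, not Chalk's API): an inline bound is the
// predicate with its self-type argument stripped, and the conversion only
// succeeds when the first argument is the expected self type.
#[cfg(test)]
mod inline_bound_sketch {
    fn inline_bound<'a>(self_ty: &str, trait_args: &'a [String]) -> Option<&'a [String]> {
        // `trait_args[0]` plays the role of `trait_ref.substs[0]` above.
        if trait_args.first().map(String::as_str) != Some(self_ty) {
            return None;
        }
        Some(&trait_args[1..])
    }

    #[test]
    fn drops_the_self_type() {
        let args = vec!["Self".to_string(), "u32".to_string()];
        assert_eq!(inline_bound("Self", &args), Some(&args[1..]));
        assert_eq!(inline_bound("SomeOtherType", &args), None);
    }
}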


@@ -0,0 +1,358 @@
//! Implementation of Chalk debug helper functions using TLS.
use std::fmt;
use chalk_ir::{AliasTy, GenericArg, Goal, Goals, Lifetime, ProgramClauseImplication, TypeName};
use itertools::Itertools;
use super::{from_chalk, Interner};
use crate::{db::HirDatabase, CallableDefId, TypeCtor};
use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId};
pub use unsafe_tls::{set_current_program, with_current_program};
pub struct DebugContext<'a>(&'a dyn HirDatabase);
impl DebugContext<'_> {
pub fn debug_struct_id(
&self,
id: super::AdtId,
f: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let type_ctor: TypeCtor = from_chalk(self.0, TypeName::Adt(id));
match type_ctor {
TypeCtor::Bool => write!(f, "bool")?,
TypeCtor::Char => write!(f, "char")?,
TypeCtor::Int(t) => write!(f, "{}", t)?,
TypeCtor::Float(t) => write!(f, "{}", t)?,
TypeCtor::Str => write!(f, "str")?,
TypeCtor::Slice => write!(f, "slice")?,
TypeCtor::Array => write!(f, "array")?,
TypeCtor::RawPtr(m) => write!(f, "*{}", m.as_keyword_for_ptr())?,
TypeCtor::Ref(m) => write!(f, "&{}", m.as_keyword_for_ref())?,
TypeCtor::Never => write!(f, "!")?,
TypeCtor::Tuple { .. } => {
write!(f, "()")?;
}
TypeCtor::FnPtr { .. } => {
write!(f, "fn")?;
}
TypeCtor::FnDef(def) => {
let name = match def {
CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
CallableDefId::EnumVariantId(e) => {
let enum_data = self.0.enum_data(e.parent);
enum_data.variants[e.local_id].name.clone()
}
};
match def {
CallableDefId::FunctionId(_) => write!(f, "{{fn {}}}", name)?,
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
write!(f, "{{ctor {}}}", name)?
}
}
}
TypeCtor::Adt(def_id) => {
let name = match def_id {
AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
};
write!(f, "{}", name)?;
}
TypeCtor::AssociatedType(type_alias) => {
let trait_ = match type_alias.lookup(self.0.upcast()).container {
AssocContainerId::TraitId(it) => it,
_ => panic!("not an associated type"),
};
let trait_name = self.0.trait_data(trait_).name.clone();
let name = self.0.type_alias_data(type_alias).name.clone();
write!(f, "{}::{}", trait_name, name)?;
}
TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id {
crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
write!(f, "{{impl trait {} of {:?}}}", idx, func)?;
}
},
TypeCtor::Closure { def, expr } => {
write!(f, "{{closure {:?} in ", expr.into_raw())?;
match def {
DefWithBodyId::FunctionId(func) => {
write!(f, "fn {}", self.0.function_data(func).name)?
}
DefWithBodyId::StaticId(s) => {
if let Some(name) = self.0.static_data(s).name.as_ref() {
write!(f, "body of static {}", name)?;
} else {
write!(f, "body of unnamed static {:?}", s)?;
}
}
DefWithBodyId::ConstId(c) => {
if let Some(name) = self.0.const_data(c).name.as_ref() {
write!(f, "body of const {}", name)?;
} else {
write!(f, "body of unnamed const {:?}", c)?;
}
}
};
write!(f, "}}")?;
}
}
Ok(())
}
pub fn debug_trait_id(
&self,
id: super::TraitId,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let trait_: hir_def::TraitId = from_chalk(self.0, id);
let trait_data = self.0.trait_data(trait_);
write!(fmt, "{}", trait_data.name)
}
pub fn debug_assoc_type_id(
&self,
id: super::AssocTypeId,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let type_alias: TypeAliasId = from_chalk(self.0, id);
let type_alias_data = self.0.type_alias_data(type_alias);
let trait_ = match type_alias.lookup(self.0.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let trait_data = self.0.trait_data(trait_);
write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
}
pub fn debug_opaque_ty_id(
&self,
opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
}
pub fn debug_alias(
&self,
alias_ty: &AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
match alias_ty {
AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt),
AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt),
}
}
pub fn debug_projection_ty(
&self,
projection_ty: &chalk_ir::ProjectionTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id);
let type_alias_data = self.0.type_alias_data(type_alias);
let trait_ = match type_alias.lookup(self.0.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let trait_data = self.0.trait_data(trait_);
let params = projection_ty.substitution.as_slice(&Interner);
write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
if params.len() > 1 {
write!(
fmt,
"<{}>",
&params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
)?;
}
write!(fmt, ">::{}", type_alias_data.name)
}
pub fn debug_opaque_ty(
&self,
opaque_ty: &chalk_ir::OpaqueTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
}
pub fn debug_ty(
&self,
ty: &chalk_ir::Ty<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", ty.data(&Interner))
}
pub fn debug_lifetime(
&self,
lifetime: &Lifetime<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", lifetime.data(&Interner))
}
pub fn debug_generic_arg(
&self,
parameter: &GenericArg<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
}
pub fn debug_goal(
&self,
goal: &Goal<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let goal_data = goal.data(&Interner);
write!(fmt, "{:?}", goal_data)
}
pub fn debug_goals(
&self,
goals: &Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", goals.debug(&Interner))
}
pub fn debug_program_clause_implication(
&self,
pci: &ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", pci.debug(&Interner))
}
pub fn debug_application_ty(
&self,
application_ty: &chalk_ir::ApplicationTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", application_ty.debug(&Interner))
}
pub fn debug_substitution(
&self,
substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", substitution.debug(&Interner))
}
pub fn debug_separator_trait_ref(
&self,
separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
}
pub fn debug_fn_def_id(
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let def: CallableDefId = from_chalk(self.0, fn_def_id);
let name = match def {
CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
CallableDefId::EnumVariantId(e) => {
let enum_data = self.0.enum_data(e.parent);
enum_data.variants[e.local_id].name.clone()
}
};
match def {
CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
write!(fmt, "{{ctor {}}}", name)
}
}
}
pub fn debug_const(
&self,
_constant: &chalk_ir::Const<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "const")
}
pub fn debug_variable_kinds(
&self,
variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.as_slice(&Interner))
}
pub fn debug_variable_kinds_with_angles(
&self,
variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner))
}
pub fn debug_canonical_var_kinds(
&self,
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner))
}
pub fn debug_program_clause(
&self,
clause: &chalk_ir::ProgramClause<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clause.data(&Interner))
}
pub fn debug_program_clauses(
&self,
clauses: &chalk_ir::ProgramClauses<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clauses.as_slice(&Interner))
}
pub fn debug_quantified_where_clauses(
&self,
clauses: &chalk_ir::QuantifiedWhereClauses<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clauses.as_slice(&Interner))
}
}
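// A string-only sketch of the `<Self as Trait<Args, ..>>::Assoc` rendering
// that `debug_projection_ty` produces above (hypothetical helper, not part of
// the original file).
#[cfg(test)]
mod projection_rendering_sketch {
    fn render_projection(self_ty: &str, trait_: &str, args: &[&str], assoc: &str) -> String {
        let mut out = format!("<{} as {}", self_ty, trait_);
        if !args.is_empty() {
            out.push_str(&format!("<{}>", args.join(", ")));
        }
        out.push_str(">::");
        out.push_str(assoc);
        out
    }

    #[test]
    fn renders_fully_qualified_paths() {
        assert_eq!(render_projection("T", "Iterator", &[], "Item"), "<T as Iterator>::Item");
        assert_eq!(render_projection("T", "Add", &["u32"], "Output"), "<T as Add<u32>>::Output");
    }
}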
mod unsafe_tls {
use super::DebugContext;
use crate::db::HirDatabase;
use scoped_tls::scoped_thread_local;
scoped_thread_local!(static PROGRAM: DebugContext);
pub fn with_current_program<R>(
op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
) -> R {
if PROGRAM.is_set() {
PROGRAM.with(|prog| op(Some(prog)))
} else {
op(None)
}
}
pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
where
OP: FnOnce() -> R,
{
let ctx = DebugContext(p);
// We transmute the lifetime in the DebugContext to 'static. This is
// fine because we only keep the reference for the lifetime of this
// function, *and* the only way to access the context is through
// `with_current_program`, which hides the lifetime again behind a
// higher-ranked `for<'a>` closure bound.
let static_p: &DebugContext<'static> =
unsafe { std::mem::transmute::<&DebugContext, &DebugContext<'static>>(&ctx) };
PROGRAM.set(static_p, || op())
}
}
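// A self-contained sketch of the same scoped-TLS trick, with a hypothetical
// `Ctx<'a>` standing in for `DebugContext<'a>` (illustrative only, not part of
// the original file): the static slot stores `Ctx<'static>`, so the setter
// shortens the lifetime with `transmute` and the getter re-hides it behind a
// `for<'a>` closure bound, mirroring `set_current_program`/`with_current_program`.
#[cfg(test)]
mod scoped_tls_sketch {
    use scoped_tls::scoped_thread_local;

    struct Ctx<'a>(&'a str);

    // As with `PROGRAM` above, the elided lifetime here means `Ctx<'static>`.
    scoped_thread_local!(static CTX: Ctx);

    fn set_ctx<R>(ctx: &Ctx<'_>, op: impl FnOnce() -> R) -> R {
        // Sound for the same reason as `set_current_program`: the reference
        // never outlives this call, and callers only see it through `with_ctx`.
        let static_ctx: &Ctx<'static> = unsafe { std::mem::transmute(ctx) };
        CTX.set(static_ctx, op)
    }

    fn with_ctx<R>(op: impl for<'a> FnOnce(Option<&'a Ctx<'a>>) -> R) -> R {
        if CTX.is_set() {
            CTX.with(|ctx| op(Some(ctx)))
        } else {
            op(None)
        }
    }

    #[test]
    fn context_is_only_visible_inside_the_scope() {
        assert_eq!(with_ctx(|c| c.map(|c| c.0.len())), None);
        let owner = String::from("database");
        let ctx = Ctx(&owner);
        let len = set_ctx(&ctx, || with_ctx(|c| c.map(|c| c.0.len())));
        assert_eq!(len, Some(8));
    }
}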

crates/hir_ty/src/utils.rs Normal file

@@ -0,0 +1,257 @@
//! Helper functions for working with defs, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (i.e., which need a `db`).
use std::sync::Arc;
use hir_def::generics::WherePredicateTarget;
use hir_def::{
adt::VariantData,
db::DefDatabase,
generics::{GenericParams, TypeParamData, TypeParamProvenance},
path::Path,
resolver::{HasResolver, TypeNs},
type_ref::TypeRef,
AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId, VariantId,
};
use hir_expand::name::{name, Name};
use crate::{db::HirDatabase, GenericPredicate, TraitRef};
fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
let resolver = trait_.resolver(db);
// returning the iterator directly doesn't easily work because of
// lifetime problems, but since there usually shouldn't be more than a
// few direct traits this should be fine (we could even use some kind of
// SmallVec if performance is a concern)
let generic_params = db.generic_params(trait_.into());
let trait_self = generic_params.find_trait_self_param();
generic_params
.where_predicates
.iter()
.filter_map(|pred| match &pred.target {
WherePredicateTarget::TypeRef(TypeRef::Path(p)) if p == &Path::from(name![Self]) => {
pred.bound.as_path()
}
WherePredicateTarget::TypeParam(local_id) if Some(*local_id) == trait_self => {
pred.bound.as_path()
}
_ => None,
})
.filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
Some(TypeNs::TraitId(t)) => Some(t),
_ => None,
})
.collect()
}
fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
// returning the iterator directly doesn't easily work because of
// lifetime problems, but since there usually shouldn't be more than a
// few direct traits this should be fine (we could even use some kind of
// SmallVec if performance is a concern)
let generic_params = db.generic_params(trait_ref.trait_.into());
let trait_self = match generic_params.find_trait_self_param() {
Some(p) => TypeParamId { parent: trait_ref.trait_.into(), local_id: p },
None => return Vec::new(),
};
db.generic_predicates_for_param(trait_self)
.iter()
.filter_map(|pred| {
pred.as_ref().filter_map(|pred| match pred {
GenericPredicate::Implemented(tr) => Some(tr.clone()),
_ => None,
})
})
.map(|pred| pred.subst(&trait_ref.substs))
.collect()
}
/// Returns an iterator over the whole super trait hierarchy (including the
/// trait itself).
pub(super) fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
// We need to take a bit of care here to avoid infinite loops in case of
// cycles (i.e. if we have `trait A: B; trait B: A;`)
let mut result = vec![trait_];
let mut i = 0;
while i < result.len() {
let t = result[i];
// yeah this is quadratic, but trait hierarchies should be flat
// enough that this doesn't matter
for tt in direct_super_traits(db, t) {
if !result.contains(&tt) {
result.push(tt);
}
}
i += 1;
}
result
}
/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
/// super traits. The original trait ref will be included. So the difference to
/// `all_super_traits` is that we keep track of type parameters; for example if
/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
/// `Self: OtherTrait<i32>`.
pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> Vec<TraitRef> {
// We need to take a bit of care here to avoid infinite loops in case of
// cycles (i.e. if we have `trait A: B; trait B: A;`)
let mut result = vec![trait_ref];
let mut i = 0;
while i < result.len() {
let t = &result[i];
// yeah this is quadratic, but trait hierarchies should be flat
// enough that this doesn't matter
for tt in direct_super_trait_refs(db, t) {
if !result.iter().any(|tr| tr.trait_ == tt.trait_) {
result.push(tt);
}
}
i += 1;
}
result
}
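// The same cycle-safe worklist scheme on a toy graph, with plain integers
// standing in for `TraitId` (illustrative only, not part of the original
// file): the `contains` check is what terminates `trait A: B; trait B: A;`.
#[cfg(test)]
mod super_trait_worklist_sketch {
    fn all_reachable(direct: impl Fn(u32) -> Vec<u32>, start: u32) -> Vec<u32> {
        let mut result = vec![start];
        let mut i = 0;
        while i < result.len() {
            let t = result[i];
            for tt in direct(t) {
                if !result.contains(&tt) {
                    result.push(tt);
                }
            }
            i += 1;
        }
        result
    }

    #[test]
    fn cycles_terminate() {
        // trait A: B; trait B: A;
        let edges = |t: u32| match t {
            0 => vec![1],
            1 => vec![0],
            _ => vec![],
        };
        assert_eq!(all_reachable(edges, 0), vec![0, 1]);
    }
}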
pub(super) fn associated_type_by_name_including_super_traits(
db: &dyn HirDatabase,
trait_ref: TraitRef,
name: &Name,
) -> Option<(TraitRef, TypeAliasId)> {
all_super_trait_refs(db, trait_ref).into_iter().find_map(|t| {
let assoc_type = db.trait_data(t.trait_).associated_type_by_name(name)?;
Some((t, assoc_type))
})
}
pub(super) fn variant_data(db: &dyn DefDatabase, var: VariantId) -> Arc<VariantData> {
match var {
VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
VariantId::EnumVariantId(it) => {
db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
}
}
}
/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
/// The underlying values are cloned if there are other strong references.
pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
if Arc::get_mut(a).is_none() {
*a = a.iter().cloned().collect();
}
Arc::get_mut(a).unwrap()
}
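// A quick illustration of the copy-on-write behaviour described above
// (hypothetical test, not part of the original file): mutating through
// `make_mut_slice` clones the slice when another `Arc` still points at it.
#[test]
fn make_mut_slice_clones_shared_data() {
    let mut a: Arc<[u32]> = Arc::from(vec![1, 2, 3]);
    let b = a.clone(); // a second strong reference
    make_mut_slice(&mut a)[0] = 10;
    assert_eq!(&*a, &[10, 2, 3]);
    assert_eq!(&*b, &[1, 2, 3]); // the other handle is unaffected
}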
pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
Generics { def, params: db.generic_params(def), parent_generics }
}
#[derive(Debug)]
pub(crate) struct Generics {
def: GenericDefId,
pub(crate) params: Arc<GenericParams>,
parent_generics: Option<Box<Generics>>,
}
impl Generics {
pub(crate) fn iter<'a>(
&'a self,
) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
self.parent_generics
.as_ref()
.into_iter()
.flat_map(|it| {
it.params
.types
.iter()
.map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
})
.chain(
self.params
.types
.iter()
.map(move |(local_id, p)| (TypeParamId { parent: self.def, local_id }, p)),
)
}
pub(crate) fn iter_parent<'a>(
&'a self,
) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
self.parent_generics.as_ref().into_iter().flat_map(|it| {
it.params
.types
.iter()
.map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
})
}
pub(crate) fn len(&self) -> usize {
self.len_split().0
}
/// (total, parents, child)
pub(crate) fn len_split(&self) -> (usize, usize, usize) {
let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
let child = self.params.types.len();
(parent + child, parent, child)
}
/// (parent total, self param, type param list, impl trait)
pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize) {
let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
let self_params = self
.params
.types
.iter()
.filter(|(_, p)| p.provenance == TypeParamProvenance::TraitSelf)
.count();
let list_params = self
.params
.types
.iter()
.filter(|(_, p)| p.provenance == TypeParamProvenance::TypeParamList)
.count();
let impl_trait_params = self
.params
.types
.iter()
.filter(|(_, p)| p.provenance == TypeParamProvenance::ArgumentImplTrait)
.count();
(parent, self_params, list_params, impl_trait_params)
}
pub(crate) fn param_idx(&self, param: TypeParamId) -> Option<usize> {
Some(self.find_param(param)?.0)
}
fn find_param(&self, param: TypeParamId) -> Option<(usize, &TypeParamData)> {
if param.parent == self.def {
let (idx, (_local_id, data)) = self
.params
.types
.iter()
.enumerate()
.find(|(_, (idx, _))| *idx == param.local_id)
.unwrap();
let (_total, parent_len, _child) = self.len_split();
Some((parent_len + idx, data))
} else {
self.parent_generics.as_ref().and_then(|g| g.find_param(param))
}
}
}
fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
};
match container {
AssocContainerId::ImplId(it) => Some(it.into()),
AssocContainerId::TraitId(it) => Some(it.into()),
AssocContainerId::ContainerId(_) => None,
}
}